Compare commits
32 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
e49333611f
|
|||
|
0055938338
|
|||
|
75e31c2008
|
|||
|
1aa8eb363f
|
|||
|
34240dfbd8
|
|||
|
46e7694ec6
|
|||
|
04166daeee
|
|||
|
e9e329ccc1
|
|||
|
6adf1281ef
|
|||
|
5b836a4e0c
|
|||
|
c28572e3b6
|
|||
|
17caa22331
|
|||
|
be8cd00c00
|
|||
|
7dc15e360d
|
|||
|
b1cd8449d9
|
|||
|
78dbd3b0ef
|
|||
|
df266bab12
|
|||
|
0ab62dab02
|
|||
|
38b617728c
|
|||
|
16f638301b
|
|||
|
d0a100359d
|
|||
|
c300aefc0b
|
|||
|
074806528f
|
|||
|
d62d7962fe
|
|||
|
1f4108b888
|
|||
|
eca757b24f
|
|||
|
e0d4844c5b
|
|||
|
4669783b67
|
|||
|
94c96ebf15
|
|||
|
121aa9d1ad
|
|||
|
ebe4670c08
|
|||
|
08177f4e6f
|
3
.gitignore
vendored
3
.gitignore
vendored
@@ -103,6 +103,9 @@ web_modules/
|
|||||||
# Optional npm cache directory
|
# Optional npm cache directory
|
||||||
.npm
|
.npm
|
||||||
|
|
||||||
|
# Optional npm package-lock.json
|
||||||
|
package-lock.json
|
||||||
|
|
||||||
# Optional eslint cache
|
# Optional eslint cache
|
||||||
.eslintcache
|
.eslintcache
|
||||||
|
|
||||||
|
|||||||
@@ -11,16 +11,22 @@ repos:
|
|||||||
hooks:
|
hooks:
|
||||||
- id: shellcheck
|
- id: shellcheck
|
||||||
|
|
||||||
- repo: https://github.com/pre-commit/mirrors-eslint
|
- repo: local
|
||||||
rev: v10.0.3
|
hooks:
|
||||||
|
- id: prettier
|
||||||
|
name: prettier
|
||||||
|
entry: npx prettier --write
|
||||||
|
language: system
|
||||||
|
files: "\\.(js|jsx|ts|tsx)$"
|
||||||
|
|
||||||
|
- repo: local
|
||||||
hooks:
|
hooks:
|
||||||
- id: eslint
|
- id: eslint
|
||||||
|
name: eslint
|
||||||
|
entry: npx eslint --fix
|
||||||
|
language: system
|
||||||
files: "\\.(js|jsx|ts|tsx)$"
|
files: "\\.(js|jsx|ts|tsx)$"
|
||||||
exclude: node_modules/
|
|
||||||
|
|
||||||
# Use stylelint (local) instead of the deprecated scss-lint Ruby gem which
|
|
||||||
# cannot parse modern Sass `@use` and module syntax. This invokes the
|
|
||||||
# project's installed `stylelint` via `npx` so the devDependency is used.
|
|
||||||
- repo: local
|
- repo: local
|
||||||
hooks:
|
hooks:
|
||||||
- id: stylelint
|
- id: stylelint
|
||||||
|
|||||||
19
.prettierrc.ts
Normal file
19
.prettierrc.ts
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
import { type Config } from "prettier";
|
||||||
|
|
||||||
|
const config: Config = {
|
||||||
|
plugins: ["@trivago/prettier-plugin-sort-imports"],
|
||||||
|
trailingComma: "none",
|
||||||
|
printWidth: 120,
|
||||||
|
importOrder: [
|
||||||
|
"<BUILTIN_MODULES>",
|
||||||
|
"<THIRD_PARTY_MODULES>",
|
||||||
|
"(?:services|components|contexts|pages|libs|types)/(.*)$",
|
||||||
|
"^[./].*\\.(?:ts|tsx)$",
|
||||||
|
"\\.(?:scss|css)$",
|
||||||
|
"^[./]"
|
||||||
|
],
|
||||||
|
importOrderSeparation: true,
|
||||||
|
importOrderSortSpecifiers: true
|
||||||
|
};
|
||||||
|
|
||||||
|
export default config;
|
||||||
118
README.md
118
README.md
@@ -0,0 +1,118 @@
|
|||||||
|
# @hamradio/aprs
|
||||||
|
|
||||||
|
APRS (Automatic Packet Reporting System) utilities and parsers for TypeScript/JavaScript.
|
||||||
|
|
||||||
|
> For AX.25 frame parsing, see [@hamradio/ax25](https://www.npmjs.com/package/@hamradio/ax25).
|
||||||
|
|
||||||
|
This package provides lightweight parsing and helpers for APRS frames (APRS-IS style payloads). It exposes a small API for parsing frames, decoding payloads, working with APRS timestamps and addresses, and a few utility conversions.
|
||||||
|
|
||||||
|
## Install
|
||||||
|
|
||||||
|
Using npm:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm install @hamradio/aprs
|
||||||
|
```
|
||||||
|
|
||||||
|
Or with yarn:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
yarn add @hamradio/aprs
|
||||||
|
```
|
||||||
|
|
||||||
|
## Quick examples
|
||||||
|
|
||||||
|
Examples below show ESM / TypeScript usage. For CommonJS require() the same symbols are available from the package entrypoint.
|
||||||
|
|
||||||
|
### Import
|
||||||
|
|
||||||
|
```ts
|
||||||
|
import {
|
||||||
|
Frame,
|
||||||
|
Address,
|
||||||
|
Timestamp,
|
||||||
|
base91ToNumber,
|
||||||
|
knotsToKmh,
|
||||||
|
} from '@hamradio/aprs';
|
||||||
|
```
|
||||||
|
|
||||||
|
### Parse a raw APRS frame and decode payload
|
||||||
|
|
||||||
|
```ts
|
||||||
|
const raw = 'NOCALL-1>APRS,WIDE1-1:@092345z/:*E";qZ=OMRC/A=088132Hello World!';
|
||||||
|
|
||||||
|
// Parse into a Frame instance
|
||||||
|
const frame = Frame.fromString(raw);
|
||||||
|
|
||||||
|
// Inspect routing and payload
|
||||||
|
console.log(frame.source.toString()); // e.g. NOCALL-1
|
||||||
|
console.log(frame.destination.toString()); // APRS
|
||||||
|
console.log(frame.path.map(p => p.toString()));
|
||||||
|
|
||||||
|
// Decode payload (returns a structured payload object or null)
|
||||||
|
const payload = frame.decode();
|
||||||
|
console.log(payload?.type); // e.g. 'position' | 'message' | 'status' | ...
|
||||||
|
|
||||||
|
// Or ask for sections (dissection) along with decoded payload
|
||||||
|
const res = frame.decode(true) as { payload: any | null; structure: any };
|
||||||
|
console.log(res.payload, res.structure);
|
||||||
|
```
|
||||||
|
|
||||||
|
### Message decoding
|
||||||
|
|
||||||
|
```ts
|
||||||
|
const msg = 'W1AW>APRS::KB1ABC-5 :Hello World';
|
||||||
|
const f = Frame.fromString(msg);
|
||||||
|
const decoded = f.decode();
|
||||||
|
if (decoded && decoded.type === 'message') {
|
||||||
|
console.log(decoded.addressee); // KB1ABC-5
|
||||||
|
console.log(decoded.text); // Hello World
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Work with addresses and timestamps
|
||||||
|
|
||||||
|
```ts
|
||||||
|
const a = Address.parse('WA1PLE-4*');
|
||||||
|
console.log(a.call, a.ssid, a.isRepeated);
|
||||||
|
|
||||||
|
const ts = new Timestamp(12, 45, 'HMS', { seconds: 30, zulu: true });
|
||||||
|
console.log(ts.toDate()); // JavaScript Date representing the timestamp
|
||||||
|
```
|
||||||
|
|
||||||
|
### Utility conversions
|
||||||
|
|
||||||
|
```ts
|
||||||
|
console.log(base91ToNumber('!!!!')); // decode base91 values used in some APRS payloads
|
||||||
|
console.log(knotsToKmh(10)); // convert speed
|
||||||
|
```
|
||||||
|
|
||||||
|
## API summary
|
||||||
|
|
||||||
|
- `Frame` — parse frames with `Frame.fromString()` / `Frame.parse()` and decode payloads with `frame.decode()`.
|
||||||
|
- `Address` — helpers to parse and format APRS addresses: `Address.parse()` / `Address.fromString()`.
|
||||||
|
- `Timestamp` — APRS timestamp wrapper with `toDate()` conversion.
|
||||||
|
- Utility functions: `base91ToNumber`, `knotsToKmh`, `kmhToKnots`, `feetToMeters`, `metersToFeet`, `celsiusToFahrenheit`, `fahrenheitToCelsius`.
|
||||||
|
|
||||||
|
## Development
|
||||||
|
|
||||||
|
Run tests with:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm install
|
||||||
|
npm test
|
||||||
|
```
|
||||||
|
|
||||||
|
Build the distribution with:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm run build
|
||||||
|
```
|
||||||
|
|
||||||
|
## Contributing
|
||||||
|
|
||||||
|
See the project repository for contribution guidelines and tests.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
Project: @hamradio/aprs — APRS parsing utilities for TypeScript
|
||||||
|
|||||||
3283
package-lock.json
generated
3283
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
27
package.json
27
package.json
@@ -1,6 +1,7 @@
|
|||||||
{
|
{
|
||||||
"name": "@hamradio/aprs",
|
"name": "@hamradio/aprs",
|
||||||
"version": "1.0.0",
|
"type": "module",
|
||||||
|
"version": "1.4.0",
|
||||||
"description": "APRS (Automatic Packet Reporting System) protocol support for Typescript",
|
"description": "APRS (Automatic Packet Reporting System) protocol support for Typescript",
|
||||||
"keywords": [
|
"keywords": [
|
||||||
"APRS",
|
"APRS",
|
||||||
@@ -11,12 +12,12 @@
|
|||||||
],
|
],
|
||||||
"repository": {
|
"repository": {
|
||||||
"type": "git",
|
"type": "git",
|
||||||
"url": "https://git.maze.io/ham/aprs.js"
|
"url": "https://git.maze.io/ham/aprs.ts"
|
||||||
},
|
},
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"author": "Wijnand Modderman-Lenstra",
|
"author": "Wijnand Modderman-Lenstra",
|
||||||
"main": "dist/index.js",
|
"main": "dist/index.js",
|
||||||
"module": "dist/index.mjs",
|
"module": "dist/index.js",
|
||||||
"types": "dist/index.d.ts",
|
"types": "dist/index.d.ts",
|
||||||
"files": [
|
"files": [
|
||||||
"dist"
|
"dist"
|
||||||
@@ -24,7 +25,7 @@
|
|||||||
"exports": {
|
"exports": {
|
||||||
".": {
|
".": {
|
||||||
"types": "./dist/index.d.ts",
|
"types": "./dist/index.d.ts",
|
||||||
"import": "./dist/index.mjs",
|
"import": "./dist/index.js",
|
||||||
"require": "./dist/index.js"
|
"require": "./dist/index.js"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -36,17 +37,25 @@
|
|||||||
"test:watch": "vitest --watch",
|
"test:watch": "vitest --watch",
|
||||||
"test:ci": "vitest --run",
|
"test:ci": "vitest --run",
|
||||||
"lint": "eslint .",
|
"lint": "eslint .",
|
||||||
"prepare": "npm run build"
|
"prepare": "npm run build",
|
||||||
|
"push": "npm version patch && git push",
|
||||||
|
"push-minor": "npm version minor && git push",
|
||||||
|
"push-major": "npm version major && git push"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"@hamradio/packet": "^1.1.1",
|
||||||
|
"extended-nmea": "^2.1.3"
|
||||||
},
|
},
|
||||||
"dependencies": {},
|
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@eslint/js": "^10.0.1",
|
"@eslint/js": "^10.0.1",
|
||||||
"@vitest/coverage-v8": "^4.0.18",
|
"@trivago/prettier-plugin-sort-imports": "^6.0.2",
|
||||||
|
"@vitest/coverage-v8": "^4.1.0",
|
||||||
"eslint": "^10.0.3",
|
"eslint": "^10.0.3",
|
||||||
"globals": "^17.4.0",
|
"globals": "^17.4.0",
|
||||||
|
"prettier": "^3.8.1",
|
||||||
"tsup": "^8.5.1",
|
"tsup": "^8.5.1",
|
||||||
"typescript": "^5.9.3",
|
"typescript": "^5.9.3",
|
||||||
"typescript-eslint": "^8.57.0",
|
"typescript-eslint": "^8.57.1",
|
||||||
"vitest": "^4.0.18"
|
"vitest": "^4.1.0"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
115
scripts/release.js
Executable file
115
scripts/release.js
Executable file
@@ -0,0 +1,115 @@
|
|||||||
|
#!/usr/bin/env node
|
||||||
|
// Minimal safe release script.
|
||||||
|
// Usage: node scripts/release.js [major|minor|patch|<version>]
|
||||||
|
const { execSync } = require("child_process");
|
||||||
|
const fs = require("fs");
|
||||||
|
const path = require("path");
|
||||||
|
|
||||||
|
const root = path.resolve(__dirname, "..");
|
||||||
|
const pkgPath = path.join(root, "package.json");
|
||||||
|
|
||||||
|
function run(cmd, opts = {}) {
|
||||||
|
return execSync(cmd, { stdio: "inherit", cwd: root, ...opts });
|
||||||
|
}
|
||||||
|
function runOutput(cmd) {
|
||||||
|
return execSync(cmd, { cwd: root }).toString().trim();
|
||||||
|
}
|
||||||
|
function bumpSemver(current, spec) {
|
||||||
|
if (["major","minor","patch"].includes(spec)) {
|
||||||
|
const [maj, min, patch] = current.split(".").map(n=>parseInt(n,10));
|
||||||
|
if (spec==="major") return `${maj+1}.0.0`;
|
||||||
|
if (spec==="minor") return `${maj}.${min+1}.0`;
|
||||||
|
return `${maj}.${min}.${patch+1}`;
|
||||||
|
}
|
||||||
|
if (!/^\d+\.\d+\.\d+$/.test(spec)) throw new Error("Invalid version spec");
|
||||||
|
return spec;
|
||||||
|
}
|
||||||
|
|
||||||
|
(async () => {
|
||||||
|
const arg = process.argv[2] || "patch";
|
||||||
|
const pkgRaw = fs.readFileSync(pkgPath, "utf8");
|
||||||
|
const pkg = JSON.parse(pkgRaw);
|
||||||
|
const oldVersion = pkg.version;
|
||||||
|
const newVersion = bumpSemver(oldVersion, arg);
|
||||||
|
let committed = false;
|
||||||
|
let tagged = false;
|
||||||
|
let pushedTags = false;
|
||||||
|
try {
|
||||||
|
// refuse to run if there are unstaged/uncommitted changes
|
||||||
|
const status = runOutput("git status --porcelain");
|
||||||
|
if (status) throw new Error("Repository has uncommitted changes; please commit or stash before releasing.");
|
||||||
|
|
||||||
|
console.log("Running tests...");
|
||||||
|
run("npm run test:ci");
|
||||||
|
|
||||||
|
console.log("Building...");
|
||||||
|
run("npm run build");
|
||||||
|
|
||||||
|
// write new version
|
||||||
|
pkg.version = newVersion;
|
||||||
|
fs.writeFileSync(pkgPath, JSON.stringify(pkg, null, 2) + "\n", "utf8");
|
||||||
|
console.log(`Bumped version: ${oldVersion} -> ${newVersion}`);
|
||||||
|
|
||||||
|
// commit
|
||||||
|
run(`git add ${pkgPath}`);
|
||||||
|
run(`git commit -m "chore(release): v${newVersion} - bump from v${oldVersion}"`);
|
||||||
|
committed = true;
|
||||||
|
|
||||||
|
// ensure tag doesn't already exist locally
|
||||||
|
let localTagExists = false;
|
||||||
|
try {
|
||||||
|
runOutput(`git rev-parse --verify refs/tags/v${newVersion}`);
|
||||||
|
localTagExists = true;
|
||||||
|
} catch (_) {
|
||||||
|
localTagExists = false;
|
||||||
|
}
|
||||||
|
if (localTagExists) throw new Error(`Tag v${newVersion} already exists locally — aborting to avoid overwrite.`);
|
||||||
|
|
||||||
|
// ensure tag doesn't exist on remote
|
||||||
|
const remoteTagInfo = (() => {
|
||||||
|
try { return runOutput(`git ls-remote --tags origin v${newVersion}`); } catch (_) { return ""; }
|
||||||
|
})();
|
||||||
|
if (remoteTagInfo) throw new Error(`Tag v${newVersion} already exists on remote — aborting to avoid overwrite.`);
|
||||||
|
|
||||||
|
// tag
|
||||||
|
run(`git tag -a v${newVersion} -m "Release v${newVersion}"`);
|
||||||
|
tagged = true;
|
||||||
|
|
||||||
|
// push commit and tags
|
||||||
|
run("git push");
|
||||||
|
run("git push --tags");
|
||||||
|
pushedTags = true;
|
||||||
|
|
||||||
|
// publish
|
||||||
|
console.log("Publishing to npm...");
|
||||||
|
const publishCmd = pkg.name && pkg.name.startsWith("@") ? "npm publish --access public" : "npm publish";
|
||||||
|
run(publishCmd);
|
||||||
|
|
||||||
|
console.log(`Release v${newVersion} succeeded.`);
|
||||||
|
process.exit(0);
|
||||||
|
} catch (err) {
|
||||||
|
console.error("Release failed:", err.message || err);
|
||||||
|
try {
|
||||||
|
// delete local tag
|
||||||
|
if (tagged) {
|
||||||
|
try { run(`git tag -d v${newVersion}`); } catch {}
|
||||||
|
if (pushedTags) {
|
||||||
|
try { run(`git push origin :refs/tags/v${newVersion}`); } catch {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// undo commit if made
|
||||||
|
if (committed) {
|
||||||
|
try { run("git reset --hard HEAD~1"); } catch {
|
||||||
|
// fallback: restore package.json content
|
||||||
|
fs.writeFileSync(pkgPath, pkgRaw, "utf8");
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// restore package.json
|
||||||
|
fs.writeFileSync(pkgPath, pkgRaw, "utf8");
|
||||||
|
}
|
||||||
|
} catch (rbErr) {
|
||||||
|
console.error("Rollback error:", rbErr.message || rbErr);
|
||||||
|
}
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
})();
|
||||||
1088
src/deviceid.ts
Normal file
1088
src/deviceid.ts
Normal file
File diff suppressed because it is too large
Load Diff
1040
src/frame.ts
1040
src/frame.ts
File diff suppressed because it is too large
Load Diff
@@ -1,4 +1,11 @@
|
|||||||
import { PacketSegment, PacketStructure } from "./parser.types";
|
import { Dissected, Field, Segment } from "@hamradio/packet";
|
||||||
|
|
||||||
|
// Any comment that contains this marker will set the doNotArchive flag on the
|
||||||
|
// decoded payload, which can be used by applications to skip archiving or
|
||||||
|
// logging frames that are meant to be transient or test data. This allows users
|
||||||
|
// to include the marker in their APRS comments when they want to indicate that
|
||||||
|
// a particular frame should not be stored long-term.
|
||||||
|
export const DO_NOT_ARCHIVE_MARKER = "!x!";
|
||||||
|
|
||||||
export interface IAddress {
|
export interface IAddress {
|
||||||
call: string;
|
call: string;
|
||||||
@@ -14,53 +21,72 @@ export interface IFrame {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// APRS Data Type Identifiers (first character of payload)
|
// APRS Data Type Identifiers (first character of payload)
|
||||||
export const DataTypeIdentifier = {
|
export enum DataType {
|
||||||
// Position Reports
|
// Position Reports
|
||||||
PositionNoTimestampNoMessaging: '!',
|
PositionNoTimestampNoMessaging = "!",
|
||||||
PositionNoTimestampWithMessaging: '=',
|
PositionNoTimestampWithMessaging = "=",
|
||||||
PositionWithTimestampNoMessaging: '/',
|
PositionWithTimestampNoMessaging = "/",
|
||||||
PositionWithTimestampWithMessaging: '@',
|
PositionWithTimestampWithMessaging = "@",
|
||||||
|
|
||||||
// Mic-E
|
// Mic-E
|
||||||
MicECurrent: '`',
|
MicE = "`",
|
||||||
MicEOld: "'",
|
MicEOld = "'",
|
||||||
|
|
||||||
// Messages and Bulletins
|
// Messages and Bulletins
|
||||||
Message: ':',
|
Message = ":",
|
||||||
|
|
||||||
// Objects and Items
|
// Objects and Items
|
||||||
Object: ';',
|
Object = ";",
|
||||||
Item: ')',
|
Item = ")",
|
||||||
|
|
||||||
// Status
|
// Status
|
||||||
Status: '>',
|
Status = ">",
|
||||||
|
|
||||||
// Query
|
// Query
|
||||||
Query: '?',
|
Query = "?",
|
||||||
|
|
||||||
// Telemetry
|
// Telemetry
|
||||||
TelemetryData: 'T',
|
TelemetryData = "T",
|
||||||
|
|
||||||
// Weather
|
// Weather
|
||||||
WeatherReportNoPosition: '_',
|
WeatherReportNoPosition = "_",
|
||||||
|
|
||||||
// Raw GPS Data
|
// Raw GPS Data
|
||||||
RawGPS: '$',
|
RawGPS = "$",
|
||||||
|
|
||||||
// Station Capabilities
|
// Station Capabilities
|
||||||
StationCapabilities: '<',
|
StationCapabilities = "<",
|
||||||
|
|
||||||
// User-Defined
|
// User-Defined
|
||||||
UserDefined: '{',
|
UserDefined = "{",
|
||||||
|
|
||||||
// Third-Party Traffic
|
// Third-Party Traffic
|
||||||
ThirdParty: '}',
|
ThirdParty = "}",
|
||||||
|
|
||||||
// Invalid/Test Data
|
// Invalid/Test Data
|
||||||
InvalidOrTest: ',',
|
InvalidOrTest = ","
|
||||||
} as const;
|
}
|
||||||
|
|
||||||
export type DataTypeIdentifier = typeof DataTypeIdentifier[keyof typeof DataTypeIdentifier];
|
export const DataTypeNames: { [key in DataType]: string } = {
|
||||||
|
[DataType.PositionNoTimestampNoMessaging]: "position",
|
||||||
|
[DataType.PositionNoTimestampWithMessaging]: "position with messaging",
|
||||||
|
[DataType.PositionWithTimestampNoMessaging]: "position with timestamp",
|
||||||
|
[DataType.PositionWithTimestampWithMessaging]: "position with timestamp and messaging",
|
||||||
|
[DataType.MicE]: "Mic-E",
|
||||||
|
[DataType.MicEOld]: "Mic-E (old)",
|
||||||
|
[DataType.Message]: "message/bulletin",
|
||||||
|
[DataType.Object]: "object",
|
||||||
|
[DataType.Item]: "item",
|
||||||
|
[DataType.Status]: "status",
|
||||||
|
[DataType.Query]: "query",
|
||||||
|
[DataType.TelemetryData]: "telemetry data",
|
||||||
|
[DataType.WeatherReportNoPosition]: "weather report",
|
||||||
|
[DataType.RawGPS]: "raw GPS data",
|
||||||
|
[DataType.StationCapabilities]: "station capabilities",
|
||||||
|
[DataType.UserDefined]: "user defined",
|
||||||
|
[DataType.ThirdParty]: "third-party traffic",
|
||||||
|
[DataType.InvalidOrTest]: "invalid/test"
|
||||||
|
};
|
||||||
|
|
||||||
export interface ISymbol {
|
export interface ISymbol {
|
||||||
table: string; // Symbol table identifier
|
table: string; // Symbol table identifier
|
||||||
@@ -75,8 +101,11 @@ export interface IPosition {
|
|||||||
longitude: number; // Decimal degrees
|
longitude: number; // Decimal degrees
|
||||||
ambiguity?: number; // Position ambiguity (0-4)
|
ambiguity?: number; // Position ambiguity (0-4)
|
||||||
altitude?: number; // Meters
|
altitude?: number; // Meters
|
||||||
speed?: number; // Speed in knots/kmh depending on source
|
speed?: number; // Speed in km/h
|
||||||
course?: number; // Course in degrees
|
course?: number; // Course in degrees
|
||||||
|
range?: number; // Kilometers
|
||||||
|
phg?: IPowerHeightGain;
|
||||||
|
dfs?: IDirectionFinding;
|
||||||
symbol?: ISymbol;
|
symbol?: ISymbol;
|
||||||
comment?: string;
|
comment?: string;
|
||||||
|
|
||||||
@@ -85,20 +114,47 @@ export interface IPosition {
|
|||||||
distanceTo?(other: IPosition): number; // Optional method to calculate distance to another position
|
distanceTo?(other: IPosition): number; // Optional method to calculate distance to another position
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export interface IPowerHeightGain {
|
||||||
|
power?: number; // Transmit power in watts
|
||||||
|
height?: number; // Antenna height in meters
|
||||||
|
gain?: number; // Antenna gain in dBi
|
||||||
|
directivity?: number | "omni" | "unknown"; // Optional directivity pattern (numeric code or "omni")
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IDirectionFinding {
|
||||||
|
bearing?: number; // Direction finding bearing in degrees
|
||||||
|
strength?: number; // Relative signal strength (0-9)
|
||||||
|
height?: number; // Antenna height in meters
|
||||||
|
gain?: number; // Antenna gain in dBi
|
||||||
|
quality?: number; // Signal quality or other metric (0-9)
|
||||||
|
directivity?: number | "omni" | "unknown"; // Optional directivity pattern (numeric code or "omni")
|
||||||
|
}
|
||||||
|
|
||||||
export interface ITimestamp {
|
export interface ITimestamp {
|
||||||
day?: number; // Day of month (DHM format)
|
day?: number; // Day of month (DHM format)
|
||||||
month?: number; // Month (MDHM format)
|
month?: number; // Month (MDHM format)
|
||||||
hours: number;
|
hours: number;
|
||||||
minutes: number;
|
minutes: number;
|
||||||
seconds?: number;
|
seconds?: number;
|
||||||
format: 'DHM' | 'HMS' | 'MDHM'; // Day-Hour-Minute, Hour-Minute-Second, Month-Day-Hour-Minute
|
format: "DHM" | "HMS" | "MDHM"; // Day-Hour-Minute, Hour-Minute-Second, Month-Day-Hour-Minute
|
||||||
zulu?: boolean; // Is UTC/Zulu time
|
zulu?: boolean; // Is UTC/Zulu time
|
||||||
toDate(): Date; // Convert to Date object respecting timezone
|
toDate(): Date; // Convert to Date object respecting timezone
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export interface ITelemetry {
|
||||||
|
sequence: number;
|
||||||
|
analog: number[];
|
||||||
|
digital?: number;
|
||||||
|
}
|
||||||
|
|
||||||
// Position Report Payload
|
// Position Report Payload
|
||||||
export interface PositionPayload {
|
export interface PositionPayload {
|
||||||
type: 'position';
|
type:
|
||||||
|
| DataType.PositionNoTimestampNoMessaging
|
||||||
|
| DataType.PositionNoTimestampWithMessaging
|
||||||
|
| DataType.PositionWithTimestampNoMessaging
|
||||||
|
| DataType.PositionWithTimestampWithMessaging;
|
||||||
|
doNotArchive?: boolean; // Optional flag to indicate frame should not be archived
|
||||||
timestamp?: ITimestamp;
|
timestamp?: ITimestamp;
|
||||||
position: IPosition;
|
position: IPosition;
|
||||||
messaging: boolean; // Whether APRS messaging is enabled
|
messaging: boolean; // Whether APRS messaging is enabled
|
||||||
@@ -106,7 +162,7 @@ export interface PositionPayload {
|
|||||||
messageType?: string;
|
messageType?: string;
|
||||||
isStandard?: boolean;
|
isStandard?: boolean;
|
||||||
};
|
};
|
||||||
sections?: PacketSegment[];
|
sections?: Segment[];
|
||||||
}
|
}
|
||||||
|
|
||||||
// Compressed Position Format
|
// Compressed Position Format
|
||||||
@@ -122,24 +178,27 @@ export interface CompressedPosition {
|
|||||||
range?: number; // Miles
|
range?: number; // Miles
|
||||||
altitude?: number; // Feet
|
altitude?: number; // Feet
|
||||||
radioRange?: number; // Miles
|
radioRange?: number; // Miles
|
||||||
compression: 'old' | 'current';
|
compression: "old" | "current";
|
||||||
}
|
}
|
||||||
|
|
||||||
// Mic-E Payload (compressed in destination address)
|
// Mic-E Payload (compressed in destination address)
|
||||||
export interface MicEPayload {
|
export interface MicEPayload {
|
||||||
type: 'mic-e';
|
type: DataType.MicE | DataType.MicEOld;
|
||||||
|
doNotArchive?: boolean; // Optional flag to indicate frame should not be archived
|
||||||
position: IPosition;
|
position: IPosition;
|
||||||
course?: number;
|
|
||||||
speed?: number;
|
|
||||||
altitude?: number;
|
|
||||||
messageType?: string; // Standard Mic-E message
|
messageType?: string; // Standard Mic-E message
|
||||||
|
isStandard?: boolean; // Whether messageType is a standard Mic-E message
|
||||||
telemetry?: number[]; // Optional telemetry channels
|
telemetry?: number[]; // Optional telemetry channels
|
||||||
status?: string;
|
status?: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export type MessageVariant = "message" | "bulletin";
|
||||||
|
|
||||||
// Message Payload
|
// Message Payload
|
||||||
export interface MessagePayload {
|
export interface MessagePayload {
|
||||||
type: 'message';
|
type: DataType.Message;
|
||||||
|
variant: "message";
|
||||||
|
doNotArchive?: boolean; // Optional flag to indicate frame should not be archived
|
||||||
addressee: string; // 9 character padded callsign
|
addressee: string; // 9 character padded callsign
|
||||||
text: string; // Message text
|
text: string; // Message text
|
||||||
messageNumber?: string; // Message ID for acknowledgment
|
messageNumber?: string; // Message ID for acknowledgment
|
||||||
@@ -149,7 +208,9 @@ export interface MessagePayload {
|
|||||||
|
|
||||||
// Bulletin/Announcement (variant of message)
|
// Bulletin/Announcement (variant of message)
|
||||||
export interface BulletinPayload {
|
export interface BulletinPayload {
|
||||||
type: 'bulletin';
|
type: DataType.Message;
|
||||||
|
variant: "bulletin";
|
||||||
|
doNotArchive?: boolean; // Optional flag to indicate frame should not be archived
|
||||||
bulletinId: string; // Bulletin identifier (BLN#)
|
bulletinId: string; // Bulletin identifier (BLN#)
|
||||||
text: string;
|
text: string;
|
||||||
group?: string; // Optional group bulletin
|
group?: string; // Optional group bulletin
|
||||||
@@ -157,7 +218,8 @@ export interface BulletinPayload {
|
|||||||
|
|
||||||
// Object Payload
|
// Object Payload
|
||||||
export interface ObjectPayload {
|
export interface ObjectPayload {
|
||||||
type: 'object';
|
type: DataType.Object;
|
||||||
|
doNotArchive?: boolean; // Optional flag to indicate frame should not be archived
|
||||||
name: string; // 9 character object name
|
name: string; // 9 character object name
|
||||||
timestamp: ITimestamp;
|
timestamp: ITimestamp;
|
||||||
alive: boolean; // True if object is active, false if killed
|
alive: boolean; // True if object is active, false if killed
|
||||||
@@ -168,7 +230,8 @@ export interface ObjectPayload {
|
|||||||
|
|
||||||
// Item Payload
|
// Item Payload
|
||||||
export interface ItemPayload {
|
export interface ItemPayload {
|
||||||
type: 'item';
|
type: DataType.Item;
|
||||||
|
doNotArchive?: boolean; // Optional flag to indicate frame should not be archived
|
||||||
name: string; // 3-9 character item name
|
name: string; // 3-9 character item name
|
||||||
alive: boolean; // True if item is active, false if killed
|
alive: boolean; // True if item is active, false if killed
|
||||||
position: IPosition;
|
position: IPosition;
|
||||||
@@ -176,7 +239,8 @@ export interface ItemPayload {
|
|||||||
|
|
||||||
// Status Payload
|
// Status Payload
|
||||||
export interface StatusPayload {
|
export interface StatusPayload {
|
||||||
type: 'status';
|
type: DataType.Status;
|
||||||
|
doNotArchive?: boolean; // Optional flag to indicate frame should not be archived
|
||||||
timestamp?: ITimestamp;
|
timestamp?: ITimestamp;
|
||||||
text: string;
|
text: string;
|
||||||
maidenhead?: string; // Optional Maidenhead grid locator
|
maidenhead?: string; // Optional Maidenhead grid locator
|
||||||
@@ -188,14 +252,17 @@ export interface StatusPayload {
|
|||||||
|
|
||||||
// Query Payload
|
// Query Payload
|
||||||
export interface QueryPayload {
|
export interface QueryPayload {
|
||||||
type: 'query';
|
type: DataType.Query;
|
||||||
queryType: string; // e.g., 'APRSD', 'APRST', 'PING'
|
queryType: string; // e.g., 'APRSD', 'APRST', 'PING'
|
||||||
target?: string; // Target callsign or area
|
target?: string; // Target callsign or area
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export type TelemetryVariant = "data" | "parameters" | "unit" | "coefficients" | "bitsense";
|
||||||
|
|
||||||
// Telemetry Data Payload
|
// Telemetry Data Payload
|
||||||
export interface TelemetryDataPayload {
|
export interface TelemetryDataPayload {
|
||||||
type: 'telemetry-data';
|
type: DataType.TelemetryData;
|
||||||
|
variant: "data";
|
||||||
sequence: number;
|
sequence: number;
|
||||||
analog: number[]; // Up to 5 analog channels
|
analog: number[]; // Up to 5 analog channels
|
||||||
digital: number; // 8-bit digital value
|
digital: number; // 8-bit digital value
|
||||||
@@ -203,19 +270,22 @@ export interface TelemetryDataPayload {
|
|||||||
|
|
||||||
// Telemetry Parameter Names
|
// Telemetry Parameter Names
|
||||||
export interface TelemetryParameterPayload {
|
export interface TelemetryParameterPayload {
|
||||||
type: 'telemetry-parameters';
|
type: DataType.TelemetryData;
|
||||||
|
variant: "parameters";
|
||||||
names: string[]; // Parameter names
|
names: string[]; // Parameter names
|
||||||
}
|
}
|
||||||
|
|
||||||
// Telemetry Unit/Label
|
// Telemetry Unit/Label
|
||||||
export interface TelemetryUnitPayload {
|
export interface TelemetryUnitPayload {
|
||||||
type: 'telemetry-units';
|
type: DataType.TelemetryData;
|
||||||
|
variant: "unit";
|
||||||
units: string[]; // Units for each parameter
|
units: string[]; // Units for each parameter
|
||||||
}
|
}
|
||||||
|
|
||||||
// Telemetry Coefficients
|
// Telemetry Coefficients
|
||||||
export interface TelemetryCoefficientsPayload {
|
export interface TelemetryCoefficientsPayload {
|
||||||
type: 'telemetry-coefficients';
|
type: DataType.TelemetryData;
|
||||||
|
variant: "coefficients";
|
||||||
coefficients: {
|
coefficients: {
|
||||||
a: number[]; // a coefficients
|
a: number[]; // a coefficients
|
||||||
b: number[]; // b coefficients
|
b: number[]; // b coefficients
|
||||||
@@ -225,14 +295,15 @@ export interface TelemetryCoefficientsPayload {
|
|||||||
|
|
||||||
// Telemetry Bit Sense/Project Name
|
// Telemetry Bit Sense/Project Name
|
||||||
export interface TelemetryBitSensePayload {
|
export interface TelemetryBitSensePayload {
|
||||||
type: 'telemetry-bitsense';
|
type: DataType.TelemetryData;
|
||||||
|
variant: "bitsense";
|
||||||
sense: number; // 8-bit sense value
|
sense: number; // 8-bit sense value
|
||||||
projectName?: string;
|
projectName?: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Weather Report Payload
|
// Weather Report Payload
|
||||||
export interface WeatherPayload {
|
export interface WeatherPayload {
|
||||||
type: 'weather';
|
type: DataType.WeatherReportNoPosition;
|
||||||
timestamp?: ITimestamp;
|
timestamp?: ITimestamp;
|
||||||
position?: IPosition;
|
position?: IPosition;
|
||||||
windDirection?: number; // Degrees
|
windDirection?: number; // Degrees
|
||||||
@@ -249,37 +320,38 @@ export interface WeatherPayload {
|
|||||||
rawRain?: number; // Raw rain counter
|
rawRain?: number; // Raw rain counter
|
||||||
software?: string; // Weather software type
|
software?: string; // Weather software type
|
||||||
weatherUnit?: string; // Weather station type
|
weatherUnit?: string; // Weather station type
|
||||||
|
comment?: string; // Additional comment
|
||||||
}
|
}
|
||||||
|
|
||||||
// Raw GPS Payload (NMEA sentences)
|
// Raw GPS Payload (NMEA sentences)
|
||||||
export interface RawGPSPayload {
|
export interface RawGPSPayload {
|
||||||
type: 'raw-gps';
|
type: DataType.RawGPS;
|
||||||
sentence: string; // Raw NMEA sentence
|
sentence: string; // Raw NMEA sentence
|
||||||
|
position?: IPosition; // Optional parsed position if available
|
||||||
}
|
}
|
||||||
|
|
||||||
// Station Capabilities Payload
|
// Station Capabilities Payload
|
||||||
export interface StationCapabilitiesPayload {
|
export interface StationCapabilitiesPayload {
|
||||||
type: 'capabilities';
|
type: DataType.StationCapabilities;
|
||||||
capabilities: string[];
|
capabilities: string[];
|
||||||
}
|
}
|
||||||
|
|
||||||
// User-Defined Payload
|
// User-Defined Payload
|
||||||
export interface UserDefinedPayload {
|
export interface UserDefinedPayload {
|
||||||
type: 'user-defined';
|
type: DataType.UserDefined;
|
||||||
userPacketType: string;
|
userPacketType: string;
|
||||||
data: string;
|
data: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Third-Party Traffic Payload
|
// Third-Party Traffic Payload
|
||||||
export interface ThirdPartyPayload {
|
export interface ThirdPartyPayload {
|
||||||
type: 'third-party';
|
type: DataType.ThirdParty;
|
||||||
header: string; // Source path of third-party packet
|
frame?: IFrame; // Optional nested frame if payload contains another APRS frame
|
||||||
payload: string; // Nested APRS packet
|
comment?: string; // Optional comment
|
||||||
}
|
}
|
||||||
|
|
||||||
// DF Report Payload
|
// DF Report Payload
|
||||||
export interface DFReportPayload {
|
export interface DFReportPayload {
|
||||||
type: 'df-report';
|
|
||||||
timestamp?: ITimestamp;
|
timestamp?: ITimestamp;
|
||||||
position: IPosition;
|
position: IPosition;
|
||||||
course?: number;
|
course?: number;
|
||||||
@@ -292,11 +364,13 @@ export interface DFReportPayload {
|
|||||||
}
|
}
|
||||||
|
|
||||||
export interface BasePayload {
|
export interface BasePayload {
|
||||||
type: string;
|
type: DataType;
|
||||||
|
doNotArchive?: boolean; // Optional flag to indicate frame should not be archived
|
||||||
}
|
}
|
||||||
|
|
||||||
// Union type for all decoded payload types
|
// Union type for all decoded payload types
|
||||||
export type Payload = BasePayload & (
|
export type Payload = BasePayload &
|
||||||
|
(
|
||||||
| PositionPayload
|
| PositionPayload
|
||||||
| MicEPayload
|
| MicEPayload
|
||||||
| MessagePayload
|
| MessagePayload
|
||||||
@@ -315,11 +389,26 @@ export type Payload = BasePayload & (
|
|||||||
| StationCapabilitiesPayload
|
| StationCapabilitiesPayload
|
||||||
| UserDefinedPayload
|
| UserDefinedPayload
|
||||||
| ThirdPartyPayload
|
| ThirdPartyPayload
|
||||||
| DFReportPayload
|
|
||||||
);
|
);
|
||||||
|
|
||||||
// Extended Frame with decoded payload
|
// Extended Frame with decoded payload
|
||||||
export interface DecodedFrame extends IFrame {
|
export interface DecodedFrame extends IFrame {
|
||||||
decoded?: Payload;
|
decoded?: Payload;
|
||||||
structure?: PacketStructure; // Routing and other frame-level sections
|
structure?: Dissected; // Routing and other frame-level sections
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extras is an internal helper type used during decoding to accumulate additional
|
||||||
|
// information that may not fit directly into the standard payload structure,
|
||||||
|
// such as comments, calculated fields, or other metadata that can be useful for
|
||||||
|
// applications consuming the decoded frames.
|
||||||
|
export interface Extras {
|
||||||
|
comment: string;
|
||||||
|
altitude?: number;
|
||||||
|
range?: number;
|
||||||
|
phg?: IPowerHeightGain;
|
||||||
|
dfs?: IDirectionFinding;
|
||||||
|
cse?: number;
|
||||||
|
spd?: number;
|
||||||
|
fields?: Field[];
|
||||||
|
telemetry?: ITelemetry;
|
||||||
}
|
}
|
||||||
|
|||||||
34
src/index.ts
34
src/index.ts
@@ -1,19 +1,14 @@
|
|||||||
export {
|
export { Frame, Address } from "./frame";
|
||||||
Frame,
|
|
||||||
Address,
|
export { type IAddress, type IFrame, DataType as DataTypeIdentifier } from "./frame.types";
|
||||||
Timestamp,
|
|
||||||
} from "./frame";
|
|
||||||
|
|
||||||
export {
|
|
||||||
type IAddress,
|
|
||||||
type IFrame,
|
|
||||||
DataTypeIdentifier,
|
|
||||||
} from "./frame.types";
|
|
||||||
|
|
||||||
export {
|
export {
|
||||||
|
DataType,
|
||||||
type ISymbol,
|
type ISymbol,
|
||||||
type IPosition,
|
type IPosition,
|
||||||
type ITimestamp,
|
type ITimestamp,
|
||||||
|
type IPowerHeightGain,
|
||||||
|
type IDirectionFinding,
|
||||||
type PositionPayload,
|
type PositionPayload,
|
||||||
type CompressedPosition,
|
type CompressedPosition,
|
||||||
type MicEPayload,
|
type MicEPayload,
|
||||||
@@ -36,9 +31,12 @@ export {
|
|||||||
type DFReportPayload,
|
type DFReportPayload,
|
||||||
type BasePayload,
|
type BasePayload,
|
||||||
type Payload,
|
type Payload,
|
||||||
type DecodedFrame,
|
type DecodedFrame
|
||||||
} from "./frame.types";
|
} from "./frame.types";
|
||||||
|
|
||||||
|
export { Position } from "./position";
|
||||||
|
export { Timestamp } from "./timestamp";
|
||||||
|
|
||||||
export {
|
export {
|
||||||
base91ToNumber,
|
base91ToNumber,
|
||||||
knotsToKmh,
|
knotsToKmh,
|
||||||
@@ -46,12 +44,8 @@ export {
|
|||||||
feetToMeters,
|
feetToMeters,
|
||||||
metersToFeet,
|
metersToFeet,
|
||||||
celsiusToFahrenheit,
|
celsiusToFahrenheit,
|
||||||
fahrenheitToCelsius,
|
fahrenheitToCelsius
|
||||||
} from "./parser";
|
} from "./parser";
|
||||||
export {
|
|
||||||
type PacketStructure,
|
export { getDeviceID } from "./deviceid";
|
||||||
type PacketSegment,
|
export type { DeviceID } from "./deviceid";
|
||||||
type PacketField,
|
|
||||||
type PacketFieldBit,
|
|
||||||
FieldType,
|
|
||||||
} from "./parser.types";
|
|
||||||
|
|||||||
@@ -22,7 +22,7 @@ export const base91ToNumber = (str: string): number => {
|
|||||||
}
|
}
|
||||||
|
|
||||||
return value;
|
return value;
|
||||||
}
|
};
|
||||||
|
|
||||||
/* Conversions from Freedom Units to whatever the rest of the world uses and understands. */
|
/* Conversions from Freedom Units to whatever the rest of the world uses and understands. */
|
||||||
|
|
||||||
@@ -38,7 +38,7 @@ const FAHRENHEIT_TO_CELSIUS_OFFSET = 32;
|
|||||||
*/
|
*/
|
||||||
export const knotsToKmh = (knots: number): number => {
|
export const knotsToKmh = (knots: number): number => {
|
||||||
return knots * KNOTS_TO_KMH;
|
return knots * KNOTS_TO_KMH;
|
||||||
}
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Convert speed from kilometers per hour to knots.
|
* Convert speed from kilometers per hour to knots.
|
||||||
@@ -48,7 +48,7 @@ export const knotsToKmh = (knots: number): number => {
|
|||||||
*/
|
*/
|
||||||
export const kmhToKnots = (kmh: number): number => {
|
export const kmhToKnots = (kmh: number): number => {
|
||||||
return kmh / KNOTS_TO_KMH;
|
return kmh / KNOTS_TO_KMH;
|
||||||
}
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Convert altitude from feet to meters.
|
* Convert altitude from feet to meters.
|
||||||
@@ -58,7 +58,16 @@ export const kmhToKnots = (kmh: number): number => {
|
|||||||
*/
|
*/
|
||||||
export const feetToMeters = (feet: number): number => {
|
export const feetToMeters = (feet: number): number => {
|
||||||
return feet * FEET_TO_METERS;
|
return feet * FEET_TO_METERS;
|
||||||
}
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Convert miles to meters.
|
||||||
|
* @param miles number of miles
|
||||||
|
* @returns meters
|
||||||
|
*/
|
||||||
|
export const milesToMeters = (miles: number): number => {
|
||||||
|
return miles * 1609.344;
|
||||||
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Convert altitude from meters to feet.
|
* Convert altitude from meters to feet.
|
||||||
@@ -68,7 +77,7 @@ export const feetToMeters = (feet: number): number => {
|
|||||||
*/
|
*/
|
||||||
export const metersToFeet = (meters: number): number => {
|
export const metersToFeet = (meters: number): number => {
|
||||||
return meters / FEET_TO_METERS;
|
return meters / FEET_TO_METERS;
|
||||||
}
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Convert temperature from Celsius to Fahrenheit.
|
* Convert temperature from Celsius to Fahrenheit.
|
||||||
@@ -77,8 +86,8 @@ export const metersToFeet = (meters: number): number => {
|
|||||||
* @returns equivalent temperature in Fahrenheit
|
* @returns equivalent temperature in Fahrenheit
|
||||||
*/
|
*/
|
||||||
export const celsiusToFahrenheit = (celsius: number): number => {
|
export const celsiusToFahrenheit = (celsius: number): number => {
|
||||||
return (celsius * 9/5) + FAHRENHEIT_TO_CELSIUS_OFFSET;
|
return (celsius * 9) / 5 + FAHRENHEIT_TO_CELSIUS_OFFSET;
|
||||||
}
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Convert temperature from Fahrenheit to Celsius.
|
* Convert temperature from Fahrenheit to Celsius.
|
||||||
@@ -87,5 +96,5 @@ export const celsiusToFahrenheit = (celsius: number): number => {
|
|||||||
* @returns equivalent temperature in Celsius
|
* @returns equivalent temperature in Celsius
|
||||||
*/
|
*/
|
||||||
export const fahrenheitToCelsius = (fahrenheit: number): number => {
|
export const fahrenheitToCelsius = (fahrenheit: number): number => {
|
||||||
return (fahrenheit - FAHRENHEIT_TO_CELSIUS_OFFSET) * 5/9;
|
return ((fahrenheit - FAHRENHEIT_TO_CELSIUS_OFFSET) * 5) / 9;
|
||||||
}
|
};
|
||||||
|
|||||||
@@ -1,37 +0,0 @@
|
|||||||
export enum FieldType {
|
|
||||||
BITS = 0,
|
|
||||||
UINT8 = 1,
|
|
||||||
UINT16_LE = 2,
|
|
||||||
UINT16_BE = 3,
|
|
||||||
UINT32_LE = 4,
|
|
||||||
UINT32_BE = 5,
|
|
||||||
BYTES = 6, // 8-bits per value
|
|
||||||
WORDS = 7, // 16-bits per value
|
|
||||||
DWORDS = 8, // 32-bits per value
|
|
||||||
QWORDS = 9, // 64-bits per value
|
|
||||||
STRING = 10,
|
|
||||||
C_STRING = 11, // Null-terminated string
|
|
||||||
CHAR = 12, // Single ASCII character
|
|
||||||
}
|
|
||||||
|
|
||||||
// Interface for the parsed packet segments, used for debugging and testing.
|
|
||||||
export type PacketStructure = PacketSegment[];
|
|
||||||
|
|
||||||
export interface PacketSegment {
|
|
||||||
name: string;
|
|
||||||
data: Uint8Array;
|
|
||||||
fields: PacketField[];
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface PacketField {
|
|
||||||
type: FieldType;
|
|
||||||
size: number; // Size in bytes
|
|
||||||
name?: string;
|
|
||||||
bits?: PacketFieldBit[]; // Only for bit fields in FieldType.BITS
|
|
||||||
value?: any; // Optional decoded value
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface PacketFieldBit {
|
|
||||||
name: string;
|
|
||||||
size: number; // Size in bits
|
|
||||||
}
|
|
||||||
71
src/payload.capabilities.ts
Normal file
71
src/payload.capabilities.ts
Normal file
@@ -0,0 +1,71 @@
|
|||||||
|
import { FieldType, type Segment } from "@hamradio/packet";
|
||||||
|
|
||||||
|
import { DataType, type Payload, type StationCapabilitiesPayload } from "./frame.types";
|
||||||
|
|
||||||
|
export const decodeCapabilitiesPayload = (
|
||||||
|
raw: string,
|
||||||
|
withStructure: boolean = false
|
||||||
|
): {
|
||||||
|
payload: Payload | null;
|
||||||
|
segment?: Segment[];
|
||||||
|
} => {
|
||||||
|
try {
|
||||||
|
if (raw.length < 2) return { payload: null };
|
||||||
|
|
||||||
|
// Extract the text after the '<' identifier
|
||||||
|
let rest = raw.substring(1).trim();
|
||||||
|
|
||||||
|
// Some implementations include a closing '>' or other trailing chars; strip common wrappers
|
||||||
|
if (rest.endsWith(">")) rest = rest.slice(0, -1).trim();
|
||||||
|
|
||||||
|
// Split capabilities by commas, semicolons or whitespace
|
||||||
|
const tokens = rest
|
||||||
|
.split(/[,;\s]+/)
|
||||||
|
.map((t) => t.trim())
|
||||||
|
.filter(Boolean);
|
||||||
|
|
||||||
|
const payload: StationCapabilitiesPayload = {
|
||||||
|
type: DataType.StationCapabilities,
|
||||||
|
capabilities: tokens
|
||||||
|
} as const;
|
||||||
|
|
||||||
|
if (withStructure) {
|
||||||
|
const segments: Segment[] = [];
|
||||||
|
segments.push({
|
||||||
|
name: "capabilities",
|
||||||
|
data: new TextEncoder().encode(rest).buffer,
|
||||||
|
isString: true,
|
||||||
|
fields: [
|
||||||
|
{
|
||||||
|
type: FieldType.STRING,
|
||||||
|
name: "capabilities",
|
||||||
|
length: rest.length
|
||||||
|
}
|
||||||
|
]
|
||||||
|
});
|
||||||
|
|
||||||
|
for (const cap of tokens) {
|
||||||
|
segments.push({
|
||||||
|
name: "capability",
|
||||||
|
data: new TextEncoder().encode(cap).buffer,
|
||||||
|
isString: true,
|
||||||
|
fields: [
|
||||||
|
{
|
||||||
|
type: FieldType.STRING,
|
||||||
|
name: "capability",
|
||||||
|
length: cap.length
|
||||||
|
}
|
||||||
|
]
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return { payload, segment: segments };
|
||||||
|
}
|
||||||
|
|
||||||
|
return { payload };
|
||||||
|
} catch {
|
||||||
|
return { payload: null };
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
export default decodeCapabilitiesPayload;
|
||||||
504
src/payload.extras.ts
Normal file
504
src/payload.extras.ts
Normal file
@@ -0,0 +1,504 @@
|
|||||||
|
import { type Field, FieldType } from "@hamradio/packet";
|
||||||
|
|
||||||
|
import type { Extras, ITelemetry, Payload } from "./frame.types";
|
||||||
|
import { base91ToNumber, feetToMeters, knotsToKmh, milesToMeters } from "./parser";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Decodes structured extras from an APRS comment string, extracting known tokens
|
||||||
|
* for altitude, range, PHG, DFS, course/speed, and embedded telemetry, and
|
||||||
|
* returns an object with the extracted values and a cleaned comment string with
|
||||||
|
* the tokens removed.
|
||||||
|
*
|
||||||
|
* If withStructure is true, also returns an array of fields representing the
|
||||||
|
* structure of the extras for use in structured packet parsing.
|
||||||
|
*
|
||||||
|
* @param comment The APRS comment string to decode.
|
||||||
|
* @param withStructure Whether to include structured fields in the result.
|
||||||
|
* @returns An object containing the decoded extras and the cleaned comment string.
|
||||||
|
*/
|
||||||
|
export const decodeCommentExtras = (comment: string, withStructure: boolean = false): Extras => {
|
||||||
|
if (!comment || comment.length === 0) return { comment };
|
||||||
|
|
||||||
|
const extras: Partial<Extras> = {};
|
||||||
|
const fields: Field[] = [];
|
||||||
|
const beforeFields: Field[] = [];
|
||||||
|
let altitudeOffset: number | undefined = undefined;
|
||||||
|
let altitudeFields: Field[] = [];
|
||||||
|
let commentOffset: number = 0;
|
||||||
|
let commentBefore: string | undefined = undefined;
|
||||||
|
|
||||||
|
// eslint-disable-next-line no-useless-assignment
|
||||||
|
let match: RegExpMatchArray | null = null;
|
||||||
|
|
||||||
|
// Process successive 7-byte data extensions at the start of the comment.
|
||||||
|
comment = comment.trimStart();
|
||||||
|
let ext = comment;
|
||||||
|
while (ext.length >= 7) {
|
||||||
|
// We first process the altitude marker, because it may appear anywhere
|
||||||
|
// in the comment and we want to extract it and its value before
|
||||||
|
// processing other tokens that may be present.
|
||||||
|
//
|
||||||
|
// /A=NNNNNN -> altitude in feet (6 digits)
|
||||||
|
// /A=-NNNNN -> altitude in feet with leading minus for negative altitudes (5 digits)
|
||||||
|
const altMatch = ext.match(/\/A=(-\d{5}|\d{6})/);
|
||||||
|
if (altitudeOffset === undefined && altMatch) {
|
||||||
|
const altitude = feetToMeters(parseInt(altMatch[1], 10)); // feet to meters
|
||||||
|
if (isNaN(altitude)) {
|
||||||
|
break; // Invalid altitude format, stop parsing extras
|
||||||
|
}
|
||||||
|
extras.altitude = altitude;
|
||||||
|
|
||||||
|
// Keep track of where the altitude token appears in the comment for structure purposes.
|
||||||
|
altitudeOffset = comment.indexOf(altMatch[0]);
|
||||||
|
|
||||||
|
if (withStructure) {
|
||||||
|
altitudeFields = [
|
||||||
|
{
|
||||||
|
type: FieldType.STRING,
|
||||||
|
name: "altitude marker",
|
||||||
|
data: new TextEncoder().encode("/A=").buffer,
|
||||||
|
value: "/A=",
|
||||||
|
length: 3
|
||||||
|
},
|
||||||
|
{
|
||||||
|
type: FieldType.STRING,
|
||||||
|
name: "altitude",
|
||||||
|
data: new TextEncoder().encode(altMatch[1]).buffer,
|
||||||
|
value: altitude.toFixed(1) + "m",
|
||||||
|
length: 6
|
||||||
|
}
|
||||||
|
];
|
||||||
|
}
|
||||||
|
|
||||||
|
if (altitudeOffset > 0) {
|
||||||
|
// Reset the comment with the altitude marker removed.
|
||||||
|
commentBefore = comment.substring(0, altitudeOffset);
|
||||||
|
comment = comment.substring(altitudeOffset + altMatch[0].length);
|
||||||
|
ext = commentBefore; // Continue processing extensions in the part of the comment before the altitude marker
|
||||||
|
commentOffset = 0; // Reset
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// remove altitude token from ext and advance ext for further parsing
|
||||||
|
commentOffset += altMatch[0].length;
|
||||||
|
ext = ext.replace(altMatch[0], "").trimStart();
|
||||||
|
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// RNGrrrr -> pre-calculated range in miles (4 digits)
|
||||||
|
if ((match = ext.match(/^RNG(\d{4})/))) {
|
||||||
|
const r = match[1];
|
||||||
|
extras.range = milesToMeters(parseInt(r, 10)) / 1000.0; // Convert to kilometers
|
||||||
|
if (withStructure) {
|
||||||
|
(altitudeOffset !== undefined && commentOffset < altitudeOffset ? beforeFields : fields).push(
|
||||||
|
{
|
||||||
|
type: FieldType.STRING,
|
||||||
|
name: "range marker",
|
||||||
|
value: "RNG",
|
||||||
|
length: 3
|
||||||
|
},
|
||||||
|
{
|
||||||
|
type: FieldType.STRING,
|
||||||
|
name: "range (rrrr)",
|
||||||
|
length: 4,
|
||||||
|
value: extras.range.toFixed(1) + "km"
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// remove range token from ext and advance ext for further parsing
|
||||||
|
if (commentBefore !== undefined && commentBefore.length > 0) {
|
||||||
|
commentBefore = commentBefore.substring(7);
|
||||||
|
ext = commentBefore;
|
||||||
|
} else {
|
||||||
|
commentOffset += 7;
|
||||||
|
ext = ext.substring(7);
|
||||||
|
}
|
||||||
|
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// PHGphgd
|
||||||
|
//if (!extras.phg && ext.startsWith("PHG")) {
|
||||||
|
if (!extras.phg && (match = ext.match(/^PHG([0-9 ])([0-9 ])([0-9 ])([0-9 ])/))) {
|
||||||
|
// PHGphgd: p = power (0-9 or space), h = height (0-9 or space), g = gain (0-9 or space), d = directivity (0-9 or space)
|
||||||
|
const p = match[1];
|
||||||
|
const h = match[2];
|
||||||
|
const g = match[3];
|
||||||
|
const d = match[4];
|
||||||
|
const pNum = parseInt(p, 10);
|
||||||
|
const powerWatts = Number.isNaN(pNum) ? undefined : pNum * pNum;
|
||||||
|
const hIndex = h.charCodeAt(0) - 48;
|
||||||
|
const heightFeet = 10 * Math.pow(2, hIndex);
|
||||||
|
const heightMeters = feetToMeters(heightFeet);
|
||||||
|
const gNum = parseInt(g, 10);
|
||||||
|
const gainDbi = Number.isNaN(gNum) ? undefined : gNum;
|
||||||
|
const dNum = parseInt(d, 10);
|
||||||
|
let directivity: number | "omni" | "unknown" | undefined;
|
||||||
|
if (Number.isNaN(dNum)) {
|
||||||
|
directivity = undefined;
|
||||||
|
} else if (dNum === 0) {
|
||||||
|
directivity = "omni";
|
||||||
|
} else if (dNum >= 1 && dNum <= 8) {
|
||||||
|
directivity = dNum * 45;
|
||||||
|
} else if (dNum === 9) {
|
||||||
|
directivity = "unknown";
|
||||||
|
}
|
||||||
|
|
||||||
|
extras.phg = {
|
||||||
|
power: powerWatts,
|
||||||
|
height: heightMeters,
|
||||||
|
gain: gainDbi,
|
||||||
|
directivity
|
||||||
|
};
|
||||||
|
|
||||||
|
if (withStructure) {
|
||||||
|
(altitudeOffset !== undefined && commentOffset < altitudeOffset ? beforeFields : fields).push(
|
||||||
|
{ type: FieldType.STRING, name: "PHG marker", length: 3, value: "PHG" },
|
||||||
|
{
|
||||||
|
type: FieldType.STRING,
|
||||||
|
name: "power (p)",
|
||||||
|
length: 1,
|
||||||
|
value: powerWatts !== undefined ? powerWatts.toString() + "W" : undefined
|
||||||
|
},
|
||||||
|
{
|
||||||
|
type: FieldType.STRING,
|
||||||
|
name: "height (h)",
|
||||||
|
length: 1,
|
||||||
|
value: heightMeters !== undefined ? heightMeters.toString() + "m" : undefined
|
||||||
|
},
|
||||||
|
{
|
||||||
|
type: FieldType.STRING,
|
||||||
|
name: "gain (g)",
|
||||||
|
length: 1,
|
||||||
|
value: gainDbi !== undefined ? gainDbi.toString() + "dBi" : undefined
|
||||||
|
},
|
||||||
|
{
|
||||||
|
type: FieldType.STRING,
|
||||||
|
name: "directivity (d)",
|
||||||
|
length: 1,
|
||||||
|
value:
|
||||||
|
directivity !== undefined
|
||||||
|
? typeof directivity === "number"
|
||||||
|
? directivity.toString() + "°"
|
||||||
|
: directivity
|
||||||
|
: undefined
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// remove PHG token from ext and advance ext for further parsing
|
||||||
|
if (commentBefore !== undefined && commentBefore.length > 0) {
|
||||||
|
commentBefore = commentBefore.substring(7);
|
||||||
|
} else {
|
||||||
|
commentOffset += 7;
|
||||||
|
}
|
||||||
|
ext = ext.substring(7).trimStart();
|
||||||
|
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// DFSshgd
|
||||||
|
if (ext.startsWith("DFS")) {
|
||||||
|
// DFSshgd: s = strength (0-9), h = height (0-9), g = gain (0-9), d = directivity (0-9)
|
||||||
|
const s = ext.charAt(3);
|
||||||
|
const h = ext.charAt(4);
|
||||||
|
const g = ext.charAt(5);
|
||||||
|
const d = ext.charAt(6);
|
||||||
|
|
||||||
|
const sNum = parseInt(s, 10);
|
||||||
|
const hNum = parseInt(h, 10);
|
||||||
|
const gNum = parseInt(g, 10);
|
||||||
|
const dNum = parseInt(d, 10);
|
||||||
|
|
||||||
|
// Strength: s = 0-9, direct value
|
||||||
|
const strength = Number.isNaN(sNum) ? undefined : sNum;
|
||||||
|
|
||||||
|
// Height: h = 0-9, height = 10 * 2^h feet (spec: h is exponent)
|
||||||
|
const heightFeet = Number.isNaN(hNum) ? undefined : 10 * Math.pow(2, hNum);
|
||||||
|
const heightMeters = heightFeet !== undefined ? feetToMeters(heightFeet) : undefined;
|
||||||
|
|
||||||
|
// Gain: g = 0-9, gain in dB
|
||||||
|
const gainDbi = Number.isNaN(gNum) ? undefined : gNum;
|
||||||
|
|
||||||
|
// Directivity: d = 0-9, 0 = omni, 1-8 = d*45°, 9 = unknown
|
||||||
|
let directivity: number | "omni" | "unknown" | undefined;
|
||||||
|
if (Number.isNaN(dNum)) {
|
||||||
|
directivity = undefined;
|
||||||
|
} else if (dNum === 0) {
|
||||||
|
directivity = "omni";
|
||||||
|
} else if (dNum >= 1 && dNum <= 8) {
|
||||||
|
directivity = dNum * 45;
|
||||||
|
} else if (dNum === 9) {
|
||||||
|
directivity = "unknown";
|
||||||
|
}
|
||||||
|
|
||||||
|
extras.dfs = {
|
||||||
|
strength,
|
||||||
|
height: heightMeters,
|
||||||
|
gain: gainDbi,
|
||||||
|
directivity
|
||||||
|
};
|
||||||
|
|
||||||
|
if (withStructure) {
|
||||||
|
(altitudeOffset !== undefined && commentOffset < altitudeOffset ? beforeFields : fields).push(
|
||||||
|
{ type: FieldType.STRING, name: "DFS marker", length: 3, value: "DFS" },
|
||||||
|
{
|
||||||
|
type: FieldType.STRING,
|
||||||
|
name: "strength (s)",
|
||||||
|
length: 1,
|
||||||
|
value: strength !== undefined ? strength.toString() : undefined
|
||||||
|
},
|
||||||
|
{
|
||||||
|
type: FieldType.STRING,
|
||||||
|
name: "height (h)",
|
||||||
|
length: 1,
|
||||||
|
value: heightMeters !== undefined ? heightMeters.toString() + "m" : undefined
|
||||||
|
},
|
||||||
|
{
|
||||||
|
type: FieldType.STRING,
|
||||||
|
name: "gain (g)",
|
||||||
|
length: 1,
|
||||||
|
value: gainDbi !== undefined ? gainDbi.toString() + "dBi" : undefined
|
||||||
|
},
|
||||||
|
{
|
||||||
|
type: FieldType.STRING,
|
||||||
|
name: "directivity (d)",
|
||||||
|
length: 1,
|
||||||
|
value:
|
||||||
|
directivity !== undefined
|
||||||
|
? typeof directivity === "number"
|
||||||
|
? directivity.toString() + "°"
|
||||||
|
: directivity
|
||||||
|
: undefined
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// remove DFS token from ext and advance ext for further parsing
|
||||||
|
if (commentBefore !== undefined && commentBefore.length > 0) {
|
||||||
|
commentBefore = commentBefore.substring(7);
|
||||||
|
} else {
|
||||||
|
commentOffset += 7;
|
||||||
|
}
|
||||||
|
ext = ext.substring(7).trimStart();
|
||||||
|
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Course/Speed DDD/SSS (7 bytes: 3 digits / 3 digits)
|
||||||
|
if (extras.cse === undefined && /^\d{3}\/\d{3}/.test(ext)) {
|
||||||
|
const courseStr = ext.substring(0, 3);
|
||||||
|
const speedStr = ext.substring(4, 7);
|
||||||
|
extras.cse = parseInt(courseStr, 10);
|
||||||
|
extras.spd = knotsToKmh(parseInt(speedStr, 10));
|
||||||
|
|
||||||
|
if (withStructure) {
|
||||||
|
(altitudeOffset !== undefined && commentOffset < altitudeOffset ? beforeFields : fields).push(
|
||||||
|
{ type: FieldType.STRING, name: "course", length: 3, value: extras.cse.toString() + "°" },
|
||||||
|
{ type: FieldType.CHAR, name: "marker", length: 1, value: "/" },
|
||||||
|
{ type: FieldType.STRING, name: "speed", length: 3, value: extras.spd.toString() + " km/h" }
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// remove course/speed token from comment and advance ext for further parsing
|
||||||
|
ext = ext.substring(7).trimStart();
|
||||||
|
|
||||||
|
// If there is an 8-byte DF/NRQ following (leading '/'), parse that too
|
||||||
|
if (ext.length >= 8 && ext.charAt(0) === "/") {
|
||||||
|
const dfExt = ext.substring(0, 8); // e.g. /270/729
|
||||||
|
const m = dfExt.match(/\/(\d{3})\/(\d{3})/);
|
||||||
|
if (m) {
|
||||||
|
const dfBearing = parseInt(m[1], 10);
|
||||||
|
const dfStrength = parseInt(m[2], 10);
|
||||||
|
if (extras.dfs === undefined) {
|
||||||
|
extras.dfs = {};
|
||||||
|
}
|
||||||
|
extras.dfs.bearing = dfBearing;
|
||||||
|
extras.dfs.strength = dfStrength;
|
||||||
|
|
||||||
|
if (withStructure) {
|
||||||
|
(altitudeOffset !== undefined && commentOffset < altitudeOffset ? beforeFields : fields).push(
|
||||||
|
{ type: FieldType.STRING, name: "DF marker", length: 1, value: "/" },
|
||||||
|
{ type: FieldType.STRING, name: "bearing", length: 3, value: dfBearing.toString() + "°" },
|
||||||
|
{ type: FieldType.CHAR, name: "separator", length: 1, value: "/" },
|
||||||
|
{ type: FieldType.STRING, name: "strength", length: 3, value: dfStrength.toString() }
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// remove DF token from ext and advance ext for further parsing
|
||||||
|
if (commentBefore !== undefined && commentBefore.length > 0) {
|
||||||
|
commentBefore = commentBefore.substring(8);
|
||||||
|
} else {
|
||||||
|
commentOffset += 8;
|
||||||
|
}
|
||||||
|
ext = ext.substring(8).trimStart();
|
||||||
|
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// No recognized 7+-byte extension at start
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse embedded telemetry in comment. Look for |ss11|, |ss1122|, |ss112233|, |ss1122334455|, or |ss1122334455!"| patterns (where ss is sequence and each pair of digits is an analog channel in base91, and optional last pair is digital channel in base91).
|
||||||
|
if ((match = comment.match(/\|([a-z0-9]{4,14})\|/i))) {
|
||||||
|
try {
|
||||||
|
const telemetry = decodeTelemetry(match[1]);
|
||||||
|
extras.telemetry = telemetry;
|
||||||
|
if (withStructure) {
|
||||||
|
fields.push(
|
||||||
|
{
|
||||||
|
type: FieldType.CHAR,
|
||||||
|
name: "telemetry start",
|
||||||
|
length: 1,
|
||||||
|
value: "|"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
type: FieldType.STRING,
|
||||||
|
name: "sequence",
|
||||||
|
length: 2,
|
||||||
|
value: telemetry.sequence.toString()
|
||||||
|
},
|
||||||
|
...telemetry.analog.map((a, i) => ({
|
||||||
|
type: FieldType.STRING,
|
||||||
|
name: `analog${i + 1}`,
|
||||||
|
length: 2,
|
||||||
|
value: a.toString()
|
||||||
|
})),
|
||||||
|
...(telemetry.digital !== undefined
|
||||||
|
? [
|
||||||
|
{
|
||||||
|
type: FieldType.STRING,
|
||||||
|
name: "digital",
|
||||||
|
length: 2,
|
||||||
|
value: telemetry.digital.toString()
|
||||||
|
}
|
||||||
|
]
|
||||||
|
: []),
|
||||||
|
{
|
||||||
|
type: FieldType.CHAR,
|
||||||
|
name: "telemetry end",
|
||||||
|
length: 1,
|
||||||
|
value: "|"
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
// Invalid telemetry format, ignore
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Export comment with extras fields removed, if any were parsed.
|
||||||
|
if (commentOffset > 0 && commentBefore !== undefined && commentBefore.length > 0) {
|
||||||
|
extras.comment = commentBefore.substring(commentOffset) + comment;
|
||||||
|
} else if (commentBefore !== undefined && commentBefore.length > 0) {
|
||||||
|
extras.comment = commentBefore + comment;
|
||||||
|
} else {
|
||||||
|
extras.comment = comment.substring(commentOffset);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (withStructure) {
|
||||||
|
const commentBeforeFields: Field[] = commentBefore
|
||||||
|
? [
|
||||||
|
{
|
||||||
|
type: FieldType.STRING,
|
||||||
|
name: "comment",
|
||||||
|
length: commentBefore.length
|
||||||
|
}
|
||||||
|
]
|
||||||
|
: [];
|
||||||
|
|
||||||
|
const commentFields: Field[] = comment
|
||||||
|
? [
|
||||||
|
{
|
||||||
|
type: FieldType.STRING,
|
||||||
|
name: "comment",
|
||||||
|
length: comment.length
|
||||||
|
}
|
||||||
|
]
|
||||||
|
: [];
|
||||||
|
|
||||||
|
// Insert the altitude fields at the correct position in the comment section based on where the altitude token was located in the original comment. If there was no altitude token, put all fields at the start of the comment section.
|
||||||
|
extras.fields = [...beforeFields, ...commentBeforeFields, ...altitudeFields, ...fields, ...commentFields];
|
||||||
|
}
|
||||||
|
|
||||||
|
return extras as Extras;
|
||||||
|
};
|
||||||
|
|
||||||
|
export const attachExtras = (payload: Payload, extras: Extras): void => {
|
||||||
|
if ("position" in payload && payload.position) {
|
||||||
|
if (extras.altitude !== undefined) {
|
||||||
|
payload.position.altitude = extras.altitude;
|
||||||
|
}
|
||||||
|
if (extras.range !== undefined) {
|
||||||
|
payload.position.range = extras.range;
|
||||||
|
}
|
||||||
|
if (extras.phg !== undefined) {
|
||||||
|
payload.position.phg = extras.phg;
|
||||||
|
}
|
||||||
|
if (extras.dfs !== undefined) {
|
||||||
|
payload.position.dfs = extras.dfs;
|
||||||
|
}
|
||||||
|
if (extras.cse !== undefined && payload.position.course === undefined) {
|
||||||
|
payload.position.course = extras.cse;
|
||||||
|
}
|
||||||
|
if (extras.spd !== undefined && payload.position.speed === undefined) {
|
||||||
|
payload.position.speed = extras.spd;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if ("altitude" in payload && payload.altitude === undefined && extras.altitude !== undefined) {
|
||||||
|
payload.altitude = extras.altitude;
|
||||||
|
}
|
||||||
|
if ("range" in payload && payload.range === undefined && extras.range !== undefined) {
|
||||||
|
payload.range = extras.range;
|
||||||
|
}
|
||||||
|
if ("phg" in payload && payload.phg === undefined && extras.phg !== undefined) {
|
||||||
|
payload.phg = extras.phg;
|
||||||
|
}
|
||||||
|
if ("dfs" in payload && payload.dfs === undefined && extras.dfs !== undefined) {
|
||||||
|
payload.dfs = extras.dfs;
|
||||||
|
}
|
||||||
|
if ("course" in payload && payload.course === undefined && extras.cse !== undefined) {
|
||||||
|
payload.course = extras.cse;
|
||||||
|
}
|
||||||
|
if ("speed" in payload && payload.speed === undefined && extras.spd !== undefined) {
|
||||||
|
payload.speed = extras.spd;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Decodes a Base91 Telemetry extension string (delimited by '|') into its components.
|
||||||
|
*
|
||||||
|
* @param ext The string between the '|' delimiters (e.g. 'ss11', 'ss112233', 'ss1122334455!"')
|
||||||
|
* @returns An object with sequence, analog (array), and optional digital (number)
|
||||||
|
*/
|
||||||
|
export const decodeTelemetry = (ext: string): ITelemetry => {
|
||||||
|
if (!ext || ext.length < 4) throw new Error("Telemetry extension too short");
|
||||||
|
// Must be even length, at least 4 (2 for seq, 2 for ch1)
|
||||||
|
if (ext.length % 2 !== 0) throw new Error("Telemetry extension must have even length");
|
||||||
|
|
||||||
|
// Sequence counter is always first 2 chars
|
||||||
|
const sequence = base91ToNumber(ext.slice(0, 2));
|
||||||
|
const analog: number[] = [];
|
||||||
|
let i = 2;
|
||||||
|
// If there are more than 12 chars, last pair is digital
|
||||||
|
let digital: number | undefined = undefined;
|
||||||
|
const analogPairs = Math.min(Math.floor((ext.length - 2) / 2), 5);
|
||||||
|
for (let j = 0; j < analogPairs; j++, i += 2) {
|
||||||
|
analog.push(base91ToNumber(ext.slice(i, i + 2)));
|
||||||
|
}
|
||||||
|
// If there are 2 chars left after 5 analogs, it's digital
|
||||||
|
if (ext.length === 14) {
|
||||||
|
digital = base91ToNumber(ext.slice(12, 14));
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
sequence,
|
||||||
|
analog,
|
||||||
|
digital
|
||||||
|
};
|
||||||
|
};
|
||||||
149
src/payload.item.ts
Normal file
149
src/payload.item.ts
Normal file
@@ -0,0 +1,149 @@
|
|||||||
|
import { FieldType, type Segment } from "@hamradio/packet";
|
||||||
|
|
||||||
|
import { DO_NOT_ARCHIVE_MARKER, DataType, type IPosition, type ItemPayload, type Payload } from "./frame.types";
|
||||||
|
import { attachExtras, decodeCommentExtras } from "./payload.extras";
|
||||||
|
import { isCompressedPosition, parseCompressedPosition, parseUncompressedPosition } from "./payload.position";
|
||||||
|
import Timestamp from "./timestamp";
|
||||||
|
|
||||||
|
export const decodeItemPayload = (
|
||||||
|
raw: string,
|
||||||
|
withStructure: boolean = false
|
||||||
|
): {
|
||||||
|
payload: Payload | null;
|
||||||
|
segment?: Segment[];
|
||||||
|
} => {
|
||||||
|
// Item format is similar to Object but name may be 3-9 chars (stored in a 9-char field)
|
||||||
|
// Example: )NNN... where ) is data type, next 9 chars are name, then state char, then timestamp, then position
|
||||||
|
if (raw.length < 12) return { payload: null }; // minimal: 1 + 3 + 1 + 7
|
||||||
|
|
||||||
|
let offset = 1; // skip data type identifier ')'
|
||||||
|
const segment: Segment[] = withStructure ? [] : [];
|
||||||
|
|
||||||
|
// Read 9-char name field (pad/truncate as present)
|
||||||
|
const rawName = raw.substring(offset, offset + 9);
|
||||||
|
const name = rawName.trimEnd();
|
||||||
|
if (withStructure) {
|
||||||
|
segment.push({
|
||||||
|
name: "item name",
|
||||||
|
data: new TextEncoder().encode(rawName).buffer,
|
||||||
|
isString: true,
|
||||||
|
fields: [{ type: FieldType.STRING, name: "name", length: 9 }]
|
||||||
|
});
|
||||||
|
}
|
||||||
|
offset += 9;
|
||||||
|
|
||||||
|
// State character: '*' = alive, '_' = killed
|
||||||
|
const stateChar = raw.charAt(offset);
|
||||||
|
if (stateChar !== "*" && stateChar !== "_") {
|
||||||
|
return { payload: null };
|
||||||
|
}
|
||||||
|
const alive = stateChar === "*";
|
||||||
|
if (withStructure) {
|
||||||
|
segment.push({
|
||||||
|
name: "item state",
|
||||||
|
data: new TextEncoder().encode(stateChar).buffer,
|
||||||
|
isString: true,
|
||||||
|
fields: [
|
||||||
|
{
|
||||||
|
type: FieldType.CHAR,
|
||||||
|
name: "State (* alive, _ killed)",
|
||||||
|
length: 1
|
||||||
|
}
|
||||||
|
]
|
||||||
|
});
|
||||||
|
}
|
||||||
|
offset += 1;
|
||||||
|
|
||||||
|
// Timestamp (7 chars)
|
||||||
|
const timeStr = raw.substring(offset, offset + 7);
|
||||||
|
const { timestamp, segment: timestampSection } = Timestamp.fromString(timeStr.substring(offset), withStructure);
|
||||||
|
if (!timestamp) return { payload: null };
|
||||||
|
if (timestampSection) segment.push(timestampSection);
|
||||||
|
offset += 7;
|
||||||
|
|
||||||
|
const isCompressed = isCompressedPosition(raw.substring(offset));
|
||||||
|
|
||||||
|
// eslint-disable-next-line no-useless-assignment
|
||||||
|
let position: IPosition | null = null;
|
||||||
|
// eslint-disable-next-line no-useless-assignment
|
||||||
|
let consumed = 0;
|
||||||
|
|
||||||
|
if (isCompressed) {
|
||||||
|
const { position: compressed, segment: compressedSection } = parseCompressedPosition(
|
||||||
|
raw.substring(offset),
|
||||||
|
withStructure
|
||||||
|
);
|
||||||
|
if (!compressed) return { payload: null };
|
||||||
|
|
||||||
|
position = {
|
||||||
|
latitude: compressed.latitude,
|
||||||
|
longitude: compressed.longitude,
|
||||||
|
symbol: compressed.symbol,
|
||||||
|
altitude: compressed.altitude
|
||||||
|
};
|
||||||
|
consumed = 13;
|
||||||
|
|
||||||
|
if (compressedSection) segment.push(compressedSection);
|
||||||
|
} else {
|
||||||
|
const { position: uncompressed, segment: uncompressedSection } = parseUncompressedPosition(
|
||||||
|
raw.substring(offset),
|
||||||
|
withStructure
|
||||||
|
);
|
||||||
|
if (!uncompressed) return { payload: null };
|
||||||
|
|
||||||
|
position = {
|
||||||
|
latitude: uncompressed.latitude,
|
||||||
|
longitude: uncompressed.longitude,
|
||||||
|
symbol: uncompressed.symbol,
|
||||||
|
ambiguity: uncompressed.ambiguity
|
||||||
|
};
|
||||||
|
consumed = 19;
|
||||||
|
|
||||||
|
if (uncompressedSection) segment.push(uncompressedSection);
|
||||||
|
}
|
||||||
|
|
||||||
|
offset += consumed;
|
||||||
|
const remainder = raw.substring(offset);
|
||||||
|
const doNotArchive = remainder.includes(DO_NOT_ARCHIVE_MARKER);
|
||||||
|
let comment = remainder;
|
||||||
|
|
||||||
|
const extras = decodeCommentExtras(comment, withStructure);
|
||||||
|
comment = extras.comment;
|
||||||
|
|
||||||
|
if (comment) {
|
||||||
|
position.comment = comment;
|
||||||
|
if (withStructure) {
|
||||||
|
segment.push({
|
||||||
|
name: "comment",
|
||||||
|
data: new TextEncoder().encode(remainder).buffer,
|
||||||
|
isString: true,
|
||||||
|
fields: extras.fields || []
|
||||||
|
});
|
||||||
|
}
|
||||||
|
} else if (withStructure && extras.fields) {
|
||||||
|
// No free-text comment, but extras fields exist: emit comment-only segment
|
||||||
|
segment.push({
|
||||||
|
name: "comment",
|
||||||
|
data: new TextEncoder().encode(remainder).buffer,
|
||||||
|
isString: true,
|
||||||
|
fields: extras.fields || []
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const payload: ItemPayload = {
|
||||||
|
type: DataType.Item,
|
||||||
|
doNotArchive,
|
||||||
|
name,
|
||||||
|
alive,
|
||||||
|
position
|
||||||
|
};
|
||||||
|
attachExtras(payload, extras);
|
||||||
|
|
||||||
|
if (withStructure) {
|
||||||
|
return { payload, segment };
|
||||||
|
}
|
||||||
|
|
||||||
|
return { payload };
|
||||||
|
};
|
||||||
|
|
||||||
|
export default decodeItemPayload;
|
||||||
94
src/payload.message.ts
Normal file
94
src/payload.message.ts
Normal file
@@ -0,0 +1,94 @@
|
|||||||
|
import { FieldType, type Segment } from "@hamradio/packet";
|
||||||
|
|
||||||
|
import { DO_NOT_ARCHIVE_MARKER, DataType, type MessagePayload, type Payload } from "./frame.types";
|
||||||
|
|
||||||
|
export const decodeMessagePayload = (
|
||||||
|
rawPayload: string,
|
||||||
|
withStructure: boolean = false
|
||||||
|
): {
|
||||||
|
payload: Payload | null;
|
||||||
|
segment?: Segment[];
|
||||||
|
} => {
|
||||||
|
// Message format: :AAAAAAAAA[ ]:message text
|
||||||
|
// where AAAAAAAAA is a 9-character recipient field (padded with spaces)
|
||||||
|
if (rawPayload.length < 2) return { payload: null };
|
||||||
|
|
||||||
|
let offset = 1; // skip ':' data type
|
||||||
|
const segments: Segment[] = withStructure ? [] : [];
|
||||||
|
|
||||||
|
// Attempt to read a 9-char recipient field if present
|
||||||
|
let recipient = "";
|
||||||
|
if (rawPayload.length >= offset + 1) {
|
||||||
|
// Try to read up to 9 chars for recipient, but stop early if a ':' separator appears
|
||||||
|
const look = rawPayload.substring(offset, Math.min(offset + 9, rawPayload.length));
|
||||||
|
const sepIdx = look.indexOf(":");
|
||||||
|
let raw = look;
|
||||||
|
if (sepIdx !== -1) {
|
||||||
|
raw = look.substring(0, sepIdx);
|
||||||
|
} else if (look.length < 9 && rawPayload.length >= offset + 9) {
|
||||||
|
// pad to full 9 chars if possible
|
||||||
|
raw = rawPayload.substring(offset, offset + 9);
|
||||||
|
} else if (look.length === 9) {
|
||||||
|
raw = look;
|
||||||
|
}
|
||||||
|
|
||||||
|
recipient = raw.trimEnd();
|
||||||
|
if (withStructure) {
|
||||||
|
segments.push({
|
||||||
|
name: "recipient",
|
||||||
|
data: new TextEncoder().encode(raw).buffer,
|
||||||
|
isString: true,
|
||||||
|
fields: [{ type: FieldType.STRING, name: "to", length: 9 }]
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Advance offset past the raw we consumed
|
||||||
|
offset += raw.length;
|
||||||
|
// If there was a ':' immediately after the consumed raw, skip it as separator
|
||||||
|
if (rawPayload.charAt(offset) === ":") {
|
||||||
|
offset += 1;
|
||||||
|
} else if (sepIdx !== -1) {
|
||||||
|
// Shouldn't normally happen, but ensure we advance past separator
|
||||||
|
offset += 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// After recipient there is typically a space and a colon separator before the text
|
||||||
|
// Find the first ':' after the recipient (it separates the address field from the text)
|
||||||
|
let textStart = rawPayload.indexOf(":", offset);
|
||||||
|
if (textStart === -1) {
|
||||||
|
// No explicit separator; skip any spaces and take remainder as text
|
||||||
|
while (rawPayload.charAt(offset) === " " && offset < rawPayload.length) offset += 1;
|
||||||
|
textStart = offset - 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
let text = "";
|
||||||
|
if (textStart >= 0 && textStart + 1 <= rawPayload.length) {
|
||||||
|
text = rawPayload.substring(textStart + 1);
|
||||||
|
}
|
||||||
|
const doNotArchive = text.includes(DO_NOT_ARCHIVE_MARKER);
|
||||||
|
|
||||||
|
const payload: MessagePayload = {
|
||||||
|
type: DataType.Message,
|
||||||
|
variant: "message",
|
||||||
|
doNotArchive,
|
||||||
|
addressee: recipient,
|
||||||
|
text
|
||||||
|
};
|
||||||
|
|
||||||
|
if (withStructure) {
|
||||||
|
// Emit text section
|
||||||
|
segments.push({
|
||||||
|
name: "text",
|
||||||
|
data: new TextEncoder().encode(text).buffer,
|
||||||
|
isString: true,
|
||||||
|
fields: [{ type: FieldType.STRING, name: "text", length: text.length }]
|
||||||
|
});
|
||||||
|
|
||||||
|
return { payload, segment: segments };
|
||||||
|
}
|
||||||
|
|
||||||
|
return { payload };
|
||||||
|
};
|
||||||
|
|
||||||
|
export default decodeMessagePayload;
|
||||||
300
src/payload.mice.ts
Normal file
300
src/payload.mice.ts
Normal file
@@ -0,0 +1,300 @@
|
|||||||
|
import { FieldType, type Segment } from "@hamradio/packet";
|
||||||
|
|
||||||
|
import { base91ToNumber, knotsToKmh } from ".";
|
||||||
|
import { DO_NOT_ARCHIVE_MARKER, DataType, type IAddress, MicEPayload, type Payload } from "./frame.types";
|
||||||
|
import { attachExtras, decodeCommentExtras } from "./payload.extras";
|
||||||
|
|
||||||
|
/**
 * Decodes a Mic-E payload (data type '`' current, "'" old).
 *
 * Mic-E splits the report across the packet: latitude, message bits,
 * longitude offset and E/W come from the 6-character destination address
 * (see decodeMicELatitude), while the information field carries longitude,
 * speed/course, symbol and comment.
 *
 * @param destination   The AX.25 destination address whose callsign encodes the latitude.
 * @param raw           The information field, including the data type identifier at index 0.
 * @param withStructure When true, also returns a Segment[] describing the parsed layout.
 * @returns The decoded payload (null when malformed), plus segments on request.
 */
export const decodeMicEPayload = (
  destination: IAddress,
  raw: string,
  withStructure: boolean = false
): {
  payload: Payload | null;
  segment?: Segment[];
} => {
  try {
    // Mic-E encodes position in both destination address and information field
    const dest = destination.call;

    if (dest.length < 6) return { payload: null };
    if (raw.length < 9) return { payload: null }; // Need at least data type + 8 bytes

    const segments: Segment[] = withStructure ? [] : []; // NOTE(review): both branches identical; always []

    // Decode latitude from destination address (6 characters)
    const latResult = decodeMicELatitude(dest);
    if (!latResult) return { payload: null };

    const { latitude, messageType, longitudeOffset, isWest, isStandard } = latResult;

    if (withStructure) {
      segments.push({
        name: "mic-E destination",
        data: new TextEncoder().encode(dest).buffer,
        isString: true,
        fields: [
          {
            type: FieldType.STRING,
            name: "destination",
            length: dest.length
          }
        ]
      });
    }

    // Parse information field (skip data type identifier at position 0)
    let offset = 1;

    // Longitude: 3 bytes (degrees, minutes, hundredths), each stored as value + 28
    const lonDegRaw = raw.charCodeAt(offset) - 28;
    const lonMinRaw = raw.charCodeAt(offset + 1) - 28;
    const lonHunRaw = raw.charCodeAt(offset + 2) - 28;
    offset += 3;

    // Apply longitude offset and hemisphere.
    // Degrees in 180..189 map back to 100..109; 190..199 map back to 0..9.
    let lonDeg = lonDegRaw;
    if (longitudeOffset) {
      lonDeg += 100;
    }
    if (lonDeg >= 180 && lonDeg <= 189) {
      lonDeg -= 80;
    } else if (lonDeg >= 190 && lonDeg <= 199) {
      lonDeg -= 190;
    }

    // NOTE(review): the Mic-E spec subtracts 60 when the minutes byte is >= 60;
    // that normalization is not applied here — confirm against the encoder.
    let longitude = lonDeg + lonMinRaw / 60.0 + lonHunRaw / 6000.0;
    if (isWest) {
      longitude = -longitude;
    }

    // Speed and course: 3 bytes (SP+28, DC+28, SE+28), digits split across bytes
    const sp = raw.charCodeAt(offset) - 28;
    const dc = raw.charCodeAt(offset + 1) - 28;
    const se = raw.charCodeAt(offset + 2) - 28;
    offset += 3;

    let speed = sp * 10 + Math.floor(dc / 10); // Speed in knots
    let course = (dc % 10) * 100 + se; // Course in degrees

    // Remove the fixed transmission offsets
    if (course >= 400) course -= 400;
    if (speed >= 800) speed -= 800;

    // Convert speed from knots to km/h
    const speedKmh = knotsToKmh(speed);

    // Symbol code and table — note Mic-E carries the code BEFORE the table
    if (raw.length < offset + 2) return { payload: null };
    const symbolCode = raw.charAt(offset);
    const symbolTable = raw.charAt(offset + 1);
    offset += 2;

    // Parse remaining data (altitude, comment, telemetry)
    const remaining = raw.substring(offset);
    const doNotArchive = remaining.includes(DO_NOT_ARCHIVE_MARKER);
    let altitude: number | undefined = undefined;
    let comment = remaining;

    // Check for altitude in old format: 3 base-91 chars terminated by '}'
    if (comment.length >= 4 && comment.charAt(3) === "}") {
      try {
        const altBase91 = comment.substring(0, 3);
        altitude = base91ToNumber(altBase91) - 10000; // Relative to 10km below mean sea level
        comment = comment.substring(4); // Remove altitude token from comment
      } catch {
        // Ignore altitude parsing errors
      }
    }

    // Parse RNG/PHG tokens from comment (defer attaching to result until created)
    const remainder = comment; // Use the remaining comment text for parsing extras
    const extras = decodeCommentExtras(remainder, withStructure);
    comment = extras.comment;

    // Data type identifier distinguishes current vs old Mic-E
    let payloadType: DataType.MicE | DataType.MicEOld;
    switch (raw.charAt(0)) {
      case "`":
        payloadType = DataType.MicE;
        break;
      case "'":
        payloadType = DataType.MicEOld;
        break;
      default:
        return { payload: null };
    }

    const result: MicEPayload = {
      type: payloadType,
      doNotArchive,
      position: {
        latitude,
        longitude,
        symbol: {
          table: symbolTable,
          code: symbolCode
        }
      },
      messageType,
      isStandard
    };

    // Optional fields only attached when meaningful (speed 0 / course 0 omitted)
    if (speed > 0) {
      result.position.speed = speedKmh;
    }

    if (course > 0 && course < 360) {
      result.position.course = course;
    }

    if (altitude !== undefined) {
      result.position.altitude = altitude;
    }

    if (comment) {
      result.position.comment = comment;
    }

    // Attach parsed extras if present
    attachExtras(result, extras);

    if (withStructure) {
      // Information field section (bytes after data type up to comment)
      const infoData = raw.substring(1, offset);
      segments.push({
        name: "mic-E info",
        data: new TextEncoder().encode(infoData).buffer,
        isString: true,
        fields: [
          { type: FieldType.CHAR, name: "longitude deg", length: 1 },
          { type: FieldType.CHAR, name: "longitude min", length: 1 },
          { type: FieldType.CHAR, name: "longitude hundredths", length: 1 },
          { type: FieldType.CHAR, name: "speed byte", length: 1 },
          { type: FieldType.CHAR, name: "course byte 1", length: 1 },
          { type: FieldType.CHAR, name: "course byte 2", length: 1 },
          { type: FieldType.CHAR, name: "symbol code", length: 1 },
          { type: FieldType.CHAR, name: "symbol table", length: 1 }
        ]
      });

      // Comment segment is emitted when free text or extras fields are present;
      // both branches store the pre-extraction remainder as the raw data.
      if (comment && comment.length > 0) {
        segments.push({
          name: "comment",
          data: new TextEncoder().encode(remainder).buffer,
          isString: true,
          fields: extras.fields || []
        });
      } else if (extras.fields) {
        segments.push({
          name: "comment",
          data: new TextEncoder().encode(remainder).buffer,
          isString: true,
          fields: extras.fields
        });
      }

      return { payload: result, segment: segments };
    }

    return { payload: result };
  } catch {
    // Any unexpected indexing/decoding failure is reported as a parse failure
    return { payload: null };
  }
};
|
||||||
|
|
||||||
|
const decodeMicELatitude = (
|
||||||
|
dest: string
|
||||||
|
): {
|
||||||
|
latitude: number;
|
||||||
|
messageType: string;
|
||||||
|
longitudeOffset: boolean;
|
||||||
|
isWest: boolean;
|
||||||
|
isStandard: boolean;
|
||||||
|
} | null => {
|
||||||
|
if (dest.length < 6) return null;
|
||||||
|
|
||||||
|
// Each destination character encodes a latitude digit and message bits
|
||||||
|
const digits: number[] = [];
|
||||||
|
const messageBits: number[] = [];
|
||||||
|
|
||||||
|
for (let i = 0; i < 6; i++) {
|
||||||
|
const code = dest.charCodeAt(i);
|
||||||
|
let digit: number;
|
||||||
|
let msgBit: number;
|
||||||
|
|
||||||
|
if (code >= 48 && code <= 57) {
|
||||||
|
// '0'-'9'
|
||||||
|
digit = code - 48;
|
||||||
|
msgBit = 0;
|
||||||
|
} else if (code >= 65 && code <= 74) {
|
||||||
|
// 'A'-'J' (A=0, B=1, ... J=9)
|
||||||
|
digit = code - 65;
|
||||||
|
msgBit = 1;
|
||||||
|
} else if (code === 75) {
|
||||||
|
// 'K' means space (used for ambiguity)
|
||||||
|
digit = 0;
|
||||||
|
msgBit = 1;
|
||||||
|
} else if (code === 76) {
|
||||||
|
// 'L' means space
|
||||||
|
digit = 0;
|
||||||
|
msgBit = 0;
|
||||||
|
} else if (code >= 80 && code <= 89) {
|
||||||
|
// 'P'-'Y' custom message types (P=0, Q=1, R=2, ... Y=9)
|
||||||
|
digit = code - 80;
|
||||||
|
msgBit = 1;
|
||||||
|
} else if (code === 90) {
|
||||||
|
// 'Z' means space
|
||||||
|
digit = 0;
|
||||||
|
msgBit = 1;
|
||||||
|
} else {
|
||||||
|
return null; // Invalid character
|
||||||
|
}
|
||||||
|
|
||||||
|
digits.push(digit);
|
||||||
|
messageBits.push(msgBit);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Decode latitude: format is DDMM.HH (degrees, minutes, hundredths)
|
||||||
|
const latDeg = digits[0] * 10 + digits[1];
|
||||||
|
const latMin = digits[2] * 10 + digits[3];
|
||||||
|
const latHun = digits[4] * 10 + digits[5];
|
||||||
|
|
||||||
|
let latitude = latDeg + latMin / 60.0 + latHun / 6000.0;
|
||||||
|
|
||||||
|
// Message bits determine hemisphere and other flags
|
||||||
|
// Bit 3 (messageBits[3]): 0 = North, 1 = South
|
||||||
|
// Bit 4 (messageBits[4]): 0 = West, 1 = East
|
||||||
|
// Bit 5 (messageBits[5]): 0 = longitude offset +0, 1 = longitude offset +100
|
||||||
|
const isNorth = messageBits[3] === 0;
|
||||||
|
const isWest = messageBits[4] === 0;
|
||||||
|
const longitudeOffset = messageBits[5] === 1;
|
||||||
|
|
||||||
|
if (!isNorth) {
|
||||||
|
latitude = -latitude;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Decode message type from bits 0, 1, 2
|
||||||
|
const msgValue = messageBits[0] * 4 + messageBits[1] * 2 + messageBits[2];
|
||||||
|
const messageTypes = [
|
||||||
|
"M0: Off Duty",
|
||||||
|
"M1: En Route",
|
||||||
|
"M2: In Service",
|
||||||
|
"M3: Returning",
|
||||||
|
"M4: Committed",
|
||||||
|
"M5: Special",
|
||||||
|
"M6: Priority",
|
||||||
|
"M7: Emergency"
|
||||||
|
];
|
||||||
|
const messageType = messageTypes[msgValue] || "Unknown";
|
||||||
|
|
||||||
|
// Standard vs custom message indicator
|
||||||
|
const isStandard = messageBits[0] === 1;
|
||||||
|
|
||||||
|
return {
|
||||||
|
latitude,
|
||||||
|
messageType,
|
||||||
|
longitudeOffset,
|
||||||
|
isWest,
|
||||||
|
isStandard
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
export default decodeMicEPayload;
|
||||||
161
src/payload.object.ts
Normal file
161
src/payload.object.ts
Normal file
@@ -0,0 +1,161 @@
|
|||||||
|
import { FieldType, Segment } from "@hamradio/packet";
|
||||||
|
|
||||||
|
import { DO_NOT_ARCHIVE_MARKER, DataType, type IPosition, ObjectPayload, type Payload } from "./frame.types";
|
||||||
|
import { attachExtras, decodeCommentExtras } from "./payload.extras";
|
||||||
|
import { isCompressedPosition, parseCompressedPosition, parseUncompressedPosition } from "./payload.position";
|
||||||
|
import Timestamp from "./timestamp";
|
||||||
|
|
||||||
|
/**
 * Decodes an APRS Object payload (data type ';').
 *
 * Layout: ';' + 9-char object name + state char ('*' alive / '_' killed) +
 * 7-char timestamp + position (compressed 13 chars or uncompressed 19 chars)
 * + optional comment/extension text.
 *
 * @param raw           The complete information field, including the ';' identifier.
 * @param withStructure When true, also returns a Segment[] describing the parsed layout.
 * @returns The decoded payload (null when malformed), plus segments on request.
 */
export const decodeObjectPayload = (
  raw: string,
  withStructure: boolean = false
): {
  payload: Payload | null;
  segment?: Segment[];
} => {
  try {
    // Object format: ;AAAAAAAAAcDDHHMMzDDMM.hhN/DDDMM.hhW$comment
    // ^ data type
    // 9-char name
    // alive (*) / killed (_)
    if (raw.length < 18) return { payload: null }; // 1 + 9 + 1 + 7 minimum

    let offset = 1; // Skip data type identifier ';'
    const segment: Segment[] = withStructure ? [] : []; // NOTE(review): both branches identical; always []

    // 9-char name field; trailing space padding is stripped for the value
    const rawName = raw.substring(offset, offset + 9);
    const name = rawName.trimEnd();
    if (withStructure) {
      segment.push({
        name: "object",
        data: new TextEncoder().encode(rawName).buffer,
        isString: true,
        fields: [{ type: FieldType.STRING, name: "name", length: 9 }]
      });
    }
    offset += 9;

    // State character: '*' = alive, '_' = killed
    const stateChar = raw.charAt(offset);
    if (stateChar !== "*" && stateChar !== "_") {
      return { payload: null };
    }
    const alive = stateChar === "*";
    if (withStructure) {
      let state: string = "invalid";
      if (stateChar === "*") {
        state = "alive";
      } else if (stateChar === "_") {
        state = "killed";
      }
      // Fold the state character into the preceding "object" segment:
      // widen its data to name+state and append a state field.
      segment[segment.length - 1].data = new TextEncoder().encode(raw.substring(offset - 9, offset + 1)).buffer;
      segment[segment.length - 1].fields.push({
        type: FieldType.CHAR,
        name: "state",
        length: 1,
        value: state
      });
    }
    offset += 1;

    // Timestamp (7 chars); objects without a valid timestamp are rejected
    const timeStr = raw.substring(offset, offset + 7);
    const { timestamp, segment: timestampSection } = Timestamp.fromString(timeStr, withStructure);
    if (!timestamp) {
      return { payload: null };
    }
    if (timestampSection) {
      segment.push(timestampSection);
    }
    offset += 7;

    const isCompressed = isCompressedPosition(raw.substring(offset));

    let position: IPosition | null = null;
    let consumed = 0;

    if (isCompressed) {
      const { position: compressed, segment: compressedSection } = parseCompressedPosition(
        raw.substring(offset),
        withStructure
      );
      if (!compressed) return { payload: null };

      position = {
        latitude: compressed.latitude,
        longitude: compressed.longitude,
        symbol: compressed.symbol,
        altitude: compressed.altitude
      };
      consumed = 13; // compressed positions are a fixed 13 chars

      if (compressedSection) {
        segment.push(compressedSection);
      }
    } else {
      const { position: uncompressed, segment: uncompressedSection } = parseUncompressedPosition(
        raw.substring(offset),
        withStructure
      );
      if (!uncompressed) return { payload: null };

      position = {
        latitude: uncompressed.latitude,
        longitude: uncompressed.longitude,
        symbol: uncompressed.symbol,
        ambiguity: uncompressed.ambiguity
      };
      consumed = 19; // uncompressed positions are a fixed 19 chars

      if (uncompressedSection) {
        segment.push(uncompressedSection);
      }
    }

    offset += consumed;
    const remainder = raw.substring(offset);
    const doNotArchive = remainder.includes(DO_NOT_ARCHIVE_MARKER);
    let comment = remainder;

    // Parse RNG/PHG tokens
    const extras = decodeCommentExtras(comment, withStructure);
    comment = extras.comment;

    if (comment) {
      position.comment = comment;

      if (withStructure) {
        segment.push({
          name: "comment",
          data: new TextEncoder().encode(remainder).buffer,
          isString: true,
          fields: extras.fields || []
        });
      }
    } else if (withStructure && extras.fields) {
      // No free-text comment, but extras fields exist: emit comment-only segment
      segment.push({
        name: "comment",
        data: new TextEncoder().encode(remainder).buffer,
        isString: true,
        fields: extras.fields || []
      });
    }

    const payload: ObjectPayload = {
      type: DataType.Object,
      doNotArchive,
      name,
      timestamp,
      alive,
      position
    };
    attachExtras(payload, extras);

    if (withStructure) {
      return { payload, segment };
    }

    return { payload };
  } catch {
    // Any unexpected indexing/decoding failure is reported as a parse failure
    return { payload: null };
  }
};
|
||||||
|
|
||||||
|
export default decodeObjectPayload;
|
||||||
344
src/payload.position.ts
Normal file
344
src/payload.position.ts
Normal file
@@ -0,0 +1,344 @@
|
|||||||
|
import { FieldType, type Segment } from "@hamradio/packet";
|
||||||
|
|
||||||
|
import { DO_NOT_ARCHIVE_MARKER, DataType, type IPosition, type Payload, type PositionPayload } from "./frame.types";
|
||||||
|
import { base91ToNumber, feetToMeters } from "./parser";
|
||||||
|
import { attachExtras, decodeCommentExtras } from "./payload.extras";
|
||||||
|
import Position from "./position";
|
||||||
|
import Timestamp from "./timestamp";
|
||||||
|
|
||||||
|
/**
 * Decodes an APRS Position payload (data types '!', '=', '/', '@').
 *
 * '/' and '@' carry a 7-char timestamp; '=' and '@' indicate messaging
 * capability. The position itself may be compressed (13 chars) or
 * uncompressed (19 chars), followed by optional comment/extension text.
 *
 * @param dataType      The data type identifier character ('!', '=', '/', '@').
 * @param raw           The complete information field, including the identifier.
 * @param withStructure When true, also returns a Segment[] describing the parsed layout.
 * @returns The decoded payload (null when malformed), plus segments on request.
 */
export const decodePositionPayload = (
  dataType: string,
  raw: string,
  withStructure: boolean = false
): { payload: Payload | null; segment?: Segment[] } => {
  try {
    const hasTimestamp = dataType === "/" || dataType === "@";
    const messaging = dataType === "=" || dataType === "@";
    let offset = 1; // Skip data type identifier

    // Build structure as we parse
    const structure: Segment[] = withStructure ? [] : []; // NOTE(review): both branches identical; always []

    let timestamp: Timestamp | undefined = undefined;

    // Parse timestamp if present (7 characters: DDHHMMz, HHMMSSh or MMDDHHMM forms)
    if (hasTimestamp) {
      if (raw.length < 8) return { payload: null };
      const timeStr = raw.substring(offset, offset + 7);
      const { timestamp: parsedTimestamp, segment: timestampSegment } = Timestamp.fromString(timeStr, withStructure);
      timestamp = parsedTimestamp;

      if (timestampSegment) {
        structure.push(timestampSegment);
      }

      offset += 7;
    }

    // Need at least enough characters for compressed position (13) or
    // uncompressed (19). Allow parsing to continue if compressed-length is present.
    if (raw.length < offset + 13) return { payload: null };

    // Check if compressed format
    const isCompressed = isCompressedPosition(raw.substring(offset));

    let position: Position;
    let comment = "";

    if (isCompressed) {
      // Compressed format: /YYYYXXXX$csT
      const { position: compressed, segment: compressedSegment } = parseCompressedPosition(
        raw.substring(offset),
        withStructure
      );
      if (!compressed) return { payload: null };

      position = new Position({
        latitude: compressed.latitude,
        longitude: compressed.longitude,
        symbol: compressed.symbol
      });

      // Altitude may be carried in the compressed cs bytes
      if (compressed.altitude !== undefined) {
        position.altitude = compressed.altitude;
      }

      if (compressedSegment) {
        structure.push(compressedSegment);
      }

      offset += 13; // Compressed position is 13 chars
      comment = raw.substring(offset);
    } else {
      // Uncompressed format: DDMMmmH/DDDMMmmH$
      const { position: uncompressed, segment: uncompressedSegment } = parseUncompressedPosition(
        raw.substring(offset),
        withStructure
      );
      if (!uncompressed) return { payload: null };

      position = new Position({
        latitude: uncompressed.latitude,
        longitude: uncompressed.longitude,
        symbol: uncompressed.symbol
      });

      // Position ambiguity (space-blanked digits) only exists uncompressed
      if (uncompressed.ambiguity !== undefined) {
        position.ambiguity = uncompressed.ambiguity;
      }

      if (uncompressedSegment) {
        structure.push(uncompressedSegment);
      }

      offset += 19; // Uncompressed position is 19 chars
      comment = raw.substring(offset);
    }

    // Extract Altitude, CSE/SPD, RNG and PHG tokens and optionally emit sections
    const remainder = comment; // Use the remaining comment text for parsing extras
    const doNotArchive = remainder.includes(DO_NOT_ARCHIVE_MARKER);
    const extras = decodeCommentExtras(remainder, withStructure);
    comment = extras.comment;

    if (comment) {
      position.comment = comment;

      // Emit comment section as we parse
      if (withStructure) {
        structure.push({
          name: "comment",
          data: new TextEncoder().encode(remainder).buffer,
          isString: true,
          fields: extras.fields || []
        });
      }
    } else if (withStructure && extras.fields) {
      // No free-text comment, but extras were present: emit a comment section containing only fields
      structure.push({
        name: "comment",
        data: new TextEncoder().encode("").buffer,
        isString: true,
        fields: extras.fields || []
      });
    }

    // Map the identifier character to the concrete payload type
    let payloadType:
      | DataType.PositionNoTimestampNoMessaging
      | DataType.PositionNoTimestampWithMessaging
      | DataType.PositionWithTimestampNoMessaging
      | DataType.PositionWithTimestampWithMessaging;
    switch (dataType) {
      case "!":
        payloadType = DataType.PositionNoTimestampNoMessaging;
        break;
      case "=":
        payloadType = DataType.PositionNoTimestampWithMessaging;
        break;
      case "/":
        payloadType = DataType.PositionWithTimestampNoMessaging;
        break;
      case "@":
        payloadType = DataType.PositionWithTimestampWithMessaging;
        break;
      default:
        return { payload: null };
    }

    const payload: PositionPayload = {
      type: payloadType,
      doNotArchive,
      timestamp,
      position,
      messaging
    };
    attachExtras(payload, extras);

    if (withStructure) {
      return { payload, segment: structure };
    }

    return { payload };
  } catch {
    // Any unexpected indexing/decoding failure is reported as a parse failure
    return { payload: null };
  }
};
|
||||||
|
|
||||||
|
export const isCompressedPosition = (data: string): boolean => {
|
||||||
|
if (data.length < 13) return false;
|
||||||
|
|
||||||
|
// First prefer uncompressed detection by attempting an uncompressed parse.
|
||||||
|
// Uncompressed APRS positions do not have a fixed symbol table separator;
|
||||||
|
// position 8 is a symbol table identifier and may vary.
|
||||||
|
if (data.length >= 19) {
|
||||||
|
const uncompressed = parseUncompressedPosition(data, false);
|
||||||
|
if (uncompressed.position) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// For compressed format, check if the position part looks like base-91 encoded data
|
||||||
|
// Compressed format: STYYYYXXXXcsT where ST is symbol table/code
|
||||||
|
// Base-91 chars are in range 33-124 (! to |)
|
||||||
|
const lat1 = data.charCodeAt(1);
|
||||||
|
const lat2 = data.charCodeAt(2);
|
||||||
|
const lon1 = data.charCodeAt(5);
|
||||||
|
const lon2 = data.charCodeAt(6);
|
||||||
|
|
||||||
|
return (
|
||||||
|
lat1 >= 33 && lat1 <= 124 && lat2 >= 33 && lat2 <= 124 && lon1 >= 33 && lon1 <= 124 && lon2 >= 33 && lon2 <= 124
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
export const parseCompressedPosition = (
|
||||||
|
data: string,
|
||||||
|
withStructure: boolean = false
|
||||||
|
): {
|
||||||
|
position: IPosition | null;
|
||||||
|
segment?: Segment;
|
||||||
|
} => {
|
||||||
|
if (data.length < 13) return { position: null };
|
||||||
|
|
||||||
|
const symbolTable = data.charAt(0);
|
||||||
|
const symbolCode = data.charAt(9);
|
||||||
|
|
||||||
|
// Extract base-91 encoded position (4 characters each)
|
||||||
|
const latStr = data.substring(1, 5);
|
||||||
|
const lonStr = data.substring(5, 9);
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Decode base-91 encoded latitude and longitude
|
||||||
|
const latBase91 = base91ToNumber(latStr);
|
||||||
|
const lonBase91 = base91ToNumber(lonStr);
|
||||||
|
|
||||||
|
// Convert to degrees
|
||||||
|
const latitude = 90 - latBase91 / 380926;
|
||||||
|
const longitude = -180 + lonBase91 / 190463;
|
||||||
|
|
||||||
|
const result: IPosition = {
|
||||||
|
latitude,
|
||||||
|
longitude,
|
||||||
|
symbol: {
|
||||||
|
table: symbolTable,
|
||||||
|
code: symbolCode
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Check for compressed altitude (csT format)
|
||||||
|
const cs = data.charAt(10);
|
||||||
|
const t = data.charCodeAt(11);
|
||||||
|
|
||||||
|
if (cs === " " && t >= 33 && t <= 124) {
|
||||||
|
// Compressed altitude: altitude = 1.002^(t-33) feet
|
||||||
|
const altFeet = Math.pow(1.002, t - 33);
|
||||||
|
result.altitude = feetToMeters(altFeet); // Convert to meters
|
||||||
|
}
|
||||||
|
|
||||||
|
const section: Segment | undefined = withStructure
|
||||||
|
? {
|
||||||
|
name: "position",
|
||||||
|
data: new TextEncoder().encode(data.substring(0, 13)).buffer,
|
||||||
|
isString: true,
|
||||||
|
fields: [
|
||||||
|
{ type: FieldType.CHAR, length: 1, name: "symbol table" },
|
||||||
|
{ type: FieldType.STRING, length: 4, name: "latitude" },
|
||||||
|
{ type: FieldType.STRING, length: 4, name: "longitude" },
|
||||||
|
{ type: FieldType.CHAR, length: 1, name: "symbol code" },
|
||||||
|
{ type: FieldType.CHAR, length: 1, name: "course/speed type" },
|
||||||
|
{ type: FieldType.CHAR, length: 1, name: "course/speed value" },
|
||||||
|
{ type: FieldType.CHAR, length: 1, name: "altitude" }
|
||||||
|
]
|
||||||
|
}
|
||||||
|
: undefined;
|
||||||
|
|
||||||
|
return { position: result, segment: section };
|
||||||
|
} catch {
|
||||||
|
return { position: null };
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
export const parseUncompressedPosition = (
|
||||||
|
data: string,
|
||||||
|
withStructure: boolean = false
|
||||||
|
): {
|
||||||
|
position: IPosition | null;
|
||||||
|
segment?: Segment;
|
||||||
|
} => {
|
||||||
|
if (data.length < 19) return { position: null };
|
||||||
|
|
||||||
|
// Format: DDMMmmH/DDDMMmmH$ where H is hemisphere, $ is symbol code
|
||||||
|
// Positions: 0-7 (latitude), 8 (symbol table), 9-17 (longitude), 18 (symbol code)
|
||||||
|
// Spaces may replace rightmost digits for ambiguity/privacy
|
||||||
|
|
||||||
|
const latStr = data.substring(0, 8); // DDMMmmH (8 chars: 49 03.50 N)
|
||||||
|
const symbolTable = data.charAt(8);
|
||||||
|
const lonStr = data.substring(9, 18); // DDDMMmmH (9 chars: 072 01.75 W)
|
||||||
|
const symbolCode = data.charAt(18);
|
||||||
|
|
||||||
|
// Count and handle ambiguity (spaces in minutes part replace rightmost digits)
|
||||||
|
let ambiguity = 0;
|
||||||
|
const latSpaceCount = (latStr.match(/ /g) || []).length;
|
||||||
|
const lonSpaceCount = (lonStr.match(/ /g) || []).length;
|
||||||
|
|
||||||
|
if (latSpaceCount > 0 || lonSpaceCount > 0) {
|
||||||
|
// Use the maximum space count (they should be the same, but be defensive)
|
||||||
|
ambiguity = Math.max(latSpaceCount, lonSpaceCount);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Replace spaces with zeros for parsing
|
||||||
|
const latStrNormalized = latStr.replace(/ /g, "0");
|
||||||
|
const lonStrNormalized = lonStr.replace(/ /g, "0");
|
||||||
|
|
||||||
|
// Parse latitude
|
||||||
|
const latDeg = parseInt(latStrNormalized.substring(0, 2), 10);
|
||||||
|
const latMin = parseFloat(latStrNormalized.substring(2, 7));
|
||||||
|
const latHem = latStrNormalized.charAt(7);
|
||||||
|
|
||||||
|
if (isNaN(latDeg) || isNaN(latMin)) return { position: null };
|
||||||
|
if (latHem !== "N" && latHem !== "S") return { position: null };
|
||||||
|
|
||||||
|
let latitude = latDeg + latMin / 60;
|
||||||
|
if (latHem === "S") latitude = -latitude;
|
||||||
|
|
||||||
|
// Parse longitude
|
||||||
|
const lonDeg = parseInt(lonStrNormalized.substring(0, 3), 10);
|
||||||
|
const lonMin = parseFloat(lonStrNormalized.substring(3, 8));
|
||||||
|
const lonHem = lonStrNormalized.charAt(8);
|
||||||
|
|
||||||
|
if (isNaN(lonDeg) || isNaN(lonMin)) return { position: null };
|
||||||
|
if (lonHem !== "E" && lonHem !== "W") return { position: null };
|
||||||
|
|
||||||
|
let longitude = lonDeg + lonMin / 60;
|
||||||
|
if (lonHem === "W") longitude = -longitude;
|
||||||
|
|
||||||
|
const result: IPosition = {
|
||||||
|
latitude,
|
||||||
|
longitude,
|
||||||
|
symbol: {
|
||||||
|
table: symbolTable,
|
||||||
|
code: symbolCode
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
if (ambiguity > 0) {
|
||||||
|
result.ambiguity = ambiguity;
|
||||||
|
}
|
||||||
|
|
||||||
|
const segment: Segment | undefined = withStructure
|
||||||
|
? {
|
||||||
|
name: "position",
|
||||||
|
data: new TextEncoder().encode(data.substring(0, 19)).buffer,
|
||||||
|
isString: true,
|
||||||
|
fields: [
|
||||||
|
{ type: FieldType.STRING, length: 8, name: "latitude" },
|
||||||
|
{ type: FieldType.CHAR, length: 1, name: "symbol table" },
|
||||||
|
{ type: FieldType.STRING, length: 9, name: "longitude" },
|
||||||
|
{ type: FieldType.CHAR, length: 1, name: "symbol code" }
|
||||||
|
]
|
||||||
|
}
|
||||||
|
: undefined;
|
||||||
|
|
||||||
|
return { position: result, segment };
|
||||||
|
};
|
||||||
|
|
||||||
|
export default decodePositionPayload;
|
||||||
69
src/payload.query.ts
Normal file
69
src/payload.query.ts
Normal file
@@ -0,0 +1,69 @@
|
|||||||
|
import { FieldType, type Segment } from "@hamradio/packet";
|
||||||
|
|
||||||
|
import { DataType, type Payload, type QueryPayload } from "./frame.types";
|
||||||
|
|
||||||
|
export const decodeQueryPayload = (
|
||||||
|
raw: string,
|
||||||
|
withStructure: boolean = false
|
||||||
|
): {
|
||||||
|
payload: Payload | null;
|
||||||
|
segment?: Segment[];
|
||||||
|
} => {
|
||||||
|
try {
|
||||||
|
if (raw.length < 2) return { payload: null };
|
||||||
|
|
||||||
|
// Skip data type identifier '?'
|
||||||
|
const segments: Segment[] = withStructure ? [] : [];
|
||||||
|
|
||||||
|
// Remaining payload
|
||||||
|
const rest = raw.substring(1).trim();
|
||||||
|
if (!rest) return { payload: null };
|
||||||
|
|
||||||
|
// Query type is the first token (up to first space)
|
||||||
|
const firstSpace = rest.indexOf(" ");
|
||||||
|
let queryType = "";
|
||||||
|
let target: string | undefined = undefined;
|
||||||
|
|
||||||
|
if (firstSpace === -1) {
|
||||||
|
queryType = rest;
|
||||||
|
} else {
|
||||||
|
queryType = rest.substring(0, firstSpace);
|
||||||
|
target = rest.substring(firstSpace + 1).trim();
|
||||||
|
if (target === "") target = undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!queryType) return { payload: null };
|
||||||
|
|
||||||
|
if (withStructure) {
|
||||||
|
// Emit query type section
|
||||||
|
segments.push({
|
||||||
|
name: "query type",
|
||||||
|
data: new TextEncoder().encode(queryType).buffer,
|
||||||
|
isString: true,
|
||||||
|
fields: [{ type: FieldType.STRING, name: "type", length: queryType.length }]
|
||||||
|
});
|
||||||
|
|
||||||
|
if (target) {
|
||||||
|
segments.push({
|
||||||
|
name: "query target",
|
||||||
|
data: new TextEncoder().encode(target).buffer,
|
||||||
|
isString: true,
|
||||||
|
fields: [{ type: FieldType.STRING, name: "target", length: target.length }]
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const payload: QueryPayload = {
|
||||||
|
type: DataType.Query,
|
||||||
|
queryType,
|
||||||
|
...(target ? { target } : {})
|
||||||
|
};
|
||||||
|
|
||||||
|
if (withStructure) return { payload, segment: segments };
|
||||||
|
return { payload };
|
||||||
|
} catch {
|
||||||
|
return { payload: null };
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
export default decodeQueryPayload;
|
||||||
161
src/payload.rawgps.ts
Normal file
161
src/payload.rawgps.ts
Normal file
@@ -0,0 +1,161 @@
|
|||||||
|
import { FieldType, type Segment } from "@hamradio/packet";
|
||||||
|
import { DTM, GGA, INmeaSentence, Decoder as NmeaDecoder, RMC } from "extended-nmea";
|
||||||
|
|
||||||
|
import { DataType, type IPosition, type Payload, type RawGPSPayload } from "./frame.types";
|
||||||
|
|
||||||
|
/**
 * Decode an APRS raw GPS payload: '$' followed by an NMEA 0183 sentence.
 *
 * Per APRS any sentence is accepted as raw-gps; position extraction is
 * best-effort. Two extraction paths are tried in order:
 *   1. the extended-nmea Decoder (RMC/GGA/DTM instances), then
 *   2. a minimal hand-rolled field parser for RMC/GGA sentences.
 *
 * @param raw - Full payload text including the leading '$'.
 * @param withStructure - When true, also emit structure segments.
 * @returns Decoded payload (or null on failure) plus optional segments.
 */
export const decodeRawGPSPayload = (
  raw: string,
  withStructure: boolean = false
): {
  payload: Payload | null;
  segment?: Segment[];
} => {
  try {
    if (raw.length < 2) return { payload: null };

    // Raw GPS payloads start with '$' followed by an NMEA sentence
    const sentence = raw.substring(1).trim();

    // Attempt to parse with extended-nmea Decoder to extract position (best-effort)
    let parsed: INmeaSentence | null = null;
    try {
      // Re-attach the '$' prefix stripped above if the sentence lost it.
      const full = sentence.startsWith("$") ? sentence : `$${sentence}`;
      parsed = NmeaDecoder.decode(full);
    } catch {
      // ignore parse errors - accept any sentence as raw-gps per APRS
    }

    const payload: RawGPSPayload = {
      type: DataType.RawGPS,
      sentence
    };

    // If parse produced latitude/longitude, attach structured position.
    // Otherwise fallback to a minimal NMEA parser for common sentences (RMC, GGA).
    // NOTE(review): DTM lat/lon are datum offsets, not a fix — confirm that
    // treating them as a position is intended.
    // NOTE(review): the truthiness checks rely on GeoCoordinate objects being
    // truthy whenever present — confirm against extended-nmea's API.
    if (
      parsed &&
      (parsed instanceof RMC || parsed instanceof GGA || parsed instanceof DTM) &&
      parsed.latitude &&
      parsed.longitude
    ) {
      // extended-nmea latitude/longitude are GeoCoordinate objects with
      // fields { degrees, decimal, quadrant }
      const latObj = parsed.latitude;
      const lonObj = parsed.longitude;
      // Degrees + fractional minutes / 60; `|| 0` defends against a
      // non-numeric decimal component.
      const lat = latObj.degrees + (Number(latObj.decimal) || 0) / 60.0;
      const lon = lonObj.degrees + (Number(lonObj.decimal) || 0) / 60.0;
      // South/West quadrants are negative.
      const latitude = latObj.quadrant === "S" ? -lat : lat;
      const longitude = lonObj.quadrant === "W" ? -lon : lon;

      const pos: IPosition = {
        latitude,
        longitude
      };

      // altitude — prefer `altitude` over `altMean` when both exist
      // (the second assignment overwrites the first).
      if ("altMean" in parsed && parsed.altMean !== undefined) {
        pos.altitude = Number(parsed.altMean);
      }
      if ("altitude" in parsed && parsed.altitude !== undefined) {
        pos.altitude = Number(parsed.altitude);
      }

      // speed/course (RMC fields)
      // NOTE(review): values are copied through unconverted — presumably
      // knots and true degrees per NMEA; confirm consumers expect that.
      if ("speedOverGround" in parsed && parsed.speedOverGround !== undefined) {
        pos.speed = Number(parsed.speedOverGround);
      }
      if ("courseOverGround" in parsed && parsed.courseOverGround !== undefined) {
        pos.course = Number(parsed.courseOverGround);
      }

      payload.position = pos;
    } else {
      // Fallback: minimal field-splitting parser for RMC/GGA.
      try {
        const full = sentence.startsWith("$") ? sentence : `$${sentence}`;
        // Drop the trailing "*hh" checksum before splitting on commas.
        const withoutChecksum = full.split("*")[0];
        const parts = withoutChecksum.split(",");
        // Sentence header without '$', e.g. "GPRMC".
        const header = parts[0].slice(1).toUpperCase();

        // Convert NMEA "ddmm.mmmm"/"dddmm.mmmm" + hemisphere to signed
        // decimal degrees; returns undefined on any malformed input.
        const parseCoord = (coord: string, hemi: string) => {
          if (!coord || coord === "") return undefined;
          // Latitude has 2 degree digits, longitude 3.
          const degDigits = hemi === "N" || hemi === "S" ? 2 : 3;
          if (coord.length <= degDigits) return undefined;
          const degPart = coord.slice(0, degDigits);
          const minPart = coord.slice(degDigits);
          const degrees = parseFloat(degPart);
          const mins = parseFloat(minPart);
          if (Number.isNaN(degrees) || Number.isNaN(mins)) return undefined;
          let dec = degrees + mins / 60.0;
          if (hemi === "S" || hemi === "W") dec = -dec;
          return dec;
        };

        if (header.endsWith("RMC")) {
          // RMC fields: 3=lat, 4=N/S, 5=lon, 6=E/W, 7=speed (knots), 8=course
          const lat = parseCoord(parts[3], parts[4]);
          const lon = parseCoord(parts[5], parts[6]);
          if (lat !== undefined && lon !== undefined) {
            const pos: IPosition = { latitude: lat, longitude: lon };
            if (parts[7]) pos.speed = Number(parts[7]);
            if (parts[8]) pos.course = Number(parts[8]);
            payload.position = pos;
          }
        } else if (header.endsWith("GGA")) {
          // GGA fields: 2=lat, 3=N/S, 4=lon, 5=E/W, 9=altitude (meters)
          const lat = parseCoord(parts[2], parts[3]);
          const lon = parseCoord(parts[4], parts[5]);
          if (lat !== undefined && lon !== undefined) {
            const pos: IPosition = { latitude: lat, longitude: lon };
            if (parts[9]) pos.altitude = Number(parts[9]);
            payload.position = pos;
          }
        }
      } catch {
        // ignore fallback parse errors
      }
    }

    if (withStructure) {
      // Always emit the raw sentence section.
      const segments: Segment[] = [
        {
          name: "raw-gps",
          data: new TextEncoder().encode(sentence).buffer,
          isString: true,
          fields: [
            {
              type: FieldType.STRING,
              name: "sentence",
              length: sentence.length
            }
          ]
        }
      ];

      // If a position was recovered, include it as a JSON-stringified section.
      if (payload.position) {
        segments.push({
          name: "raw-gps-position",
          data: new TextEncoder().encode(JSON.stringify(payload.position)).buffer,
          isString: true,
          fields: [
            {
              type: FieldType.STRING,
              name: "latitude",
              length: String(payload.position.latitude).length
            },
            {
              type: FieldType.STRING,
              name: "longitude",
              length: String(payload.position.longitude).length
            }
          ]
        });
      }

      return { payload, segment: segments };
    }

    return { payload };
  } catch {
    return { payload: null };
  }
};
|
||||||
|
|
||||||
|
export default decodeRawGPSPayload;
|
||||||
79
src/payload.status.ts
Normal file
79
src/payload.status.ts
Normal file
@@ -0,0 +1,79 @@
|
|||||||
|
import { FieldType, type Segment } from "@hamradio/packet";
|
||||||
|
|
||||||
|
import { DO_NOT_ARCHIVE_MARKER, DataType, type Payload, type StatusPayload } from "./frame.types";
|
||||||
|
import Timestamp from "./timestamp";
|
||||||
|
|
||||||
|
export const decodeStatusPayload = (
|
||||||
|
raw: string,
|
||||||
|
withStructure: boolean = false
|
||||||
|
): {
|
||||||
|
payload: Payload | null;
|
||||||
|
segment?: Segment[];
|
||||||
|
} => {
|
||||||
|
// Status payload: optional 7-char timestamp followed by free text.
|
||||||
|
// We'll also detect a trailing Maidenhead locator (4 or 6 chars) and expose it.
|
||||||
|
const offsetBase = 1; // skip data type identifier '>'
|
||||||
|
if (raw.length <= offsetBase) return { payload: null };
|
||||||
|
|
||||||
|
let offset = offsetBase;
|
||||||
|
const segments: Segment[] = withStructure ? [] : [];
|
||||||
|
|
||||||
|
// Try parse optional timestamp (7 chars)
|
||||||
|
if (raw.length >= offset + 7) {
|
||||||
|
const timeStr = raw.substring(offset, offset + 7);
|
||||||
|
const { timestamp, segment: tsSegment } = Timestamp.fromString(timeStr, withStructure);
|
||||||
|
if (timestamp) {
|
||||||
|
offset += 7;
|
||||||
|
if (tsSegment) segments.push(tsSegment);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Remaining text is status text
|
||||||
|
const text = raw.substring(offset);
|
||||||
|
if (!text) return { payload: null };
|
||||||
|
const doNotArchive = text.includes(DO_NOT_ARCHIVE_MARKER);
|
||||||
|
|
||||||
|
// Detect trailing Maidenhead locator (4 or 6 chars) at end of text separated by space
|
||||||
|
let maidenhead: string | undefined;
|
||||||
|
const mhMatch = text.match(/\s([A-Ra-r]{2}\d{2}(?:[A-Ra-r]{2})?)$/);
|
||||||
|
let statusText = text;
|
||||||
|
if (mhMatch) {
|
||||||
|
maidenhead = mhMatch[1].toUpperCase();
|
||||||
|
statusText = text.slice(0, mhMatch.index).trimEnd();
|
||||||
|
}
|
||||||
|
|
||||||
|
const payload: StatusPayload = {
|
||||||
|
type: DataType.Status,
|
||||||
|
doNotArchive,
|
||||||
|
timestamp: undefined,
|
||||||
|
text: statusText
|
||||||
|
};
|
||||||
|
|
||||||
|
// If timestamp was parsed, attach it
|
||||||
|
if (segments.length > 0) {
|
||||||
|
// The first segment may be timestamp; parseTimestamp returns the Timestamp object
|
||||||
|
// Re-parse to obtain timestamp object (cheap) - alternate would be to capture earlier
|
||||||
|
const timeSegment = segments.find((s) => s.name === "timestamp");
|
||||||
|
if (timeSegment) {
|
||||||
|
const tsStr = new TextDecoder().decode(timeSegment.data);
|
||||||
|
const { timestamp } = Timestamp.fromString(tsStr, false);
|
||||||
|
if (timestamp) payload.timestamp = timestamp;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (maidenhead) payload.maidenhead = maidenhead;
|
||||||
|
|
||||||
|
if (withStructure) {
|
||||||
|
segments.push({
|
||||||
|
name: "status",
|
||||||
|
data: new TextEncoder().encode(text).buffer,
|
||||||
|
isString: true,
|
||||||
|
fields: [{ type: FieldType.STRING, name: "text", length: text.length }]
|
||||||
|
});
|
||||||
|
return { payload, segment: segments };
|
||||||
|
}
|
||||||
|
|
||||||
|
return { payload };
|
||||||
|
};
|
||||||
|
|
||||||
|
export default decodeStatusPayload;
|
||||||
197
src/payload.telemetry.ts
Normal file
197
src/payload.telemetry.ts
Normal file
@@ -0,0 +1,197 @@
|
|||||||
|
import { FieldType, type Segment } from "@hamradio/packet";
|
||||||
|
|
||||||
|
import {
|
||||||
|
DataType,
|
||||||
|
type Payload,
|
||||||
|
type TelemetryBitSensePayload,
|
||||||
|
type TelemetryCoefficientsPayload,
|
||||||
|
type TelemetryDataPayload,
|
||||||
|
type TelemetryParameterPayload,
|
||||||
|
type TelemetryUnitPayload
|
||||||
|
} from "./frame.types";
|
||||||
|
|
||||||
|
/**
 * Decode an APRS telemetry payload ('T' identifier). Dispatches on the
 * leading token of the remaining text to one of five variants:
 * data ('#'), parameters (PARAM), units (UNIT), coefficients (COEFF),
 * and bit-sense/project (BITS).
 *
 * NOTE(review): standard APRS telemetry data is "T#sss,a1,a2,a3,a4,a5,dddddddd"
 * (comma-separated); this parser expects a space-separated local convention
 * ("#sss a1,a2,... ddd") — confirm this matches the project's encoder.
 *
 * @param raw - Full payload text including the leading identifier.
 * @param withStructure - When true, also emit structure segments.
 * @returns Decoded payload (or null on failure) plus optional segments.
 */
export const decodeTelemetryPayload = (
  raw: string,
  withStructure: boolean = false
): {
  payload: Payload | null;
  segment?: Segment[];
} => {
  try {
    if (raw.length < 2) return { payload: null };

    const rest = raw.substring(1).trim();
    if (!rest) return { payload: null };

    // NOTE(review): both ternary branches are identical ([]).
    const segments: Segment[] = withStructure ? [] : [];

    // Telemetry data: convention used here: starts with '#' then sequence then analogs and digital
    if (rest.startsWith("#")) {
      // parts[0]=sequence, parts[1]=comma-separated analogs, parts[2]=digital bits
      const parts = rest.substring(1).trim().split(/\s+/);
      const seq = parseInt(parts[0], 10);
      let analog: number[] = [];
      let digital = 0;

      if (parts.length >= 2) {
        // analogs as comma separated
        analog = parts[1].split(",").map((v) => parseFloat(v));
      }

      if (parts.length >= 3) {
        digital = parseInt(parts[2], 10);
      }

      if (withStructure) {
        segments.push({
          name: "telemetry sequence",
          data: new TextEncoder().encode(String(seq)).buffer,
          isString: true,
          fields: [
            {
              type: FieldType.STRING,
              name: "sequence",
              length: String(seq).length
            }
          ]
        });

        segments.push({
          name: "telemetry analog",
          data: new TextEncoder().encode(parts[1] || "").buffer,
          isString: true,
          fields: [
            {
              type: FieldType.STRING,
              name: "analogs",
              length: (parts[1] || "").length
            }
          ]
        });

        segments.push({
          name: "telemetry digital",
          data: new TextEncoder().encode(String(digital)).buffer,
          isString: true,
          fields: [
            {
              type: FieldType.STRING,
              name: "digital",
              length: String(digital).length
            }
          ]
        });
      }

      // Unparseable sequence/digital values degrade to 0 rather than failing.
      const payload: TelemetryDataPayload = {
        type: DataType.TelemetryData,
        variant: "data",
        sequence: isNaN(seq) ? 0 : seq,
        analog,
        digital: isNaN(digital) ? 0 : digital
      };

      if (withStructure) return { payload, segment: segments };
      return { payload };
    }

    // Telemetry parameters: 'PARAM' keyword
    if (/^PARAM/i.test(rest)) {
      const after = rest.replace(/^PARAM\s*/i, "");
      // Names may be separated by commas and/or whitespace.
      const names = after.split(/[,\s]+/).filter(Boolean);
      if (withStructure) {
        segments.push({
          name: "telemetry parameters",
          data: new TextEncoder().encode(after).buffer,
          isString: true,
          fields: [{ type: FieldType.STRING, name: "names", length: after.length }]
        });
      }
      const payload: TelemetryParameterPayload = {
        type: DataType.TelemetryData,
        variant: "parameters",
        names
      };
      if (withStructure) return { payload, segment: segments };
      return { payload };
    }

    // Telemetry units: 'UNIT'
    if (/^UNIT/i.test(rest)) {
      const after = rest.replace(/^UNIT\s*/i, "");
      const units = after.split(/[,\s]+/).filter(Boolean);
      if (withStructure) {
        segments.push({
          name: "telemetry units",
          data: new TextEncoder().encode(after).buffer,
          isString: true,
          fields: [{ type: FieldType.STRING, name: "units", length: after.length }]
        });
      }
      const payload: TelemetryUnitPayload = {
        type: DataType.TelemetryData,
        variant: "unit",
        units
      };
      if (withStructure) return { payload, segment: segments };
      return { payload };
    }

    // Telemetry coefficients: 'COEFF' a:,b:,c:
    if (/^COEFF/i.test(rest)) {
      const after = rest.replace(/^COEFF\s*/i, "");
      // Each group is "A:v1,v2,..." terminated by whitespace or ';'.
      const aMatch = after.match(/A:([^\s;]+)/i);
      const bMatch = after.match(/B:([^\s;]+)/i);
      const cMatch = after.match(/C:([^\s;]+)/i);
      // Missing groups become empty arrays.
      const parseList = (s?: string) => (s ? s.split(",").map((v) => parseFloat(v)) : []);
      const coefficients = {
        a: parseList(aMatch?.[1]),
        b: parseList(bMatch?.[1]),
        c: parseList(cMatch?.[1])
      };
      if (withStructure) {
        segments.push({
          name: "telemetry coefficients",
          data: new TextEncoder().encode(after).buffer,
          isString: true,
          fields: [{ type: FieldType.STRING, name: "coeffs", length: after.length }]
        });
      }
      const payload: TelemetryCoefficientsPayload = {
        type: DataType.TelemetryData,
        variant: "coefficients",
        coefficients
      };
      if (withStructure) return { payload, segment: segments };
      return { payload };
    }

    // Telemetry bitsense/project: 'BITS' <number> [project]
    // NOTE(review): /^BITS?/ also matches any token beginning with "BIT" —
    // confirm no other telemetry keyword can start that way.
    if (/^BITS?/i.test(rest)) {
      const parts = rest.split(/\s+/).slice(1);
      const sense = parts.length > 0 ? parseInt(parts[0], 10) : 0;
      // Everything after the sense value is treated as the project name.
      const projectName = parts.length > 1 ? parts.slice(1).join(" ") : undefined;
      if (withStructure) {
        segments.push({
          name: "telemetry bitsense",
          data: new TextEncoder().encode(rest).buffer,
          isString: true,
          fields: [{ type: FieldType.STRING, name: "bitsense", length: rest.length }]
        });
      }
      const payload: TelemetryBitSensePayload = {
        type: DataType.TelemetryData,
        variant: "bitsense",
        sense: isNaN(sense) ? 0 : sense,
        ...(projectName ? { projectName } : {})
      };
      if (withStructure) return { payload, segment: segments };
      return { payload };
    }

    // Unrecognized telemetry sub-format.
    return { payload: null };
  } catch {
    return { payload: null };
  }
};
|
||||||
|
|
||||||
|
export default decodeTelemetryPayload;
|
||||||
135
src/payload.thirdparty.ts
Normal file
135
src/payload.thirdparty.ts
Normal file
@@ -0,0 +1,135 @@
|
|||||||
|
import { FieldType, type Segment } from "@hamradio/packet";
|
||||||
|
|
||||||
|
import { Frame } from "./frame";
|
||||||
|
import { DataType, type Payload, type ThirdPartyPayload, UserDefinedPayload } from "./frame.types";
|
||||||
|
|
||||||
|
export const decodeUserDefinedPayload = (
|
||||||
|
raw: string,
|
||||||
|
withStructure: boolean = false
|
||||||
|
): {
|
||||||
|
payload: Payload | null;
|
||||||
|
segment?: Segment[];
|
||||||
|
} => {
|
||||||
|
try {
|
||||||
|
if (raw.length < 2) return { payload: null };
|
||||||
|
|
||||||
|
// content after '{'
|
||||||
|
const rest = raw.substring(1);
|
||||||
|
|
||||||
|
// user packet type is first token (up to first space) often like '01' or 'TYP'
|
||||||
|
const match = rest.match(/^([^\s]+)\s*(.*)$/s);
|
||||||
|
let userPacketType = "";
|
||||||
|
let data = "";
|
||||||
|
if (match) {
|
||||||
|
userPacketType = match[1] || "";
|
||||||
|
data = (match[2] || "").trim();
|
||||||
|
}
|
||||||
|
|
||||||
|
const payload: UserDefinedPayload = {
|
||||||
|
type: DataType.UserDefined,
|
||||||
|
userPacketType,
|
||||||
|
data
|
||||||
|
} as const;
|
||||||
|
|
||||||
|
if (withStructure) {
|
||||||
|
const segments: Segment[] = [];
|
||||||
|
segments.push({
|
||||||
|
name: "user-defined",
|
||||||
|
data: new TextEncoder().encode(rest).buffer,
|
||||||
|
isString: true,
|
||||||
|
fields: [{ type: FieldType.STRING, name: "raw", length: rest.length }]
|
||||||
|
});
|
||||||
|
|
||||||
|
segments.push({
|
||||||
|
name: "user-packet-type",
|
||||||
|
data: new TextEncoder().encode(userPacketType).buffer,
|
||||||
|
isString: true,
|
||||||
|
fields: [
|
||||||
|
{
|
||||||
|
type: FieldType.STRING,
|
||||||
|
name: "type",
|
||||||
|
length: userPacketType.length
|
||||||
|
}
|
||||||
|
]
|
||||||
|
});
|
||||||
|
|
||||||
|
segments.push({
|
||||||
|
name: "user-data",
|
||||||
|
data: new TextEncoder().encode(data).buffer,
|
||||||
|
isString: true,
|
||||||
|
fields: [{ type: FieldType.STRING, name: "data", length: data.length }]
|
||||||
|
});
|
||||||
|
|
||||||
|
return { payload, segment: segments };
|
||||||
|
}
|
||||||
|
|
||||||
|
return { payload };
|
||||||
|
} catch {
|
||||||
|
return { payload: null };
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
export const decodeThirdPartyPayload = (
|
||||||
|
raw: string,
|
||||||
|
withStructure: boolean = false
|
||||||
|
): {
|
||||||
|
payload: Payload | null;
|
||||||
|
segment?: Segment[];
|
||||||
|
} => {
|
||||||
|
try {
|
||||||
|
if (raw.length < 2) return { payload: null };
|
||||||
|
|
||||||
|
// Content after '}' is the encapsulated third-party frame or raw data
|
||||||
|
const rest = raw.substring(1);
|
||||||
|
|
||||||
|
// Attempt to parse the embedded text as a full APRS frame (route:payload)
|
||||||
|
let nestedFrame: Frame | undefined;
|
||||||
|
try {
|
||||||
|
// parseFrame is defined in this module; use Frame.parse to attempt parse
|
||||||
|
nestedFrame = Frame.parse(rest);
|
||||||
|
} catch {
|
||||||
|
nestedFrame = undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
const payload: ThirdPartyPayload = {
|
||||||
|
type: DataType.ThirdParty,
|
||||||
|
comment: rest,
|
||||||
|
...(nestedFrame ? { frame: nestedFrame } : {})
|
||||||
|
} as const;
|
||||||
|
|
||||||
|
if (withStructure) {
|
||||||
|
const segments: Segment[] = [];
|
||||||
|
|
||||||
|
segments.push({
|
||||||
|
name: "third-party",
|
||||||
|
data: new TextEncoder().encode(rest).buffer,
|
||||||
|
isString: true,
|
||||||
|
fields: [{ type: FieldType.STRING, name: "raw", length: rest.length }]
|
||||||
|
});
|
||||||
|
|
||||||
|
if (nestedFrame) {
|
||||||
|
// Include a short section pointing to the nested frame's data (stringified)
|
||||||
|
const nf = nestedFrame;
|
||||||
|
const nfStr = `${nf.source.toString()}>${nf.destination.toString()}:${nf.payload}`;
|
||||||
|
segments.push({
|
||||||
|
name: "third-party-nested-frame",
|
||||||
|
data: new TextEncoder().encode(nfStr).buffer,
|
||||||
|
isString: true,
|
||||||
|
fields: [
|
||||||
|
{
|
||||||
|
type: FieldType.STRING,
|
||||||
|
name: "nested",
|
||||||
|
length: nfStr.length
|
||||||
|
}
|
||||||
|
]
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return { payload, segment: segments };
|
||||||
|
}
|
||||||
|
|
||||||
|
return { payload };
|
||||||
|
} catch {
|
||||||
|
return { payload: null };
|
||||||
|
}
|
||||||
|
};
|
||||||
129
src/payload.weather.ts
Normal file
129
src/payload.weather.ts
Normal file
@@ -0,0 +1,129 @@
|
|||||||
|
import { FieldType, type Segment } from "@hamradio/packet";
|
||||||
|
|
||||||
|
import { DataType, type IPosition, type Payload, type WeatherPayload } from "./frame.types";
|
||||||
|
import { isCompressedPosition, parseCompressedPosition, parseUncompressedPosition } from "./payload.position";
|
||||||
|
import Timestamp from "./timestamp";
|
||||||
|
|
||||||
|
export const decodeWeatherPayload = (
|
||||||
|
raw: string,
|
||||||
|
withStructure: boolean = false
|
||||||
|
): {
|
||||||
|
payload: Payload | null;
|
||||||
|
segment?: Segment[];
|
||||||
|
} => {
|
||||||
|
try {
|
||||||
|
if (raw.length < 2) return { payload: null };
|
||||||
|
|
||||||
|
let offset = 1; // skip '_' data type
|
||||||
|
const segments: Segment[] = withStructure ? [] : [];
|
||||||
|
|
||||||
|
// Try optional timestamp (7 chars)
|
||||||
|
let timestamp;
|
||||||
|
if (raw.length >= offset + 7) {
|
||||||
|
const timeStr = raw.substring(offset, offset + 7);
|
||||||
|
const parsed = Timestamp.fromString(timeStr, withStructure);
|
||||||
|
timestamp = parsed.timestamp;
|
||||||
|
if (parsed.segment) {
|
||||||
|
segments.push(parsed.segment);
|
||||||
|
}
|
||||||
|
if (timestamp) offset += 7;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Try optional position following timestamp
|
||||||
|
let position: IPosition | undefined;
|
||||||
|
let consumed = 0;
|
||||||
|
const tail = raw.substring(offset);
|
||||||
|
if (tail.length > 0) {
|
||||||
|
// If the tail starts with a wind token like DDD/SSS, treat it as weather data
|
||||||
|
// and do not attempt to parse it as a position (avoids mis-detecting wind
|
||||||
|
// values as compressed position fields).
|
||||||
|
if (/^\s*\d{3}\/\d{1,3}/.test(tail)) {
|
||||||
|
// no position present; leave consumed = 0
|
||||||
|
} else if (isCompressedPosition(tail)) {
|
||||||
|
const parsed = parseCompressedPosition(tail, withStructure);
|
||||||
|
if (parsed.position) {
|
||||||
|
position = {
|
||||||
|
latitude: parsed.position.latitude,
|
||||||
|
longitude: parsed.position.longitude,
|
||||||
|
symbol: parsed.position.symbol,
|
||||||
|
altitude: parsed.position.altitude
|
||||||
|
};
|
||||||
|
if (parsed.segment) segments.push(parsed.segment);
|
||||||
|
consumed = 13;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
const parsed = parseUncompressedPosition(tail, withStructure);
|
||||||
|
if (parsed.position) {
|
||||||
|
position = {
|
||||||
|
latitude: parsed.position.latitude,
|
||||||
|
longitude: parsed.position.longitude,
|
||||||
|
symbol: parsed.position.symbol,
|
||||||
|
ambiguity: parsed.position.ambiguity
|
||||||
|
};
|
||||||
|
if (parsed.segment) segments.push(parsed.segment);
|
||||||
|
consumed = 19;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
offset += consumed;
|
||||||
|
|
||||||
|
const rest = raw.substring(offset).trim();
|
||||||
|
|
||||||
|
const payload: WeatherPayload = {
|
||||||
|
type: DataType.WeatherReportNoPosition
|
||||||
|
};
|
||||||
|
if (timestamp) payload.timestamp = timestamp;
|
||||||
|
if (position) payload.position = position;
|
||||||
|
|
||||||
|
if (rest && rest.length > 0) {
|
||||||
|
// Parse common tokens
|
||||||
|
// Wind: DDD/SSS [gGGG]
|
||||||
|
const windMatch = rest.match(/(\d{3})\/(\d{1,3})(?:g(\d{1,3}))?/);
|
||||||
|
if (windMatch) {
|
||||||
|
payload.windDirection = parseInt(windMatch[1], 10);
|
||||||
|
payload.windSpeed = parseInt(windMatch[2], 10);
|
||||||
|
if (windMatch[3]) payload.windGust = parseInt(windMatch[3], 10);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Temperature: tNNN (F)
|
||||||
|
const tempMatch = rest.match(/t(-?\d{1,3})/i);
|
||||||
|
if (tempMatch) payload.temperature = parseInt(tempMatch[1], 10);
|
||||||
|
|
||||||
|
// Rain: rNNN (last hour), pNNN (24h), PNNN (since midnight) - values are hundredths of inch
|
||||||
|
const rMatch = rest.match(/r(\d{3})/);
|
||||||
|
if (rMatch) payload.rainLastHour = parseInt(rMatch[1], 10);
|
||||||
|
const pMatch = rest.match(/p(\d{3})/);
|
||||||
|
if (pMatch) payload.rainLast24Hours = parseInt(pMatch[1], 10);
|
||||||
|
const PMatch = rest.match(/P(\d{3})/);
|
||||||
|
if (PMatch) payload.rainSinceMidnight = parseInt(PMatch[1], 10);
|
||||||
|
|
||||||
|
// Humidity: hNN
|
||||||
|
const hMatch = rest.match(/h(\d{1,3})/);
|
||||||
|
if (hMatch) payload.humidity = parseInt(hMatch[1], 10);
|
||||||
|
|
||||||
|
// Pressure: bXXXX or bXXXXX (tenths of millibar)
|
||||||
|
const bMatch = rest.match(/b(\d{4,5})/);
|
||||||
|
if (bMatch) payload.pressure = parseInt(bMatch[1], 10);
|
||||||
|
|
||||||
|
// Add raw comment
|
||||||
|
payload.comment = rest;
|
||||||
|
|
||||||
|
if (withStructure) {
|
||||||
|
segments.push({
|
||||||
|
name: "weather",
|
||||||
|
data: new TextEncoder().encode(rest).buffer,
|
||||||
|
isString: true,
|
||||||
|
fields: [{ type: FieldType.STRING, name: "text", length: rest.length }]
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (withStructure) return { payload, segment: segments };
|
||||||
|
return { payload };
|
||||||
|
} catch {
|
||||||
|
return { payload: null };
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
export default decodeWeatherPayload;
|
||||||
@@ -1,4 +1,4 @@
|
|||||||
import { IPosition, ISymbol } from "./frame.types";
|
import { IDirectionFinding, IPosition, IPowerHeightGain, ISymbol } from "./frame.types";
|
||||||
|
|
||||||
export class Symbol implements ISymbol {
|
export class Symbol implements ISymbol {
|
||||||
table: string; // Symbol table identifier
|
table: string; // Symbol table identifier
|
||||||
@@ -32,6 +32,9 @@ export class Position implements IPosition {
|
|||||||
course?: number; // Course in degrees
|
course?: number; // Course in degrees
|
||||||
symbol?: Symbol;
|
symbol?: Symbol;
|
||||||
comment?: string;
|
comment?: string;
|
||||||
|
range?: number;
|
||||||
|
phg?: IPowerHeightGain;
|
||||||
|
dfs?: IDirectionFinding;
|
||||||
|
|
||||||
constructor(data: Partial<IPosition>) {
|
constructor(data: Partial<IPosition>) {
|
||||||
this.latitude = data.latitude ?? 0;
|
this.latitude = data.latitude ?? 0;
|
||||||
@@ -40,33 +43,38 @@ export class Position implements IPosition {
|
|||||||
this.altitude = data.altitude;
|
this.altitude = data.altitude;
|
||||||
this.speed = data.speed;
|
this.speed = data.speed;
|
||||||
this.course = data.course;
|
this.course = data.course;
|
||||||
if (typeof data.symbol === 'string') {
|
if (typeof data.symbol === "string") {
|
||||||
this.symbol = new Symbol(data.symbol);
|
this.symbol = new Symbol(data.symbol);
|
||||||
} else if (data.symbol) {
|
} else if (data.symbol) {
|
||||||
this.symbol = new Symbol(data.symbol.table, data.symbol.code);
|
this.symbol = new Symbol(data.symbol.table, data.symbol.code);
|
||||||
}
|
}
|
||||||
this.comment = data.comment;
|
this.comment = data.comment;
|
||||||
|
this.range = data.range;
|
||||||
|
this.phg = data.phg;
|
||||||
|
this.dfs = data.dfs;
|
||||||
}
|
}
|
||||||
|
|
||||||
public toString(): string {
|
public toString(): string {
|
||||||
const latStr = this.latitude.toFixed(5);
|
const latStr = this.latitude.toFixed(5);
|
||||||
const lonStr = this.longitude.toFixed(5);
|
const lonStr = this.longitude.toFixed(5);
|
||||||
const altStr = this.altitude !== undefined ? `,${this.altitude}m` : '';
|
const altStr = this.altitude !== undefined ? `,${this.altitude}m` : "";
|
||||||
return `${latStr},${lonStr}${altStr}`;
|
return `${latStr},${lonStr}${altStr}`;
|
||||||
}
|
}
|
||||||
|
|
||||||
public distanceTo(other: IPosition): number {
|
public distanceTo(other: IPosition): number {
|
||||||
const R = 6371e3; // Earth radius in meters
|
const R = 6371e3; // Earth radius in meters
|
||||||
const φ1 = this.latitude * Math.PI / 180;
|
const lat1 = (this.latitude * Math.PI) / 180;
|
||||||
const φ2 = other.latitude * Math.PI / 180;
|
const lat2 = (other.latitude * Math.PI) / 180;
|
||||||
const Δφ = (other.latitude - this.latitude) * Math.PI / 180;
|
const dLat = ((other.latitude - this.latitude) * Math.PI) / 180;
|
||||||
const Δλ = (other.longitude - this.longitude) * Math.PI / 180;
|
const dLon = ((other.longitude - this.longitude) * Math.PI) / 180;
|
||||||
|
|
||||||
const a = Math.sin(Δφ/2) * Math.sin(Δφ/2) +
|
const a =
|
||||||
Math.cos(φ1) * Math.cos(φ2) *
|
Math.sin(dLat / 2) * Math.sin(dLat / 2) +
|
||||||
Math.sin(Δλ/2) * Math.sin(Δλ/2);
|
Math.cos(lat1) * Math.cos(lat2) * Math.sin(dLon / 2) * Math.sin(dLon / 2);
|
||||||
const c = 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1 - a));
|
const c = 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1 - a));
|
||||||
|
|
||||||
return R * c; // Distance in meters
|
return R * c; // Distance in meters
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export default Position;
|
||||||
|
|||||||
189
src/timestamp.ts
Normal file
189
src/timestamp.ts
Normal file
@@ -0,0 +1,189 @@
|
|||||||
|
import { FieldType, Segment } from "@hamradio/packet";
|
||||||
|
|
||||||
|
import { ITimestamp } from "./frame.types";
|
||||||
|
|
||||||
|
export class Timestamp implements ITimestamp {
|
||||||
|
day?: number;
|
||||||
|
month?: number;
|
||||||
|
hours: number;
|
||||||
|
minutes: number;
|
||||||
|
seconds?: number;
|
||||||
|
format: "DHM" | "HMS" | "MDHM";
|
||||||
|
zulu?: boolean;
|
||||||
|
|
||||||
|
constructor(
|
||||||
|
hours: number,
|
||||||
|
minutes: number,
|
||||||
|
format: "DHM" | "HMS" | "MDHM",
|
||||||
|
options: {
|
||||||
|
day?: number;
|
||||||
|
month?: number;
|
||||||
|
seconds?: number;
|
||||||
|
zulu?: boolean;
|
||||||
|
} = {}
|
||||||
|
) {
|
||||||
|
this.hours = hours;
|
||||||
|
this.minutes = minutes;
|
||||||
|
this.format = format;
|
||||||
|
this.day = options.day;
|
||||||
|
this.month = options.month;
|
||||||
|
this.seconds = options.seconds;
|
||||||
|
this.zulu = options.zulu;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Convert APRS timestamp to JavaScript Date object
|
||||||
|
* Note: APRS timestamps don't include year, so we use current year
|
||||||
|
* For DHM format, we find the most recent occurrence of that day
|
||||||
|
* For HMS format, we use current date
|
||||||
|
* For MDHM format, we use the specified month/day in current year
|
||||||
|
*/
|
||||||
|
toDate(): Date {
|
||||||
|
const now = new Date();
|
||||||
|
|
||||||
|
if (this.format === "DHM") {
|
||||||
|
// Day-Hour-Minute format (UTC)
|
||||||
|
// Find the most recent occurrence of this day
|
||||||
|
const currentYear = this.zulu ? now.getUTCFullYear() : now.getFullYear();
|
||||||
|
const currentMonth = this.zulu ? now.getUTCMonth() : now.getMonth();
|
||||||
|
|
||||||
|
let date: Date;
|
||||||
|
if (this.zulu) {
|
||||||
|
date = new Date(Date.UTC(currentYear, currentMonth, this.day!, this.hours, this.minutes, 0, 0));
|
||||||
|
} else {
|
||||||
|
date = new Date(currentYear, currentMonth, this.day!, this.hours, this.minutes, 0, 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
// If the date is in the future, it's from last month
|
||||||
|
if (date > now) {
|
||||||
|
if (this.zulu) {
|
||||||
|
date = new Date(Date.UTC(currentYear, currentMonth - 1, this.day!, this.hours, this.minutes, 0, 0));
|
||||||
|
} else {
|
||||||
|
date = new Date(currentYear, currentMonth - 1, this.day!, this.hours, this.minutes, 0, 0);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return date;
|
||||||
|
} else if (this.format === "HMS") {
|
||||||
|
// Hour-Minute-Second format (UTC)
|
||||||
|
// Use current date
|
||||||
|
if (this.zulu) {
|
||||||
|
const date = new Date();
|
||||||
|
date.setUTCHours(this.hours, this.minutes, this.seconds || 0, 0);
|
||||||
|
|
||||||
|
// If time is in the future, it's from yesterday
|
||||||
|
if (date > now) {
|
||||||
|
date.setUTCDate(date.getUTCDate() - 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
return date;
|
||||||
|
} else {
|
||||||
|
const date = new Date();
|
||||||
|
date.setHours(this.hours, this.minutes, this.seconds || 0, 0);
|
||||||
|
|
||||||
|
if (date > now) {
|
||||||
|
date.setDate(date.getDate() - 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
return date;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// MDHM format: Month-Day-Hour-Minute (local time)
|
||||||
|
const currentYear = now.getFullYear();
|
||||||
|
let date = new Date(currentYear, (this.month || 1) - 1, this.day!, this.hours, this.minutes, 0, 0);
|
||||||
|
|
||||||
|
// If date is in the future, it's from last year
|
||||||
|
if (date > now) {
|
||||||
|
date = new Date(currentYear - 1, (this.month || 1) - 1, this.day!, this.hours, this.minutes, 0, 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
return date;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
static fromString(
|
||||||
|
str: string,
|
||||||
|
withStructure: boolean = false
|
||||||
|
): {
|
||||||
|
timestamp: Timestamp | undefined;
|
||||||
|
segment?: Segment;
|
||||||
|
} {
|
||||||
|
if (str.length !== 7) return { timestamp: undefined };
|
||||||
|
|
||||||
|
const timeType = str.charAt(6);
|
||||||
|
|
||||||
|
if (timeType === "z") {
|
||||||
|
// DHM format: Day-Hour-Minute (UTC)
|
||||||
|
const timestamp = new Timestamp(parseInt(str.substring(2, 4), 10), parseInt(str.substring(4, 6), 10), "DHM", {
|
||||||
|
day: parseInt(str.substring(0, 2), 10),
|
||||||
|
zulu: true
|
||||||
|
});
|
||||||
|
|
||||||
|
const segment = withStructure
|
||||||
|
? {
|
||||||
|
name: "timestamp",
|
||||||
|
data: new TextEncoder().encode(str).buffer,
|
||||||
|
isString: true,
|
||||||
|
fields: [
|
||||||
|
{ type: FieldType.STRING, name: "day (DD)", length: 2 },
|
||||||
|
{ type: FieldType.STRING, name: "hour (HH)", length: 2 },
|
||||||
|
{ type: FieldType.STRING, name: "minute (MM)", length: 2 },
|
||||||
|
{ type: FieldType.CHAR, name: "timezone indicator", length: 1 }
|
||||||
|
]
|
||||||
|
}
|
||||||
|
: undefined;
|
||||||
|
|
||||||
|
return { timestamp, segment };
|
||||||
|
} else if (timeType === "h") {
|
||||||
|
// HMS format: Hour-Minute-Second (UTC)
|
||||||
|
const timestamp = new Timestamp(parseInt(str.substring(0, 2), 10), parseInt(str.substring(2, 4), 10), "HMS", {
|
||||||
|
seconds: parseInt(str.substring(4, 6), 10),
|
||||||
|
zulu: true
|
||||||
|
});
|
||||||
|
|
||||||
|
const segment = withStructure
|
||||||
|
? {
|
||||||
|
name: "timestamp",
|
||||||
|
data: new TextEncoder().encode(str).buffer,
|
||||||
|
isString: true,
|
||||||
|
fields: [
|
||||||
|
{ type: FieldType.STRING, name: "hour (HH)", length: 2 },
|
||||||
|
{ type: FieldType.STRING, name: "minute (MM)", length: 2 },
|
||||||
|
{ type: FieldType.STRING, name: "second (SS)", length: 2 },
|
||||||
|
{ type: FieldType.CHAR, name: "timezone indicator", length: 1 }
|
||||||
|
]
|
||||||
|
}
|
||||||
|
: undefined;
|
||||||
|
|
||||||
|
return { timestamp, segment };
|
||||||
|
} else if (timeType === "/") {
|
||||||
|
// MDHM format: Month-Day-Hour-Minute (local)
|
||||||
|
const timestamp = new Timestamp(parseInt(str.substring(4, 6), 10), parseInt(str.substring(6, 8), 10), "MDHM", {
|
||||||
|
month: parseInt(str.substring(0, 2), 10),
|
||||||
|
day: parseInt(str.substring(2, 4), 10),
|
||||||
|
zulu: false
|
||||||
|
});
|
||||||
|
|
||||||
|
const segment = withStructure
|
||||||
|
? {
|
||||||
|
name: "timestamp",
|
||||||
|
data: new TextEncoder().encode(str).buffer,
|
||||||
|
isString: true,
|
||||||
|
fields: [
|
||||||
|
{ type: FieldType.STRING, name: "month (MM)", length: 2 },
|
||||||
|
{ type: FieldType.STRING, name: "day (DD)", length: 2 },
|
||||||
|
{ type: FieldType.STRING, name: "hour (HH)", length: 2 },
|
||||||
|
{ type: FieldType.STRING, name: "minute (MM)", length: 2 },
|
||||||
|
{ type: FieldType.CHAR, name: "timezone indicator", length: 1 }
|
||||||
|
]
|
||||||
|
}
|
||||||
|
: undefined;
|
||||||
|
|
||||||
|
return { timestamp, segment };
|
||||||
|
}
|
||||||
|
|
||||||
|
return { timestamp: undefined };
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export default Timestamp;
|
||||||
22
test/deviceid.test.ts
Normal file
22
test/deviceid.test.ts
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
import { describe, expect, it } from "vitest";
|
||||||
|
|
||||||
|
import { getDeviceID } from "../src/deviceid";
|
||||||
|
import { Frame } from "../src/frame";
|
||||||
|
|
||||||
|
describe("DeviceID parsing", () => {
|
||||||
|
it("parses known device ID from tocall", () => {
|
||||||
|
const data = "WB2OSZ-5>APDW17:!4237.14NS07120.83W#PHG7140";
|
||||||
|
const frame = Frame.fromString(data);
|
||||||
|
const deviceID = getDeviceID(frame.destination);
|
||||||
|
expect(deviceID).not.toBeNull();
|
||||||
|
expect(deviceID?.tocall).toBe("APDW??");
|
||||||
|
expect(deviceID?.vendor).toBe("WB2OSZ");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("returns null for unknown device ID", () => {
|
||||||
|
const data = "CALL>WORLD:!4237.14NS07120.83W#PHG7140";
|
||||||
|
const frame = Frame.fromString(data);
|
||||||
|
const deviceID = getDeviceID(frame.destination);
|
||||||
|
expect(deviceID).toBeNull();
|
||||||
|
});
|
||||||
|
});
|
||||||
1366
test/frame.test.ts
1366
test/frame.test.ts
File diff suppressed because it is too large
Load Diff
@@ -1,85 +1,86 @@
|
|||||||
import { describe, it, expect } from 'vitest';
|
import { describe, expect, it } from "vitest";
|
||||||
|
|
||||||
import {
|
import {
|
||||||
base91ToNumber,
|
base91ToNumber,
|
||||||
knotsToKmh,
|
|
||||||
kmhToKnots,
|
|
||||||
feetToMeters,
|
|
||||||
metersToFeet,
|
|
||||||
celsiusToFahrenheit,
|
celsiusToFahrenheit,
|
||||||
fahrenheitToCelsius,
|
fahrenheitToCelsius,
|
||||||
} from '../src/parser';
|
feetToMeters,
|
||||||
|
kmhToKnots,
|
||||||
|
knotsToKmh,
|
||||||
|
metersToFeet
|
||||||
|
} from "../src/parser";
|
||||||
|
|
||||||
describe('parser utilities', () => {
|
describe("parser utilities", () => {
|
||||||
describe('base91ToNumber', () => {
|
describe("base91ToNumber", () => {
|
||||||
it('decodes all-! to 0', () => {
|
it("decodes all-! to 0", () => {
|
||||||
expect(base91ToNumber('!!!!')).toBe(0);
|
expect(base91ToNumber("!!!!")).toBe(0);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('decodes single character correctly', () => {
|
it("decodes single character correctly", () => {
|
||||||
// 'A' === 65, digit = 65 - 33 = 32
|
// 'A' === 65, digit = 65 - 33 = 32
|
||||||
expect(base91ToNumber('A')).toBe(32);
|
expect(base91ToNumber("A")).toBe(32);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should decode multiple Base91 characters', () => {
|
it("should decode multiple Base91 characters", () => {
|
||||||
// "!!" = 0 * 91 + 0 = 0
|
// "!!" = 0 * 91 + 0 = 0
|
||||||
expect(base91ToNumber('!!')).toBe(0);
|
expect(base91ToNumber("!!")).toBe(0);
|
||||||
|
|
||||||
// "!#" = 0 * 91 + 2 = 2
|
// "!#" = 0 * 91 + 2 = 2
|
||||||
expect(base91ToNumber('!#')).toBe(2);
|
expect(base91ToNumber("!#")).toBe(2);
|
||||||
|
|
||||||
// "#!" = 2 * 91 + 0 = 182
|
// "#!" = 2 * 91 + 0 = 182
|
||||||
expect(base91ToNumber('#!')).toBe(182);
|
expect(base91ToNumber("#!")).toBe(182);
|
||||||
|
|
||||||
// "##" = 2 * 91 + 2 = 184
|
// "##" = 2 * 91 + 2 = 184
|
||||||
expect(base91ToNumber('##')).toBe(184);
|
expect(base91ToNumber("##")).toBe(184);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should decode 4-character Base91 strings (used in APRS)', () => {
|
it("should decode 4-character Base91 strings (used in APRS)", () => {
|
||||||
// Test with printable ASCII Base91 characters (33-123)
|
// Test with printable ASCII Base91 characters (33-123)
|
||||||
const testValue = base91ToNumber('!#%\'');
|
const testValue = base91ToNumber("!#%'");
|
||||||
expect(testValue).toBeGreaterThan(0);
|
expect(testValue).toBeGreaterThan(0);
|
||||||
expect(testValue).toBeLessThan(91 * 91 * 91 * 91);
|
expect(testValue).toBeLessThan(91 * 91 * 91 * 91);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should decode maximum valid Base91 value', () => {
|
it("should decode maximum valid Base91 value", () => {
|
||||||
// Maximum is '{' (ASCII 123, digit 90) repeated
|
// Maximum is '{' (ASCII 123, digit 90) repeated
|
||||||
const maxValue = base91ToNumber('{{{{');
|
const maxValue = base91ToNumber("{{{{");
|
||||||
const expected = 90 * 91 * 91 * 91 + 90 * 91 * 91 + 90 * 91 + 90;
|
const expected = 90 * 91 * 91 * 91 + 90 * 91 * 91 + 90 * 91 + 90;
|
||||||
expect(maxValue).toBe(expected);
|
expect(maxValue).toBe(expected);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should handle APRS compressed position example', () => {
|
it("should handle APRS compressed position example", () => {
|
||||||
// Using actual characters from APRS test vector
|
// Using actual characters from APRS test vector
|
||||||
const latStr = '/:*E';
|
const latStr = "/:*E";
|
||||||
const lonStr = 'qZ=O';
|
const lonStr = "qZ=O";
|
||||||
|
|
||||||
const latValue = base91ToNumber(latStr);
|
const latValue = base91ToNumber(latStr);
|
||||||
const lonValue = base91ToNumber(lonStr);
|
const lonValue = base91ToNumber(lonStr);
|
||||||
|
|
||||||
// Just verify they decode without error and produce valid numbers
|
// Just verify they decode without error and produce valid numbers
|
||||||
expect(typeof latValue).toBe('number');
|
expect(typeof latValue).toBe("number");
|
||||||
expect(typeof lonValue).toBe('number');
|
expect(typeof lonValue).toBe("number");
|
||||||
expect(latValue).toBeGreaterThanOrEqual(0);
|
expect(latValue).toBeGreaterThanOrEqual(0);
|
||||||
expect(lonValue).toBeGreaterThanOrEqual(0);
|
expect(lonValue).toBeGreaterThanOrEqual(0);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('throws on invalid character', () => {
|
it("throws on invalid character", () => {
|
||||||
expect(() => base91ToNumber(' ')).toThrow(); // space (code 32) is invalid
|
expect(() => base91ToNumber(" ")).toThrow(); // space (code 32) is invalid
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('unit conversions', () => {
|
describe("unit conversions", () => {
|
||||||
it('converts knots <-> km/h', () => {
|
it("converts knots <-> km/h", () => {
|
||||||
expect(knotsToKmh(10)).toBeCloseTo(18.52, 5);
|
expect(knotsToKmh(10)).toBeCloseTo(18.52, 5);
|
||||||
expect(kmhToKnots(18.52)).toBeCloseTo(10, 3);
|
expect(kmhToKnots(18.52)).toBeCloseTo(10, 3);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('converts feet <-> meters', () => {
|
it("converts feet <-> meters", () => {
|
||||||
expect(feetToMeters(10)).toBeCloseTo(3.048, 6);
|
expect(feetToMeters(10)).toBeCloseTo(3.048, 6);
|
||||||
expect(metersToFeet(3.048)).toBeCloseTo(10, 6);
|
expect(metersToFeet(3.048)).toBeCloseTo(10, 6);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('converts celsius <-> fahrenheit', () => {
|
it("converts celsius <-> fahrenheit", () => {
|
||||||
expect(celsiusToFahrenheit(0)).toBeCloseTo(32, 6);
|
expect(celsiusToFahrenheit(0)).toBeCloseTo(32, 6);
|
||||||
expect(fahrenheitToCelsius(32)).toBeCloseTo(0, 6);
|
expect(fahrenheitToCelsius(32)).toBeCloseTo(0, 6);
|
||||||
expect(celsiusToFahrenheit(100)).toBeCloseTo(212, 6);
|
expect(celsiusToFahrenheit(100)).toBeCloseTo(212, 6);
|
||||||
|
|||||||
35
test/payload.capabilities.test.ts
Normal file
35
test/payload.capabilities.test.ts
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
import { Dissected } from "@hamradio/packet";
|
||||||
|
import { describe, expect, it } from "vitest";
|
||||||
|
|
||||||
|
import { Frame } from "../src/frame";
|
||||||
|
import { DataType, type Payload, type StationCapabilitiesPayload } from "../src/frame.types";
|
||||||
|
|
||||||
|
describe("Frame.decodeCapabilities", () => {
|
||||||
|
it("parses comma separated capabilities", () => {
|
||||||
|
const data = "CALL>APRS:<IGATE,MSG_CNT";
|
||||||
|
const frame = Frame.fromString(data);
|
||||||
|
const decoded = frame.decode() as StationCapabilitiesPayload;
|
||||||
|
expect(decoded).not.toBeNull();
|
||||||
|
expect(decoded.type).toBe(DataType.StationCapabilities);
|
||||||
|
expect(Array.isArray(decoded.capabilities)).toBeTruthy();
|
||||||
|
expect(decoded.capabilities).toContain("IGATE");
|
||||||
|
expect(decoded.capabilities).toContain("MSG_CNT");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("emits structure sections when requested", () => {
|
||||||
|
const data = "CALL>APRS:<IGATE MSG_CNT>";
|
||||||
|
const frame = Frame.fromString(data);
|
||||||
|
const res = frame.decode(true) as {
|
||||||
|
payload: Payload | null;
|
||||||
|
structure: Dissected;
|
||||||
|
};
|
||||||
|
expect(res.payload).not.toBeNull();
|
||||||
|
if (res.payload && res.payload.type !== DataType.StationCapabilities)
|
||||||
|
throw new Error("expected capabilities payload");
|
||||||
|
expect(res.structure).toBeDefined();
|
||||||
|
const caps = res.structure.find((s) => s.name === "capabilities");
|
||||||
|
expect(caps).toBeDefined();
|
||||||
|
const capEntry = res.structure.find((s) => s.name === "capability");
|
||||||
|
expect(capEntry).toBeDefined();
|
||||||
|
});
|
||||||
|
});
|
||||||
224
test/payload.extras.test.ts
Normal file
224
test/payload.extras.test.ts
Normal file
@@ -0,0 +1,224 @@
|
|||||||
|
import type { Dissected, Field, Segment } from "@hamradio/packet";
|
||||||
|
import { describe, expect, it } from "vitest";
|
||||||
|
|
||||||
|
import { Frame } from "../src/frame";
|
||||||
|
import type { PositionPayload } from "../src/frame.types";
|
||||||
|
import { feetToMeters, milesToMeters } from "../src/parser";
|
||||||
|
import { decodeTelemetry } from "../src/payload.extras";
|
||||||
|
|
||||||
|
describe("APRS extras test vectors", () => {
|
||||||
|
it("parses altitude token in the beginning of a comment and emits structure", () => {
|
||||||
|
const raw =
|
||||||
|
"DL3QP-R>APDG03,TCPIP*,qAC,T2ROMANIA:!5151.12ND00637.65E&/A=000000440 MMDVM Voice 439.40000MHz -7.6000MHz, DL3QP_Pi-Star";
|
||||||
|
const frame = Frame.fromString(raw);
|
||||||
|
const res = frame.decode(true) as { payload: PositionPayload | null; structure: Dissected };
|
||||||
|
const { payload, structure } = res;
|
||||||
|
|
||||||
|
expect(payload).not.toBeNull();
|
||||||
|
// Altitude 001234 ft -> meters
|
||||||
|
expect(payload!.position.altitude).toBe(0);
|
||||||
|
|
||||||
|
const commentSeg = structure.find((s) => /comment/i.test(String(s.name))) as Segment | undefined;
|
||||||
|
expect(commentSeg).toBeDefined();
|
||||||
|
const fieldsAlt = (commentSeg!.fields ?? []) as Field[];
|
||||||
|
const hasAlt = fieldsAlt.some((f) => f.name === "altitude");
|
||||||
|
expect(hasAlt).toBe(true);
|
||||||
|
|
||||||
|
expect(payload!.position.comment).toBe("440 MMDVM Voice 439.40000MHz -7.6000MHz, DL3QP_Pi-Star");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("parses altitude token marker mid-comment and emits structure", () => {
|
||||||
|
const raw = "N0CALL>APRS,WIDE1-1:!4500.00N/07000.00W#RNG0001ALT/A=001234 Your Comment Here";
|
||||||
|
const frame = Frame.fromString(raw);
|
||||||
|
const res = frame.decode(true) as { payload: PositionPayload | null; structure: Dissected };
|
||||||
|
const { payload, structure } = res;
|
||||||
|
|
||||||
|
// console.log(structure[structure.length - 1]); // Log the last segment for debugging
|
||||||
|
|
||||||
|
expect(payload).not.toBeNull();
|
||||||
|
// Altitude 001234 ft -> meters
|
||||||
|
expect(Math.round((payload!.position.altitude || 0) / 0.3048)).toBe(1234);
|
||||||
|
|
||||||
|
const commentSeg = structure.find((s) => /comment/i.test(String(s.name))) as Segment | undefined;
|
||||||
|
expect(commentSeg).toBeDefined();
|
||||||
|
const fieldsAlt = (commentSeg!.fields ?? []) as Field[];
|
||||||
|
const hasAlt = fieldsAlt.some((f) => f.name === "altitude");
|
||||||
|
expect(hasAlt).toBe(true);
|
||||||
|
|
||||||
|
const commentIndex = (commentSeg!.fields ?? []).findIndex((f) => f.name === "comment");
|
||||||
|
expect(commentIndex).toBe(2); // Range marker + range go before.
|
||||||
|
|
||||||
|
const altitudeIndex = (commentSeg!.fields ?? []).findIndex((f) => f.name === "altitude");
|
||||||
|
expect(altitudeIndex).toBeGreaterThan(0); // Altitude should come after comment in the structure
|
||||||
|
expect(altitudeIndex).toBeGreaterThan(commentIndex);
|
||||||
|
|
||||||
|
const secondCommentIndex = (commentSeg!.fields ?? []).findIndex((f, i) => f.name === "comment" && i > commentIndex);
|
||||||
|
expect(secondCommentIndex).toBeGreaterThan(altitudeIndex); // Any additional comment fields should come after altitude
|
||||||
|
});
|
||||||
|
|
||||||
|
it("parses PHG from position with messaging (spec vector 1)", () => {
|
||||||
|
const raw = "NOCALL>APZRAZ,qAS,PA2RDK-14:=5154.19N/00627.77E>PHG500073 de NOCALL";
|
||||||
|
const frame = Frame.fromString(raw);
|
||||||
|
const res = frame.decode(true) as { payload: PositionPayload | null; structure: Dissected };
|
||||||
|
const { payload } = res;
|
||||||
|
|
||||||
|
expect(payload).not.toBeNull();
|
||||||
|
expect(payload!.position.phg).toBeDefined();
|
||||||
|
// PHG500073 parsed per spec: p=5 -> 25 W, h='0' -> 10 ft, g='0' -> 0 dBi
|
||||||
|
expect(payload!.position.phg!.power).toBe(25);
|
||||||
|
expect(payload!.position.phg!.height).toBeCloseTo(3.048, 3);
|
||||||
|
expect(payload!.position.phg!.gain).toBe(0);
|
||||||
|
expect(payload!.position!.comment).toBe("73 de NOCALL");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("parses PHG token with hyphen separators (spec vector 2)", () => {
|
||||||
|
const raw = "NOCALL>APRS,TCPIP*,qAC,NINTH:;P-PA3RD *061000z5156.26NP00603.29E#PHG0210DAPNET";
|
||||||
|
const frame = Frame.fromString(raw);
|
||||||
|
const res = frame.decode(true) as { payload: PositionPayload | null; structure: Dissected };
|
||||||
|
const { payload, structure } = res;
|
||||||
|
|
||||||
|
// console.log(structure[structure.length - 1]); // Log the last segment for debugging
|
||||||
|
|
||||||
|
expect(payload).not.toBeNull();
|
||||||
|
// Use a spec PHG example: PHG0210 -> p=0 -> power 0 W, h=2 -> 40 ft
|
||||||
|
expect(payload!.position.phg).toBeDefined();
|
||||||
|
expect(payload!.position.phg!.power).toBe(0);
|
||||||
|
expect(payload!.position.phg!.height).toBeCloseTo(12.192, 3);
|
||||||
|
|
||||||
|
const commentSeg = structure.find((s) => /comment/i.test(String(s.name))) as Segment | undefined;
|
||||||
|
expect(commentSeg).toBeDefined();
|
||||||
|
const fields = (commentSeg!.fields ?? []) as Field[];
|
||||||
|
const hasPHG = fields.some((f) => f.name === "PHG marker");
|
||||||
|
expect(hasPHG).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("parses DFS token with long numeric strength", () => {
|
||||||
|
const raw = "N0CALL>APRS,WIDE1-1:!4500.00N/07000.00W#DFS2360/Your Comment";
|
||||||
|
const frame = Frame.fromString(raw);
|
||||||
|
const res = frame.decode(true) as { payload: PositionPayload | null; structure: Dissected };
|
||||||
|
const { payload, structure } = res;
|
||||||
|
|
||||||
|
expect(payload).not.toBeNull();
|
||||||
|
expect(payload!.position.dfs).toBeDefined();
|
||||||
|
// DFSshgd: strength is single-digit s value (here '2')
|
||||||
|
expect(payload!.position.dfs!.strength).toBe(2);
|
||||||
|
|
||||||
|
const commentSeg = structure.find((s) => /comment/i.test(String(s.name))) as Segment | undefined;
|
||||||
|
expect(commentSeg).toBeDefined();
|
||||||
|
const fieldsDFS = (commentSeg!.fields ?? []) as Field[];
|
||||||
|
const hasDFS = fieldsDFS.some((f) => f.name === "DFS marker");
|
||||||
|
expect(hasDFS).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("parses course/speed in DDD/SSS form and altitude /A=", () => {
|
||||||
|
const raw = "N0CALL>APRS,WIDE1-1:!4500.00N/07000.00W>090/045/A=001234";
|
||||||
|
const frame = Frame.fromString(raw);
|
||||||
|
const res = frame.decode(true) as { payload: PositionPayload | null; structure: Dissected };
|
||||||
|
const { payload, structure } = res;
|
||||||
|
|
||||||
|
expect(payload).not.toBeNull();
|
||||||
|
expect(payload!.position.course).toBe(90);
|
||||||
|
// Speed is converted from knots to km/h
|
||||||
|
expect(payload!.position.speed).toBeCloseTo(45 * 1.852, 3);
|
||||||
|
// Altitude 001234 ft -> meters
|
||||||
|
expect(Math.round((payload!.position.altitude || 0) / 0.3048)).toBe(1234);
|
||||||
|
|
||||||
|
const commentSeg = structure.find((s) => /comment/i.test(String(s.name))) as Segment | undefined;
|
||||||
|
expect(commentSeg).toBeDefined();
|
||||||
|
const fieldsCSE = (commentSeg!.fields ?? []) as Field[];
|
||||||
|
const hasCSE = fieldsCSE.some((f) => f.name === "course");
|
||||||
|
expect(hasCSE).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("parses combined tokens: DDD/SSS PHG and DFS", () => {
|
||||||
|
const raw = "N0CALL>APRS,WIDE1-1:!4500.00N/07000.00W>090/045PHG5132DFS2132";
|
||||||
|
const frame = Frame.fromString(raw);
|
||||||
|
const res = frame.decode(true) as { payload: PositionPayload | null; structure: Dissected };
|
||||||
|
const { payload, structure } = res;
|
||||||
|
|
||||||
|
expect(payload).not.toBeNull();
|
||||||
|
expect(payload!.position.course).toBe(90);
|
||||||
|
expect(payload!.position.speed).toBeCloseTo(45 * 1.852, 3);
|
||||||
|
expect(payload!.position.phg).toBeDefined();
|
||||||
|
expect(payload!.position.dfs).toBeDefined();
|
||||||
|
expect(payload!.position.dfs!.strength).toBe(2);
|
||||||
|
|
||||||
|
const commentSeg = structure.find((s) => /comment/i.test(String(s.name))) as Segment | undefined;
|
||||||
|
expect(commentSeg).toBeDefined();
|
||||||
|
const fieldsCombined = (commentSeg!.fields ?? []) as Field[];
|
||||||
|
expect(fieldsCombined.some((f) => ["course", "PHG marker", "DFS marker"].includes(String(f.name)))).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("parses RNG token and emits structure", () => {
|
||||||
|
const raw =
|
||||||
|
"N0CALL-S>APDG01,TCPIP*,qAC,N0CALL-GS:;N0CALL B *181721z5148.38ND00634.32EaRNG0001/A=000010 70cm Voice (D-Star) 439.50000MHz -7.6000MHz";
|
||||||
|
const frame = Frame.fromString(raw);
|
||||||
|
const res = frame.decode(true) as { payload: PositionPayload | null; structure: Dissected };
|
||||||
|
const { payload, structure } = res;
|
||||||
|
|
||||||
|
expect(payload).not.toBeNull();
|
||||||
|
expect(payload!.position.altitude).toBeCloseTo(feetToMeters(10), 3);
|
||||||
|
expect(payload!.position.range).toBe(milesToMeters(1) / 1000);
|
||||||
|
|
||||||
|
const commentSeg = structure.find((s) => /comment/i.test(String(s.name))) as Segment | undefined;
|
||||||
|
expect(commentSeg).toBeDefined();
|
||||||
|
const fieldsRNG = (commentSeg!.fields ?? []) as Field[];
|
||||||
|
const hasRNG = fieldsRNG.some((f) => f.name === "range marker");
|
||||||
|
expect(hasRNG).toBe(true);
|
||||||
|
|
||||||
|
const rangeIndex = (commentSeg!.fields ?? []).findIndex((f) => f.name === "range marker");
|
||||||
|
expect(rangeIndex).toBeGreaterThanOrEqual(0);
|
||||||
|
const altitudeIndex = (commentSeg!.fields ?? []).findIndex((f) => f.name === "altitude");
|
||||||
|
expect(altitudeIndex).toBeGreaterThanOrEqual(0);
|
||||||
|
expect(rangeIndex).toBeGreaterThanOrEqual(0);
|
||||||
|
expect(altitudeIndex).toBeGreaterThan(rangeIndex); // Altitude comes after range
|
||||||
|
const commentIndex = (commentSeg!.fields ?? []).findIndex((f) => f.name === "comment");
|
||||||
|
expect(commentIndex).toBeGreaterThan(altitudeIndex); // Comment comes after altitude
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("decodeTelemetry", () => {
|
||||||
|
it("decodes minimal telemetry (|!!!!|)", () => {
|
||||||
|
const result = decodeTelemetry("!!!!");
|
||||||
|
expect(result.sequence).toBe(0);
|
||||||
|
expect(result.analog).toEqual([0]);
|
||||||
|
expect(result.digital).toBeUndefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("decodes sequence and one channel", () => {
|
||||||
|
const result = decodeTelemetry("ss11");
|
||||||
|
expect(result.sequence).toBe(7544);
|
||||||
|
expect(result.analog).toEqual([1472]);
|
||||||
|
expect(result.digital).toBeUndefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("decodes sequence and two channels", () => {
|
||||||
|
const result = decodeTelemetry("ss1122");
|
||||||
|
expect(result.sequence).toBe(7544);
|
||||||
|
expect(result.analog).toEqual([1472, 1564]);
|
||||||
|
expect(result.digital).toBeUndefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("decodes sequence and five channels", () => {
|
||||||
|
const result = decodeTelemetry("ss1122334455");
|
||||||
|
expect(result.sequence).toBe(7544);
|
||||||
|
expect(result.analog).toEqual([1472, 1564, 1656, 1748, 1840]);
|
||||||
|
expect(result.digital).toBeUndefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("decodes sequence, five channels, and digital", () => {
|
||||||
|
const result = decodeTelemetry('ss1122334455!"');
|
||||||
|
expect(result.sequence).toBe(7544);
|
||||||
|
expect(result.analog).toEqual([1472, 1564, 1656, 1748, 1840]);
|
||||||
|
expect(result.digital).toBe(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("throws on too short input", () => {
|
||||||
|
expect(() => decodeTelemetry("!")).toThrow();
|
||||||
|
expect(() => decodeTelemetry("")).toThrow();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("throws on invalid base91", () => {
|
||||||
|
expect(() => decodeTelemetry("ss11~~")).toThrow();
|
||||||
|
});
|
||||||
|
});
|
||||||
40
test/payload.query.test.ts
Normal file
40
test/payload.query.test.ts
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
import { Dissected } from "@hamradio/packet";
|
||||||
|
import { expect } from "vitest";
|
||||||
|
import { describe, it } from "vitest";
|
||||||
|
|
||||||
|
import { Frame } from "../src/frame";
|
||||||
|
import { DataType, QueryPayload } from "../src/frame.types";
|
||||||
|
|
||||||
|
describe("Frame decode - Query", () => {
|
||||||
|
it("decodes simple query without target", () => {
|
||||||
|
const frame = Frame.fromString("SRC>DEST:?APRS");
|
||||||
|
const payload = frame.decode() as QueryPayload;
|
||||||
|
expect(payload).not.toBeNull();
|
||||||
|
expect(payload.type).toBe(DataType.Query);
|
||||||
|
expect(payload.queryType).toBe("APRS");
|
||||||
|
expect(payload.target).toBeUndefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("decodes query with target", () => {
|
||||||
|
const frame = Frame.fromString("SRC>DEST:?PING N0CALL");
|
||||||
|
const payload = frame.decode() as QueryPayload;
|
||||||
|
expect(payload).not.toBeNull();
|
||||||
|
expect(payload.type).toBe(DataType.Query);
|
||||||
|
expect(payload.queryType).toBe("PING");
|
||||||
|
expect(payload.target).toBe("N0CALL");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("returns structure sections when requested", () => {
|
||||||
|
const frame = Frame.fromString("SRC>DEST:?PING N0CALL");
|
||||||
|
const result = frame.decode(true) as {
|
||||||
|
payload: QueryPayload;
|
||||||
|
structure: Dissected;
|
||||||
|
};
|
||||||
|
expect(result).toHaveProperty("payload");
|
||||||
|
expect(result.payload.type).toBe(DataType.Query);
|
||||||
|
expect(Array.isArray(result.structure)).toBe(true);
|
||||||
|
const names = result.structure.map((s) => s.name);
|
||||||
|
expect(names).toContain("query type");
|
||||||
|
expect(names).toContain("query target");
|
||||||
|
});
|
||||||
|
});
|
||||||
45
test/payload.rawgps.test.ts
Normal file
45
test/payload.rawgps.test.ts
Normal file
@@ -0,0 +1,45 @@
|
|||||||
|
import { Dissected } from "@hamradio/packet";
|
||||||
|
import { describe, expect, it } from "vitest";
|
||||||
|
|
||||||
|
import { Frame } from "../src/frame";
|
||||||
|
import { DataType, type RawGPSPayload } from "../src/frame.types";
|
||||||
|
|
||||||
|
describe("Raw GPS decoding", () => {
|
||||||
|
it("decodes simple NMEA sentence as raw-gps payload", () => {
|
||||||
|
const sentence = "GPRMC,123519,A,4807.038,N,01131.000,E,022.4,084.4,230394,003.1,W*6A";
|
||||||
|
const frameStr = `SRC>DEST:$${sentence}`;
|
||||||
|
|
||||||
|
const f = Frame.parse(frameStr);
|
||||||
|
const payload = f.decode(false) as RawGPSPayload | null;
|
||||||
|
|
||||||
|
expect(payload).not.toBeNull();
|
||||||
|
expect(payload?.type).toBe(DataType.RawGPS);
|
||||||
|
expect(payload?.sentence).toBe(sentence);
|
||||||
|
expect(payload?.position).toBeDefined();
|
||||||
|
expect(typeof payload?.position?.latitude).toBe("number");
|
||||||
|
expect(typeof payload?.position?.longitude).toBe("number");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("returns structure when requested", () => {
|
||||||
|
const sentence = "GPGGA,092750.000,5321.6802,N,00630.3372,W,1,08,1.0,73.0,M,0.0,M,,*6A";
|
||||||
|
const frameStr = `SRC>DEST:$${sentence}`;
|
||||||
|
|
||||||
|
const f = Frame.parse(frameStr);
|
||||||
|
const result = f.decode(true) as {
|
||||||
|
payload: RawGPSPayload | null;
|
||||||
|
structure: Dissected;
|
||||||
|
};
|
||||||
|
|
||||||
|
expect(result.payload).not.toBeNull();
|
||||||
|
expect(result.payload?.type).toBe(DataType.RawGPS);
|
||||||
|
expect(result.payload?.sentence).toBe(sentence);
|
||||||
|
expect(result.payload?.position).toBeDefined();
|
||||||
|
expect(typeof result.payload?.position?.latitude).toBe("number");
|
||||||
|
expect(typeof result.payload?.position?.longitude).toBe("number");
|
||||||
|
expect(result.structure).toBeDefined();
|
||||||
|
const rawSection = result.structure.find((s) => s.name === "raw-gps");
|
||||||
|
expect(rawSection).toBeDefined();
|
||||||
|
const posSection = result.structure.find((s) => s.name === "raw-gps-position");
|
||||||
|
expect(posSection).toBeDefined();
|
||||||
|
});
|
||||||
|
});
|
||||||
66
test/payload.telemetry.test.ts
Normal file
66
test/payload.telemetry.test.ts
Normal file
@@ -0,0 +1,66 @@
|
|||||||
|
import { describe, it } from "vitest";
|
||||||
|
import { expect } from "vitest";
|
||||||
|
|
||||||
|
import { Frame } from "../src/frame";
|
||||||
|
import {
|
||||||
|
DataType,
|
||||||
|
TelemetryBitSensePayload,
|
||||||
|
TelemetryCoefficientsPayload,
|
||||||
|
TelemetryDataPayload,
|
||||||
|
TelemetryParameterPayload,
|
||||||
|
TelemetryUnitPayload
|
||||||
|
} from "../src/frame.types";
|
||||||
|
|
||||||
|
describe("Frame decode - Telemetry", () => {
|
||||||
|
it("decodes telemetry data payload", () => {
|
||||||
|
const frame = Frame.fromString("SRC>DEST:T#1 10,20,30,40,50 7");
|
||||||
|
const payload = frame.decode() as TelemetryDataPayload;
|
||||||
|
expect(payload).not.toBeNull();
|
||||||
|
expect(payload.type).toBe(DataType.TelemetryData);
|
||||||
|
expect(payload.variant).toBe("data");
|
||||||
|
expect(payload.sequence).toBe(1);
|
||||||
|
expect(Array.isArray(payload.analog)).toBe(true);
|
||||||
|
expect(payload.analog.length).toBe(5);
|
||||||
|
expect(payload.digital).toBe(7);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("decodes telemetry parameters list", () => {
|
||||||
|
const frame = Frame.fromString("SRC>DEST:TPARAM Temp,Hum,Wind");
|
||||||
|
const payload = frame.decode() as TelemetryParameterPayload;
|
||||||
|
expect(payload).not.toBeNull();
|
||||||
|
expect(payload.type).toBe(DataType.TelemetryData);
|
||||||
|
expect(payload.variant).toBe("parameters");
|
||||||
|
expect(Array.isArray(payload.names)).toBe(true);
|
||||||
|
expect(payload.names).toEqual(["Temp", "Hum", "Wind"]);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("decodes telemetry units list", () => {
|
||||||
|
const frame = Frame.fromString("SRC>DEST:TUNIT C,% ,mph");
|
||||||
|
const payload = frame.decode() as TelemetryUnitPayload;
|
||||||
|
expect(payload).not.toBeNull();
|
||||||
|
expect(payload.type).toBe(DataType.TelemetryData);
|
||||||
|
expect(payload.variant).toBe("unit");
|
||||||
|
expect(payload.units).toEqual(["C", "%", "mph"]);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("decodes telemetry coefficients", () => {
|
||||||
|
const frame = Frame.fromString("SRC>DEST:TCOEFF A:1,2 B:3,4 C:5,6");
|
||||||
|
const payload = frame.decode() as TelemetryCoefficientsPayload;
|
||||||
|
expect(payload).not.toBeNull();
|
||||||
|
expect(payload.type).toBe(DataType.TelemetryData);
|
||||||
|
expect(payload.variant).toBe("coefficients");
|
||||||
|
expect(payload.coefficients.a).toEqual([1, 2]);
|
||||||
|
expect(payload.coefficients.b).toEqual([3, 4]);
|
||||||
|
expect(payload.coefficients.c).toEqual([5, 6]);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("decodes telemetry bitsense with project", () => {
|
||||||
|
const frame = Frame.fromString("SRC>DEST:TBITS 255 ProjectX");
|
||||||
|
const payload = frame.decode() as TelemetryBitSensePayload;
|
||||||
|
expect(payload).not.toBeNull();
|
||||||
|
expect(payload.type).toBe(DataType.TelemetryData);
|
||||||
|
expect(payload.variant).toBe("bitsense");
|
||||||
|
expect(payload.sense).toBe(255);
|
||||||
|
expect(payload.projectName).toBe("ProjectX");
|
||||||
|
});
|
||||||
|
});
|
||||||
37
test/payload.thirdparty.test.ts
Normal file
37
test/payload.thirdparty.test.ts
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
import { Dissected } from "@hamradio/packet";
|
||||||
|
import { describe, expect, it } from "vitest";
|
||||||
|
|
||||||
|
import { Frame } from "../src/frame";
|
||||||
|
import { DataType, type UserDefinedPayload } from "../src/frame.types";
|
||||||
|
|
||||||
|
describe("Frame.decodeUserDefined", () => {
|
||||||
|
it("parses packet type only", () => {
|
||||||
|
const data = "CALL>APRS:{01";
|
||||||
|
const frame = Frame.fromString(data);
|
||||||
|
const decoded = frame.decode() as UserDefinedPayload;
|
||||||
|
expect(decoded).not.toBeNull();
|
||||||
|
expect(decoded.type).toBe(DataType.UserDefined);
|
||||||
|
expect(decoded.userPacketType).toBe("01");
|
||||||
|
expect(decoded.data).toBe("");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("parses packet type and data and emits sections", () => {
|
||||||
|
const data = "CALL>APRS:{TEX Hello world";
|
||||||
|
const frame = Frame.fromString(data);
|
||||||
|
const res = frame.decode(true) as {
|
||||||
|
payload: UserDefinedPayload;
|
||||||
|
structure: Dissected;
|
||||||
|
};
|
||||||
|
expect(res.payload).not.toBeNull();
|
||||||
|
expect(res.payload.type).toBe(DataType.UserDefined);
|
||||||
|
expect(res.payload.userPacketType).toBe("TEX");
|
||||||
|
expect(res.payload.data).toBe("Hello world");
|
||||||
|
|
||||||
|
const raw = res.structure.find((s) => s.name === "user-defined");
|
||||||
|
const typeSection = res.structure.find((s) => s.name === "user-packet-type");
|
||||||
|
const dataSection = res.structure.find((s) => s.name === "user-data");
|
||||||
|
expect(raw).toBeDefined();
|
||||||
|
expect(typeSection).toBeDefined();
|
||||||
|
expect(dataSection).toBeDefined();
|
||||||
|
});
|
||||||
|
});
|
||||||
38
test/payload.weather.test.ts
Normal file
38
test/payload.weather.test.ts
Normal file
@@ -0,0 +1,38 @@
|
|||||||
|
import { Dissected } from "@hamradio/packet";
|
||||||
|
import { describe, expect, it } from "vitest";
|
||||||
|
|
||||||
|
import { Frame } from "../src/frame";
|
||||||
|
import { DataType, WeatherPayload } from "../src/frame.types";
|
||||||
|
|
||||||
|
describe("Frame decode - Weather", () => {
|
||||||
|
it("parses weather with timestamp, wind, temp, rain, humidity and pressure", () => {
|
||||||
|
const data = "SRC>DEST:_120345z180/10g15t072r000p025P050h50b10132";
|
||||||
|
const frame = Frame.fromString(data);
|
||||||
|
const payload = frame.decode() as WeatherPayload;
|
||||||
|
expect(payload).not.toBeNull();
|
||||||
|
expect(payload.type).toBe(DataType.WeatherReportNoPosition);
|
||||||
|
expect(payload.timestamp).toBeDefined();
|
||||||
|
expect(payload.windDirection).toBe(180);
|
||||||
|
expect(payload.windSpeed).toBe(10);
|
||||||
|
expect(payload.windGust).toBe(15);
|
||||||
|
expect(payload.temperature).toBe(72);
|
||||||
|
expect(payload.rainLast24Hours).toBe(25);
|
||||||
|
expect(payload.rainSinceMidnight).toBe(50);
|
||||||
|
expect(payload.humidity).toBe(50);
|
||||||
|
expect(payload.pressure).toBe(10132);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("emits structure when requested", () => {
|
||||||
|
const data = "SRC>DEST:_120345z180/10g15t072r000p025P050h50b10132";
|
||||||
|
const frame = Frame.fromString(data);
|
||||||
|
const res = frame.decode(true) as {
|
||||||
|
payload: WeatherPayload;
|
||||||
|
structure: Dissected;
|
||||||
|
};
|
||||||
|
expect(res.payload).not.toBeNull();
|
||||||
|
expect(Array.isArray(res.structure)).toBe(true);
|
||||||
|
const names = res.structure.map((s) => s.name);
|
||||||
|
expect(names).toContain("timestamp");
|
||||||
|
expect(names).toContain("weather");
|
||||||
|
});
|
||||||
|
});
|
||||||
Reference in New Issue
Block a user