1
0
mirror of https://github.com/craigerl/aprsd.git synced 2026-01-20 20:46:28 -05:00

Merge pull request #208 from craigerl/refactor-filter

Refactor filter
This commit is contained in:
Walter A. Boring IV 2026-01-20 11:38:44 -05:00 committed by GitHub
commit d4b76f844e
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
94 changed files with 3970 additions and 853 deletions

View File

@ -0,0 +1,57 @@
# Build and publish a multi-arch (amd64 + arm64) Docker image whenever a
# release tag is pushed. Tags may be either "v3.4.0" or "3.4.0" style; the
# leading "v" is stripped so the image tag is always the bare version.
name: Build Multi-Arch Docker Image on Tag
on:
  push:
    tags:
      - "v*.*.*"
      - "*.*.*"
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Set version from tag
        id: version
        run: |
          TAG="${{ github.ref_name }}"
          # Strip leading 'v' if present (e.g. v3.4.0 -> 3.4.0)
          VERSION="${TAG#v}"
          echo "version=${VERSION}" >> "$GITHUB_OUTPUT"
          echo "tag=${TAG}" >> "$GITHUB_OUTPUT"
      # QEMU + Buildx are required to cross-build the linux/arm64 image
      # on the amd64 runner.
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Log in to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Extract metadata (tags, labels)
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: hemna6969/aprsd
          tags: |
            type=raw,value=${{ steps.version.outputs.version }},enable=${{ github.ref_type == 'tag' }}
      - name: Build and push
        uses: docker/build-push-action@v6
        with:
          context: ./docker
          file: ./docker/Dockerfile
          platforms: linux/amd64,linux/arm64
          build-args: |
            INSTALL_TYPE=pypi
            VERSION=${{ steps.version.outputs.version }}
            BUILDX_QEMU_ENV=true
          push: true
          provenance: false
          tags: ${{ steps.meta.outputs.tags }}

View File

@ -17,7 +17,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ["3.10", "3.11", "3.12"]
python-version: ["3.11"]
steps:
- uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}

View File

@ -1,16 +1,17 @@
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v5.0.0
rev: v6.0.0
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer
- id: check-yaml
- id: check-json
- id: detect-private-key
- id: check-merge-conflict
- id: check-case-conflict
- id: check-docstring-first
- id: check-builtin-literals
- id: check-illegal-windows-names
- id: double-quote-string-fixer
- repo: https://github.com/asottile/setup-cfg-fmt
rev: v2.7.0
@ -18,18 +19,19 @@ repos:
- id: setup-cfg-fmt
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.9.0
rev: v0.14.10
hooks:
- id: ruff
###### Relevant part below ######
- id: ruff
- id: ruff-check
types_or: [python, pyi]
args: ["check", "--select", "I", "--fix"]
###### Relevant part above ######
- id: ruff-format
types_or: [python, pyi]
- repo: https://github.com/astral-sh/uv-pre-commit
# uv version.
rev: 0.5.16
rev: 0.9.22
hooks:
# Compile requirements
- id: pip-compile

View File

@ -1 +1 @@
waboring@hemna.com : 1
waboring@hemna.com : 1

View File

@ -12,8 +12,7 @@
from importlib.metadata import PackageNotFoundError, version
try:
__version__ = version("aprsd")
__version__ = version('aprsd')
except PackageNotFoundError:
pass

View File

@ -3,6 +3,7 @@ import logging
import time
from typing import Callable
import aprslib
from aprslib.exceptions import LoginError
from loguru import logger
from oslo_config import cfg
@ -49,11 +50,12 @@ class APRSISDriver:
@staticmethod
def is_configured():
if APRSISDriver.is_enabled():
# Ensure that the config vars are correctly set
if not CONF.aprs_network.login:
LOG.error('Config aprs_network.login not set.')
# Ensure that the config vars are correctly set.
# The callsign in [DEFAULT] is used as the APRS-IS login.
if not CONF.callsign or CONF.callsign == 'NOCALL':
LOG.error('Config callsign (in [DEFAULT]) not set or is NOCALL.')
raise exception.MissingConfigOptionException(
'aprs_network.login is not set.',
'callsign (in [DEFAULT]) is not set or is NOCALL.',
)
if not CONF.aprs_network.password:
LOG.error('Config aprs_network.password not set.')
@ -88,7 +90,7 @@ class APRSISDriver:
def setup_connection(self):
if self.connected:
return
user = CONF.aprs_network.login
user = CONF.callsign
password = CONF.aprs_network.password
host = CONF.aprs_network.host
port = CONF.aprs_network.port
@ -133,6 +135,7 @@ class APRSISDriver:
continue
def set_filter(self, filter):
LOG.info(f'Setting filter to {filter}')
self._client.set_filter(filter)
def login_success(self) -> bool:
@ -166,7 +169,13 @@ class APRSISDriver:
def decode_packet(self, *args, **kwargs):
"""APRS lib already decodes this."""
return core.factory(args[0])
if not args:
LOG.warning('No frame received to decode?!?!')
return None
# If args[0] is already a dict (already parsed), pass it directly to factory
if isinstance(args[0], dict):
return core.factory(args[0])
return core.factory(aprslib.parse(args[0]))
def consumer(self, callback: Callable, raw: bool = False):
if self._client and self.connected:

View File

@ -103,16 +103,20 @@ class APRSDFakeDriver(metaclass=trace.TraceWrapperMetaclass):
def decode_packet(self, *args, **kwargs):
"""APRS lib already decodes this."""
if not kwargs:
# If packet is provided in kwargs, return it directly
if 'packet' in kwargs:
return kwargs['packet']
# If raw is provided in kwargs, use it
if 'raw' in kwargs:
return core.factory(aprslib.parse(kwargs['raw']))
# Otherwise, use args[0] if available
if not args:
LOG.warning('No frame received to decode?!?!')
return None
if kwargs.get('packet'):
return kwargs.get('packet')
if kwargs.get('raw'):
pkt_raw = aprslib.parse(kwargs.get('raw'))
pkt = core.factory(pkt_raw)
return pkt
# If args[0] is already a dict (already parsed), pass it directly to factory
if isinstance(args[0], dict):
return core.factory(args[0])
return core.factory(aprslib.parse(args[0]))
def stats(self, serializable: bool = False) -> dict:
return {

View File

@ -101,11 +101,12 @@ class KISSDriver(metaclass=trace.TraceWrapperMetaclass):
Args:
frame: Received AX.25 frame
"""
frame = kwargs.get('frame')
if not frame:
if not args:
LOG.warning('No frame received to decode?!?!')
return None
frame = args[0]
try:
aprslib_frame = aprslib.parse(str(frame))
packet = core.factory(aprslib_frame)
@ -134,10 +135,7 @@ class KISSDriver(metaclass=trace.TraceWrapperMetaclass):
frame = self.read_frame()
if frame:
LOG.info(f'GOT FRAME: {frame} calling {callback}')
kwargs = {
'frame': frame,
}
callback(**kwargs)
callback(frame)
def read_frame(self):
"""Read a frame from the KISS interface.

View File

@ -3,12 +3,12 @@ import click.shell_completion
from aprsd.main import cli
CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"])
CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])
@cli.command()
@click.argument(
"shell", type=click.Choice(list(click.shell_completion._available_shells))
'shell', type=click.Choice(list(click.shell_completion._available_shells))
)
def completion(shell):
"""Show the shell completion code"""
@ -16,10 +16,10 @@ def completion(shell):
cls = click.shell_completion.get_completion_class(shell)
prog_name = _detect_program_name()
complete_var = f"_{prog_name}_COMPLETE".replace("-", "_").upper()
complete_var = f'_{prog_name}_COMPLETE'.replace('-', '_').upper()
print(cls(cli, {}, prog_name, complete_var).source())
print(
"# Add the following line to your shell configuration file to have aprsd command line completion"
'# Add the following line to your shell configuration file to have aprsd command line completion'
)
print("# but remove the leading '#' character.")
print(f'# eval "$(aprsd completion {shell})"')

View File

@ -10,7 +10,7 @@ import click
from oslo_config import cfg
import aprsd
from aprsd import cli_helper, conf, packets, plugin, utils
from aprsd import cli_helper, packets, plugin, utils
# local imports here
from aprsd.main import cli
@ -79,12 +79,13 @@ def test_plugin(
CONF.log_opt_values(LOG, logging.DEBUG)
if not aprs_login:
if CONF.aprs_network.login == conf.client.DEFAULT_LOGIN:
click.echo('Must set --aprs_login or APRS_LOGIN')
if CONF.callsign == 'NOCALL':
click.echo(
'Must set --aprs_login or APRS_LOGIN, or set callsign in config ([DEFAULT])'
)
ctx.exit(-1)
return
else:
fromcall = CONF.aprs_network.login
fromcall = CONF.callsign
else:
fromcall = aprs_login
@ -129,6 +130,9 @@ def test_plugin(
LOG.info(f"P'{plugin_path}' F'{fromcall}' C'{message}'")
for _ in range(number):
# PluginManager.run() executes all plugins in parallel
# Results may be in a different order than plugin registration
# NULL_MESSAGE results are already filtered out
replies = pm.run(packet)
# Plugin might have threads, so lets stop them so we can exit.
# obj.stop_threads()
@ -149,12 +153,15 @@ def test_plugin(
elif isinstance(reply, packets.Packet):
# We have a message based object.
LOG.info(reply)
elif reply is not packets.NULL_MESSAGE:
LOG.info(
packets.MessagePacket(
from_call=CONF.callsign,
to_call=fromcall,
message_text=reply,
),
)
else:
# Note: NULL_MESSAGE results are already filtered out
# in PluginManager.run(), but keeping this check for safety
if reply is not packets.NULL_MESSAGE:
LOG.info(
packets.MessagePacket(
from_call=CONF.callsign,
to_call=fromcall,
message_text=reply,
),
)
pm.stop()

View File

@ -205,7 +205,8 @@ def dump_stats(ctx, raw, show_section):
console.print(stats[section])
return
t = Table(title='APRSD Stats')
aprsd_stats_count = len(stats['APRSDStats'])
t = Table(title=f'APRSD Stats ({aprsd_stats_count})')
t.add_column('Key')
t.add_column('Value')
for key, value in stats['APRSDStats'].items():
@ -215,7 +216,8 @@ def dump_stats(ctx, raw, show_section):
console.print(t)
# Show the thread list
t = Table(title='Thread List')
thread_list_count = len(stats['APRSDThreadList'])
t = Table(title=f'Thread List ({thread_list_count})')
t.add_column('Name')
t.add_column('Class')
t.add_column('Alive?')
@ -234,7 +236,8 @@ def dump_stats(ctx, raw, show_section):
console.print(t)
# Show the plugins
t = Table(title='Plugin List')
plugin_count = len(stats['PluginManager'])
t = Table(title=f'Plugin List ({plugin_count})')
t.add_column('Name')
t.add_column('Enabled')
t.add_column('Version')
@ -253,7 +256,8 @@ def dump_stats(ctx, raw, show_section):
console.print(t)
# Now show the client stats
t = Table(title='Client Stats')
client_stats_count = len(stats['APRSClientStats'])
t = Table(title=f'Client Stats ({client_stats_count})')
t.add_column('Key')
t.add_column('Value')
for key, value in stats['APRSClientStats'].items():
@ -264,7 +268,12 @@ def dump_stats(ctx, raw, show_section):
# now show the packet list
packet_list = stats.get('PacketList')
t = Table(title='Packet List')
# Count packet types if 'packets' key exists, otherwise count top-level keys
if 'packets' in packet_list:
packet_count = len(packet_list['packets'])
else:
packet_count = len(packet_list)
t = Table(title=f'Packet List ({packet_count})')
t.add_column('Key')
t.add_column('Value')
t.add_row('Total Received', str(packet_list['rx']))
@ -275,10 +284,15 @@ def dump_stats(ctx, raw, show_section):
# now show the seen list
seen_list = stats.get('SeenList')
sorted_seen_list = sorted(
seen_list.items(),
seen_list_count = len(seen_list) if seen_list else 0
sorted_seen_list = (
sorted(
seen_list.items(),
)
if seen_list
else []
)
t = Table(title='Seen List')
t = Table(title=f'Seen List ({seen_list_count})')
t.add_column('Callsign')
t.add_column('Message Count')
t.add_column('Last Heard')
@ -294,10 +308,15 @@ def dump_stats(ctx, raw, show_section):
# now show the watch list
watch_list = stats.get('WatchList')
sorted_watch_list = sorted(
watch_list.items(),
watch_list_count = len(watch_list) if watch_list else 0
sorted_watch_list = (
sorted(
watch_list.items(),
)
if watch_list
else []
)
t = Table(title='Watch List')
t = Table(title=f'Watch List ({watch_list_count})')
t.add_column('Callsign')
t.add_column('Last Heard')
for key, value in sorted_watch_list:

View File

@ -25,23 +25,23 @@ from aprsd.threads import stats as stats_threads
# setup the global logger
# log.basicConfig(level=log.DEBUG) # level=10
CONF = cfg.CONF
LOG = logging.getLogger("APRSD")
LOG = logging.getLogger('APRSD')
console = Console()
@cli.command()
@cli_helper.add_options(cli_helper.common_options)
@click.option(
"--timeout",
'--timeout',
show_default=True,
default=3,
help="How long to wait for healtcheck url to come back",
help='How long to wait for healthcheck url to come back',
)
@click.pass_context
@cli_helper.process_standard_options
def healthcheck(ctx, timeout):
"""Check the health of the running aprsd server."""
ver_str = f"APRSD HealthCheck version: {aprsd.__version__}"
ver_str = f'APRSD HealthCheck version: {aprsd.__version__}'
console.log(ver_str)
with console.status(ver_str):
@ -56,33 +56,33 @@ def healthcheck(ctx, timeout):
else:
now = datetime.datetime.now()
if not stats:
console.log("No stats from aprsd")
console.log('No stats from aprsd')
sys.exit(-1)
email_stats = stats.get("EmailStats")
email_stats = stats.get('EmailStats')
if email_stats:
email_thread_last_update = email_stats["last_check_time"]
email_thread_last_update = email_stats['last_check_time']
if email_thread_last_update != "never":
if email_thread_last_update != 'never':
d = now - email_thread_last_update
max_timeout = {"hours": 0.0, "minutes": 5, "seconds": 30}
max_timeout = {'hours': 0.0, 'minutes': 5, 'seconds': 30}
max_delta = datetime.timedelta(**max_timeout)
if d > max_delta:
console.log(f"Email thread is very old! {d}")
console.log(f'Email thread is very old! {d}')
sys.exit(-1)
client_stats = stats.get("APRSClientStats")
client_stats = stats.get('APRSClientStats')
if not client_stats:
console.log("No APRSClientStats")
console.log('No APRSClientStats')
sys.exit(-1)
else:
aprsis_last_update = client_stats["connection_keepalive"]
aprsis_last_update = client_stats['connection_keepalive']
d = now - aprsis_last_update
max_timeout = {"hours": 0.0, "minutes": 5, "seconds": 0}
max_timeout = {'hours': 0.0, 'minutes': 5, 'seconds': 0}
max_delta = datetime.timedelta(**max_timeout)
if d > max_delta:
LOG.error(f"APRS-IS last update is very old! {d}")
LOG.error(f'APRS-IS last update is very old! {d}')
sys.exit(-1)
console.log("OK")
console.log('OK')
sys.exit(0)

View File

@ -3,13 +3,16 @@
#
# python included libs
import cProfile
import datetime
import logging
import pstats
import signal
import sys
import time
import click
import requests
from loguru import logger
from oslo_config import cfg
from rich.console import Console
@ -19,8 +22,7 @@ import aprsd
from aprsd import cli_helper, packets, plugin, threads, utils
from aprsd.client.client import APRSDClient
from aprsd.main import cli
from aprsd.packets import collector as packet_collector
from aprsd.packets import core, seen_list
from aprsd.packets import core
from aprsd.packets import log as packet_log
from aprsd.packets.filter import PacketFilter
from aprsd.packets.filters import dupe_filter, packet_type
@ -28,6 +30,7 @@ from aprsd.stats import collector
from aprsd.threads import keepalive, rx
from aprsd.threads import stats as stats_thread
from aprsd.threads.aprsd import APRSDThread
from aprsd.threads.stats import StatsLogThread
# setup the global logger
# log.basicConfig(level=log.DEBUG) # level=10
@ -68,87 +71,61 @@ class APRSDListenProcessThread(rx.APRSDFilterThread):
def print_packet(self, packet):
if self.log_packets:
packet_log.log(packet)
packet_log.log(
packet,
packet_count=self.packet_count,
force_log=True,
)
def process_packet(self, packet: type[core.Packet]):
if self.plugin_manager:
# Don't do anything with the reply.
# This is the listen only command.
# PluginManager.run() executes all plugins in parallel
# Results may be in a different order than plugin registration
self.plugin_manager.run(packet)
class ListenStatsThread(APRSDThread):
"""Log the stats from the PacketList."""
class StatsExportThread(APRSDThread):
"""Export stats to remote aprsd-exporter API."""
def __init__(self):
super().__init__('PacketStatsLog')
self._last_total_rx = 0
self.period = 31
self.start_time = time.time()
def __init__(self, exporter_url):
super().__init__('StatsExport')
self.exporter_url = exporter_url
self.period = 10  # Export stats every 10 seconds
def loop(self):
if self.loop_count % self.period == 0:
# log the stats every 10 seconds
stats_json = collector.Collector().collect()
stats = stats_json['PacketList']
total_rx = stats['rx']
packet_count = len(stats['packets'])
rx_delta = total_rx - self._last_total_rx
rate = rx_delta / self.period
try:
# Collect all stats
stats_json = collector.Collector().collect(serializable=True)
# Remove the PacketList section to reduce payload size
if 'PacketList' in stats_json:
del stats_json['PacketList']['packets']
# Get unique callsigns count from packets' from_call field
unique_callsigns = set()
if 'packets' in stats and stats['packets']:
for packet in stats['packets']:
# Handle both Packet objects and dicts (if serializable)
if hasattr(packet, 'from_call'):
if packet.from_call:
unique_callsigns.add(packet.from_call)
elif isinstance(packet, dict) and 'from_call' in packet:
if packet['from_call']:
unique_callsigns.add(packet['from_call'])
unique_callsigns_count = len(unique_callsigns)
now = datetime.datetime.now()
time_format = '%m-%d-%Y %H:%M:%S'
stats = {
'time': now.strftime(time_format),
'stats': stats_json,
}
# Calculate uptime
elapsed = time.time() - self.start_time
elapsed_minutes = elapsed / 60
elapsed_hours = elapsed / 3600
# Send stats to exporter API
url = f'{self.exporter_url}/stats'
headers = {'Content-Type': 'application/json'}
response = requests.post(url, json=stats, headers=headers, timeout=10)
# Log summary stats
LOGU.opt(colors=True).info(
f'<green>RX Rate: {rate:.2f} pps</green> '
f'<yellow>Total RX: {total_rx}</yellow> '
f'<red>RX Last {self.period} secs: {rx_delta}</red> '
f'<white>Packets in PacketListStats: {packet_count}</white>',
)
LOGU.opt(colors=True).info(
f'<cyan>Uptime: {elapsed:.0f}s ({elapsed_minutes:.1f}m / {elapsed_hours:.2f}h)</cyan> '
f'<magenta>Unique Callsigns: {unique_callsigns_count}</magenta>',
)
self._last_total_rx = total_rx
if response.status_code == 200:
LOGU.info(f'Successfully exported stats to {self.exporter_url}')
else:
LOGU.warning(
f'Failed to export stats to {self.exporter_url}: HTTP {response.status_code}'
)
# Log individual type stats, sorted by RX count (descending)
sorted_types = sorted(
stats['types'].items(), key=lambda x: x[1]['rx'], reverse=True
)
for k, v in sorted_types:
# Calculate percentage of this packet type compared to total RX
percentage = (v['rx'] / total_rx * 100) if total_rx > 0 else 0.0
# Format values first, then apply colors
packet_type_str = f'{k:<15}'
rx_count_str = f'{v["rx"]:6d}'
tx_count_str = f'{v["tx"]:6d}'
percentage_str = f'{percentage:5.1f}%'
# Use different colors for RX count based on threshold (matching mqtt_injest.py)
rx_color_tag = (
'green' if v['rx'] > 100 else 'yellow' if v['rx'] > 10 else 'red'
)
LOGU.opt(colors=True).info(
f' <cyan>{packet_type_str}</cyan>: '
f'<{rx_color_tag}>RX: {rx_count_str}</{rx_color_tag}> '
f'<red>TX: {tx_count_str}</red> '
f'<magenta>({percentage_str})</magenta>',
)
except requests.exceptions.RequestException as e:
LOGU.error(f'Error exporting stats to {self.exporter_url}: {e}')
except Exception as e:
LOGU.error(f'Unexpected error in stats export: {e}')
time.sleep(1)
return True
@ -218,6 +195,23 @@ class ListenStatsThread(APRSDThread):
is_flag=True,
help='Enable packet stats periodic logging.',
)
@click.option(
'--export-stats',
default=False,
is_flag=True,
help='Export stats to remote aprsd-exporter API.',
)
@click.option(
'--exporter-url',
default='http://localhost:8081',
help='URL of the aprsd-exporter API to send stats to.',
)
@click.option(
'--profile',
default=False,
is_flag=True,
help='Enable Python cProfile profiling to identify performance bottlenecks.',
)
@click.pass_context
@cli_helper.process_standard_options
def listen(
@ -230,6 +224,9 @@ def listen(
filter,
log_packets,
enable_packet_stats,
export_stats,
exporter_url,
profile,
):
"""Listen to packets on the APRS-IS Network based on FILTER.
@ -241,6 +238,13 @@ def listen(
o/obj1/obj2... - Object Filter Pass all objects with the exact name of obj1, obj2, ... (* wild card allowed)\n
"""
# Initialize profiler if enabled
profiler = None
if profile:
LOG.info('Starting Python cProfile profiling')
profiler = cProfile.Profile()
profiler.enable()
signal.signal(signal.SIGINT, signal_handler)
signal.signal(signal.SIGTERM, signal_handler)
@ -256,9 +260,6 @@ def listen(
ctx.fail('Must set --aprs-password or APRS_PASSWORD')
ctx.exit()
# CONF.aprs_network.login = aprs_login
# config["aprs"]["password"] = aprs_password
LOG.info(f'Python version: {sys.version}')
LOG.info(f'APRSD Listen Started version: {aprsd.__version__}')
utils.package.log_installed_extensions_and_plugins()
@ -292,10 +293,6 @@ def listen(
keepalive_thread = keepalive.KeepAliveThread()
if not CONF.enable_seen_list:
# just deregister the class from the packet collector
packet_collector.PacketCollector().unregister(seen_list.SeenList)
# we don't want the dupe filter to run here.
PacketFilter().unregister(dupe_filter.DupePacketFilter)
if packet_filter:
@ -326,6 +323,11 @@ def listen(
for p in pm.get_plugins():
LOG.info('Loaded plugin %s', p.__class__.__name__)
if log_packets:
LOG.info('Packet Logging is enabled')
else:
LOG.info('Packet Logging is disabled')
stats = stats_thread.APRSDStatsStoreThread()
stats.start()
@ -346,13 +348,44 @@ def listen(
LOG.debug(f'enable_packet_stats: {enable_packet_stats}')
if enable_packet_stats:
LOG.debug('Start ListenStatsThread')
listen_stats = ListenStatsThread()
LOG.debug('Start StatsLogThread')
listen_stats = StatsLogThread()
listen_stats.start()
LOG.debug(f'export_stats: {export_stats}')
stats_export = None
if export_stats:
LOG.debug('Start StatsExportThread')
stats_export = StatsExportThread(exporter_url)
stats_export.start()
keepalive_thread.start()
LOG.debug('keepalive Join')
keepalive_thread.join()
rx_thread.join()
listen_thread.join()
stats.join()
if stats_export:
stats_export.join()
# Save profiling results if enabled
if profiler:
profiler.disable()
profile_file = 'aprsd_listen_profile.prof'
profiler.dump_stats(profile_file)
LOG.info(f'Profile saved to {profile_file}')
# Print profiling summary
LOG.info('Profile Summary (top 50 functions by cumulative time):')
stats = pstats.Stats(profiler)
stats.sort_stats('cumulative')
# Log the top functions
LOG.info('-' * 80)
for item in stats.get_stats().items()[:50]:
func_info, stats_tuple = item
cumulative = stats_tuple[3]
total_calls = stats_tuple[0]
LOG.info(
f'{func_info} - Calls: {total_calls}, Cumulative: {cumulative:.4f}s'
)

View File

@ -9,12 +9,7 @@ from oslo_config import cfg
import aprsd
import aprsd.packets # noqa : F401
from aprsd import (
cli_helper,
conf, # noqa : F401
packets,
utils,
)
from aprsd import cli_helper, packets, utils
from aprsd.client.client import APRSDClient
from aprsd.main import cli
from aprsd.packets import collector
@ -75,12 +70,13 @@ def send_message(
quiet = ctx.obj['quiet']
if not aprs_login:
if CONF.aprs_network.login == conf.client.DEFAULT_LOGIN:
click.echo('Must set --aprs_login or APRS_LOGIN')
if CONF.callsign == 'NOCALL':
click.echo(
'Must set --aprs_login or APRS_LOGIN, or set callsign in config ([DEFAULT])'
)
ctx.exit(-1)
return
else:
aprs_login = CONF.aprs_network.login
aprs_login = CONF.callsign
if not aprs_password:
if not CONF.aprs_network.password:

View File

@ -15,6 +15,7 @@ from aprsd.packets import collector as packet_collector
from aprsd.packets import seen_list
from aprsd.threads import keepalive, registry, rx, service, tx
from aprsd.threads import stats as stats_thread
from aprsd.threads.stats import StatsLogThread
CONF = cfg.CONF
LOG = logging.getLogger('APRSD')
@ -42,9 +43,15 @@ def _is_aprsd_gps_extension_installed():
default=False,
help='Flush out all old aged messages on disk.',
)
@click.option(
'--enable-packet-stats',
default=False,
is_flag=True,
help='Enable packet stats periodic logging.',
)
@click.pass_context
@cli_helper.process_standard_options
def server(ctx, flush):
def server(ctx, flush, enable_packet_stats):
"""Start the aprsd server gateway process."""
signal.signal(signal.SIGINT, aprsd_main.signal_handler)
signal.signal(signal.SIGTERM, aprsd_main.signal_handler)
@ -165,6 +172,11 @@ def server(ctx, flush):
LOG.info('Registry Enabled. Starting Registry thread.')
service_threads.register(registry.APRSRegistryThread())
if enable_packet_stats:
LOG.debug('Start StatsLogThread')
listen_stats = StatsLogThread()
listen_stats.start()
service_threads.start()
service_threads.join()

View File

@ -2,7 +2,6 @@ from oslo_config import cfg
from aprsd.conf import client, common, log, plugin_common
CONF = cfg.CONF
log.register_opts(CONF)
@ -37,19 +36,19 @@ def conf_to_dict():
def _sanitize(opt, value):
"""Obfuscate values of options declared secret."""
return value if not opt.secret else "*" * 4
return value if not opt.secret else '*' * 4
for opt_name in sorted(CONF._opts):
opt = CONF._get_opt_info(opt_name)["opt"]
opt = CONF._get_opt_info(opt_name)['opt']
val = str(_sanitize(opt, getattr(CONF, opt_name)))
entries[str(opt)] = val
for group_name in list(CONF._groups):
group_attr = CONF.GroupAttr(CONF, CONF._get_group(group_name))
for opt_name in sorted(CONF._groups[group_name]._opts):
opt = CONF._get_opt_info(opt_name, group_name)["opt"]
opt = CONF._get_opt_info(opt_name, group_name)['opt']
val = str(_sanitize(opt, getattr(group_attr, opt_name)))
gname_opt_name = f"{group_name}.{opt_name}"
gname_opt_name = f'{group_name}.{opt_name}'
entries[gname_opt_name] = val
return entries

View File

@ -4,107 +4,100 @@ The options for log setup
from oslo_config import cfg
DEFAULT_LOGIN = "NOCALL"
aprs_group = cfg.OptGroup(
name="aprs_network",
title="APRS-IS Network settings",
name='aprs_network',
title='APRS-IS Network settings',
)
kiss_serial_group = cfg.OptGroup(
name="kiss_serial",
title="KISS Serial device connection",
name='kiss_serial',
title='KISS Serial device connection',
)
kiss_tcp_group = cfg.OptGroup(
name="kiss_tcp",
title="KISS TCP/IP Device connection",
name='kiss_tcp',
title='KISS TCP/IP Device connection',
)
fake_client_group = cfg.OptGroup(
name="fake_client",
title="Fake Client settings",
name='fake_client',
title='Fake Client settings',
)
aprs_opts = [
cfg.BoolOpt(
"enabled",
'enabled',
default=True,
help="Set enabled to False if there is no internet connectivity."
"This is useful for a direwolf KISS aprs connection only.",
help='Set enabled to False if there is no internet connectivity.'
'This is useful for a direwolf KISS aprs connection only.',
),
cfg.StrOpt(
"login",
default=DEFAULT_LOGIN,
help="APRS Username",
),
cfg.StrOpt(
"password",
'password',
secret=True,
help="APRS Password "
"Get the passcode for your callsign here: "
"https://apps.magicbug.co.uk/passcode",
help='APRS Password for the callsign in [DEFAULT]. '
'Get the passcode for your callsign here: '
'https://apps.magicbug.co.uk/passcode',
),
cfg.HostAddressOpt(
"host",
default="noam.aprs2.net",
help="The APRS-IS hostname",
'host',
default='noam.aprs2.net',
help='The APRS-IS hostname',
),
cfg.PortOpt(
"port",
'port',
default=14580,
help="APRS-IS port",
help='APRS-IS port',
),
]
kiss_serial_opts = [
cfg.BoolOpt(
"enabled",
'enabled',
default=False,
help="Enable Serial KISS interface connection.",
help='Enable Serial KISS interface connection.',
),
cfg.StrOpt(
"device",
help="Serial Device file to use. /dev/ttyS0",
'device',
help='Serial Device file to use. /dev/ttyS0',
),
cfg.IntOpt(
"baudrate",
'baudrate',
default=9600,
help="The Serial device baud rate for communication",
help='The Serial device baud rate for communication',
),
cfg.ListOpt(
"path",
default=["WIDE1-1", "WIDE2-1"],
help="The APRS path to use for wide area coverage.",
'path',
default=['WIDE1-1', 'WIDE2-1'],
help='The APRS path to use for wide area coverage.',
),
]
kiss_tcp_opts = [
cfg.BoolOpt(
"enabled",
'enabled',
default=False,
help="Enable Serial KISS interface connection.",
help='Enable Serial KISS interface connection.',
),
cfg.HostAddressOpt(
"host",
help="The KISS TCP Host to connect to.",
'host',
help='The KISS TCP Host to connect to.',
),
cfg.PortOpt(
"port",
'port',
default=8001,
help="The KISS TCP/IP network port",
help='The KISS TCP/IP network port',
),
cfg.ListOpt(
"path",
default=["WIDE1-1", "WIDE2-1"],
help="The APRS path to use for wide area coverage.",
'path',
default=['WIDE1-1', 'WIDE2-1'],
help='The APRS path to use for wide area coverage.',
),
]
fake_client_opts = [
cfg.BoolOpt(
"enabled",
'enabled',
default=False,
help="Enable fake client connection.",
help='Enable fake client connection.',
),
]

View File

@ -22,6 +22,11 @@ aprsd_opts = [
default='NOCALL',
help='Callsign to use for messages sent by APRSD',
),
cfg.StrOpt(
'owner_callsign',
default=None,
help='The ham radio license callsign that owns this APRSD instance.',
),
cfg.BoolOpt(
'enable_save',
default=True,

View File

@ -31,7 +31,7 @@ import importlib
import os
import pkgutil
LIST_OPTS_FUNC_NAME = "list_opts"
LIST_OPTS_FUNC_NAME = 'list_opts'
def _tupleize(dct):
@ -51,7 +51,7 @@ def _list_module_names():
module_names = []
package_path = os.path.dirname(os.path.abspath(__file__))
for _, modname, ispkg in pkgutil.iter_modules(path=[package_path]):
if modname == "opts" or ispkg:
if modname == 'opts' or ispkg:
continue
else:
module_names.append(modname)
@ -61,11 +61,11 @@ def _list_module_names():
def _import_modules(module_names):
imported_modules = []
for modname in module_names:
mod = importlib.import_module("aprsd.conf." + modname)
mod = importlib.import_module('aprsd.conf.' + modname)
if not hasattr(mod, LIST_OPTS_FUNC_NAME):
msg = (
"The module 'aprsd.conf.%s' should have a '%s' "
"function which returns the config options."
'function which returns the config options.'
% (modname, LIST_OPTS_FUNC_NAME)
)
raise Exception(msg)

View File

@ -1,55 +1,55 @@
from oslo_config import cfg
aprsfi_group = cfg.OptGroup(
name="aprs_fi",
title="APRS.FI website settings",
name='aprs_fi',
title='APRS.FI website settings',
)
query_group = cfg.OptGroup(
name="query_plugin",
title="Options for the Query Plugin",
name='query_plugin',
title='Options for the Query Plugin',
)
avwx_group = cfg.OptGroup(
name="avwx_plugin",
title="Options for the AVWXWeatherPlugin",
name='avwx_plugin',
title='Options for the AVWXWeatherPlugin',
)
owm_wx_group = cfg.OptGroup(
name="owm_weather_plugin",
title="Options for the OWMWeatherPlugin",
name='owm_weather_plugin',
title='Options for the OWMWeatherPlugin',
)
aprsfi_opts = [
cfg.StrOpt(
"apiKey",
help="Get the apiKey from your aprs.fi account here:" "http://aprs.fi/account",
'apiKey',
help='Get the apiKey from your aprs.fi account here:http://aprs.fi/account',
),
]
owm_wx_opts = [
cfg.StrOpt(
"apiKey",
'apiKey',
help="OWMWeatherPlugin api key to OpenWeatherMap's API."
"This plugin uses the openweathermap API to fetch"
"location and weather information."
"To use this plugin you need to get an openweathermap"
"account and apikey."
"https://home.openweathermap.org/api_keys",
'This plugin uses the openweathermap API to fetch'
'location and weather information.'
'To use this plugin you need to get an openweathermap'
'account and apikey.'
'https://home.openweathermap.org/api_keys',
),
]
avwx_opts = [
cfg.StrOpt(
"apiKey",
help="avwx-api is an opensource project that has"
"a hosted service here: https://avwx.rest/"
"You can launch your own avwx-api in a container"
"by cloning the githug repo here:"
"https://github.com/avwx-rest/AVWX-API",
'apiKey',
help='avwx-api is an opensource project that has'
'a hosted service here: https://avwx.rest/'
'You can launch your own avwx-api in a container'
'by cloning the github repo here:'
'https://github.com/avwx-rest/AVWX-API',
),
cfg.StrOpt(
"base_url",
default="https://avwx.rest",
help="The base url for the avwx API. If you are hosting your own"
"Here is where you change the url to point to yours.",
'base_url',
default='https://avwx.rest',
help='The base url for the avwx API. If you are hosting your own'
'Here is where you change the url to point to yours.',
),
]

View File

@ -4,8 +4,7 @@ from typing import Callable, Protocol, runtime_checkable
from aprsd.packets import core
from aprsd.utils import singleton
LOG = logging.getLogger("APRSD")
LOG = logging.getLogger('APRSD')
@runtime_checkable
@ -36,12 +35,12 @@ class PacketCollector:
def register(self, monitor: Callable) -> None:
if not isinstance(monitor, PacketMonitor):
raise TypeError(f"Monitor {monitor} is not a PacketMonitor")
raise TypeError(f'Monitor {monitor} is not a PacketMonitor')
self.monitors.append(monitor)
def unregister(self, monitor: Callable) -> None:
if not isinstance(monitor, PacketMonitor):
raise TypeError(f"Monitor {monitor} is not a PacketMonitor")
raise TypeError(f'Monitor {monitor} is not a PacketMonitor')
self.monitors.remove(monitor)
def rx(self, packet: type[core.Packet]) -> None:
@ -50,7 +49,7 @@ class PacketCollector:
try:
cls.rx(packet)
except Exception as e:
LOG.error(f"Error in monitor {name} (rx): {e}")
LOG.error(f'Error in monitor {name} (rx): {e}')
def tx(self, packet: type[core.Packet]) -> None:
for name in self.monitors:
@ -58,7 +57,7 @@ class PacketCollector:
try:
cls.tx(packet)
except Exception as e:
LOG.error(f"Error in monitor {name} (tx): {e}")
LOG.error(f'Error in monitor {name} (tx): {e}')
def flush(self):
"""Call flush on the objects. This is used to flush out any data."""
@ -67,7 +66,7 @@ class PacketCollector:
try:
cls.flush()
except Exception as e:
LOG.error(f"Error in monitor {name} (flush): {e}")
LOG.error(f'Error in monitor {name} (flush): {e}')
def load(self):
"""Call load on the objects. This is used to load any data."""
@ -76,4 +75,4 @@ class PacketCollector:
try:
cls.load()
except Exception as e:
LOG.error(f"Error in monitor {name} (load): {e}")
LOG.error(f'Error in monitor {name} (load): {e}')

View File

@ -514,8 +514,13 @@ class WeatherPacket(GPSPacket, DataClassJsonMixin):
speed: Optional[float] = field(default=None)
def _translate(self, raw: dict) -> dict:
for key in raw['weather']:
raw[key] = raw['weather'][key]
# aprslib returns the weather data in a 'weather' key
# We need to move the data out of the 'weather' key
# and into the root of the dictionary
if 'weather' in raw:
for key in raw['weather']:
raw[key] = raw['weather'][key]
del raw['weather']
# If we have the broken aprslib, then we need to
# Convert the course and speed to wind_speed and wind_direction
@ -531,28 +536,27 @@ class WeatherPacket(GPSPacket, DataClassJsonMixin):
wind_speed = raw.get('speed')
if wind_speed:
raw['wind_speed'] = round(wind_speed / 1.852, 3)
raw['weather']['wind_speed'] = raw['wind_speed']
# raw['weather']['wind_speed'] = raw['wind_speed']
if 'speed' in raw:
del raw['speed']
# Let's adjust the rain numbers as well, since it's wrong
raw['rain_1h'] = round((raw.get('rain_1h', 0) / 0.254) * 0.01, 3)
raw['weather']['rain_1h'] = raw['rain_1h']
# raw['weather']['rain_1h'] = raw['rain_1h']
raw['rain_24h'] = round((raw.get('rain_24h', 0) / 0.254) * 0.01, 3)
raw['weather']['rain_24h'] = raw['rain_24h']
# raw['weather']['rain_24h'] = raw['rain_24h']
raw['rain_since_midnight'] = round(
(raw.get('rain_since_midnight', 0) / 0.254) * 0.01, 3
)
raw['weather']['rain_since_midnight'] = raw['rain_since_midnight']
# raw['weather']['rain_since_midnight'] = raw['rain_since_midnight']
if 'wind_direction' not in raw:
wind_direction = raw.get('course')
if wind_direction:
raw['wind_direction'] = wind_direction
raw['weather']['wind_direction'] = raw['wind_direction']
# raw['weather']['wind_direction'] = raw['wind_direction']
if 'course' in raw:
del raw['course']
del raw['weather']
return raw
@classmethod

View File

@ -20,6 +20,9 @@ class DupePacketFilter:
timeframe, then it's a dupe.
"""
def __init__(self):
self.pl = packets.PacketList()
def filter(self, packet: type[core.Packet]) -> Union[type[core.Packet], None]:
# LOG.debug(f"{self.__class__.__name__}.filter called for packet {packet}")
"""Filter a packet out if it's already been seen and processed."""
@ -32,12 +35,11 @@ class DupePacketFilter:
# Make sure we aren't re-processing the same packet
# For RF based APRS Clients we can get duplicate packets
# So we need to track them and not process the dupes.
pkt_list = packets.PacketList()
found = False
try:
# Find the packet in the list of already seen packets
# Based on the packet.key
found = pkt_list.find(packet)
found = self.pl.find(packet)
if not packet.msgNo:
# If the packet doesn't have a message id
# then there is no reliable way to detect
@ -54,12 +56,13 @@ class DupePacketFilter:
if not packet.processed:
# We haven't processed this packet through the plugins.
return packet
elif packet.timestamp - found.timestamp < CONF.packet_dupe_timeout:
elif abs(packet.timestamp - found.timestamp) < CONF.packet_dupe_timeout:
# If the packet came in within N seconds of the
# Last time seeing the packet, then we drop it as a dupe.
LOG.warning(
f'Packet {packet.from_call}:{packet.msgNo} already tracked, dropping.'
)
return None
else:
LOG.warning(
f'Packet {packet.from_call}:{packet.msgNo} already tracked '

View File

@ -22,10 +22,15 @@ DEGREES_COLOR = 'fg #FFA900'
def log_multiline(
packet, tx: Optional[bool] = False, header: Optional[bool] = True
packet,
tx: Optional[bool] = False,
header: Optional[bool] = True,
force_log: Optional[bool] = False,
) -> None:
"""LOG a packet to the logfile."""
if not CONF.enable_packet_logging:
# If logging is disabled and we're not forcing log, return early
# However, if we're forcing log, we still proceed
if not CONF.enable_packet_logging and not force_log:
return
if CONF.log_packet_format == 'compact':
return
@ -77,12 +82,15 @@ def log_multiline(
if hasattr(packet, 'comment') and packet.comment:
logit.append(f' Comment : {packet.comment}')
raw = packet.raw.replace('<', '\\<')
raw = packet.raw
if raw:
raw = raw.replace('<', '\\<')
else:
raw = ''
logit.append(f' Raw : <fg #828282>{raw}</fg #828282>')
logit.append(f'{header_str}________(<{PACKET_COLOR}>{name}</{PACKET_COLOR}>)')
LOGU.opt(colors=True).info('\n'.join(logit))
LOG.debug(repr(packet))
def log(
@ -90,13 +98,19 @@ def log(
tx: Optional[bool] = False,
header: Optional[bool] = True,
packet_count: Optional[int] = None,
force_log: Optional[bool] = False,
) -> None:
if not CONF.enable_packet_logging:
return
if CONF.log_packet_format == 'multiline':
log_multiline(packet, tx, header)
# If logging is disabled and we're not forcing log, return early
if not CONF.enable_packet_logging and not force_log:
return
# Handle multiline format
if CONF.log_packet_format == 'multiline':
log_multiline(packet, tx, header, force_log)
return
# Handle compact format - this is the default case
# This is the compact format logging logic (which was unreachable before)
if not packet_count:
packet_count = ''
else:
@ -168,4 +182,6 @@ def log(
)
LOGU.opt(colors=True).info(' '.join(logit))
log_multiline(packet, tx, header)
# Note: We don't call log_multiline again here for compact format since it's already handled above
if CONF.log_packet_format == 'both':
log_multiline(packet, tx, header, force_log)

View File

@ -1,8 +1,10 @@
import logging
import threading
from collections import OrderedDict
from oslo_config import cfg
from aprsd import conf # noqa: F401
from aprsd.packets import core
from aprsd.utils import objectstore
@ -21,6 +23,7 @@ class PacketList(objectstore.ObjectStoreMixin):
def __new__(cls, *args, **kwargs):
if cls._instance is None:
cls._instance = super().__new__(cls)
cls.lock = threading.RLock()
cls._instance.maxlen = CONF.packet_list_maxlen
cls._instance._init_data()
return cls._instance

View File

@ -1,14 +1,14 @@
import datetime
import logging
import threading
from oslo_config import cfg
from aprsd.packets import core
from aprsd.utils import objectstore
CONF = cfg.CONF
LOG = logging.getLogger("APRSD")
LOG = logging.getLogger('APRSD')
class SeenList(objectstore.ObjectStoreMixin):
@ -20,13 +20,27 @@ class SeenList(objectstore.ObjectStoreMixin):
def __new__(cls, *args, **kwargs):
if cls._instance is None:
cls._instance = super().__new__(cls)
cls._instance.lock = threading.RLock()
cls._instance.data = {}
return cls._instance
def stats(self, serializable=False):
"""Return the stats for the PacketTrack class."""
with self.lock:
return self.data
if serializable:
# Convert datetime objects to strings for JSON serialization
serializable_data = {}
for callsign, data in self.data.items():
serializable_data[callsign] = data.copy()
if 'last' in serializable_data[callsign] and isinstance(
serializable_data[callsign]['last'], datetime.datetime
):
serializable_data[callsign]['last'] = serializable_data[
callsign
]['last'].isoformat()
return serializable_data
else:
return self.data
def rx(self, packet: type[core.Packet]):
"""When we get a packet from the network, update the seen list."""
@ -39,11 +53,11 @@ class SeenList(objectstore.ObjectStoreMixin):
return
if callsign not in self.data:
self.data[callsign] = {
"last": None,
"count": 0,
'last': None,
'count': 0,
}
self.data[callsign]["last"] = datetime.datetime.now()
self.data[callsign]["count"] += 1
self.data[callsign]['last'] = datetime.datetime.now()
self.data[callsign]['count'] += 1
def tx(self, packet: type[core.Packet]):
"""We don't care about TX packets."""

View File

@ -1,14 +1,14 @@
import datetime
import logging
import threading
from oslo_config import cfg
from aprsd.packets import core
from aprsd.utils import objectstore
CONF = cfg.CONF
LOG = logging.getLogger("APRSD")
LOG = logging.getLogger('APRSD')
class PacketTrack(objectstore.ObjectStoreMixin):
@ -33,6 +33,7 @@ class PacketTrack(objectstore.ObjectStoreMixin):
def __new__(cls, *args, **kwargs):
if cls._instance is None:
cls._instance = super().__new__(cls)
cls._instance.lock = threading.RLock()
cls._instance._start_time = datetime.datetime.now()
cls._instance._init_store()
return cls._instance
@ -60,18 +61,20 @@ class PacketTrack(objectstore.ObjectStoreMixin):
def stats(self, serializable=False):
with self.lock:
stats = {
"total_tracked": self.total_tracked,
'total_tracked': self.total_tracked,
}
pkts = {}
for key in self.data:
last_send_time = self.data[key].last_send_time
if serializable and isinstance(last_send_time, datetime.datetime):
last_send_time = last_send_time.isoformat()
pkts[key] = {
"last_send_time": last_send_time,
"send_count": self.data[key].send_count,
"retry_count": self.data[key].retry_count,
"message": self.data[key].raw,
'last_send_time': last_send_time,
'send_count': self.data[key].send_count,
'retry_count': self.data[key].retry_count,
'message': self.data[key].raw,
}
stats["packets"] = pkts
stats['packets'] = pkts
return stats
def rx(self, packet: type[core.Packet]) -> None:
@ -80,7 +83,7 @@ class PacketTrack(objectstore.ObjectStoreMixin):
self._remove(packet.msgNo)
elif isinstance(packet, core.RejectPacket):
self._remove(packet.msgNo)
elif hasattr(packet, "ackMsgNo"):
elif hasattr(packet, 'ackMsgNo'):
# Got a piggyback ack, so remove the original message
self._remove(packet.ackMsgNo)

View File

@ -1,5 +1,6 @@
import datetime
import logging
import threading
from oslo_config import cfg
@ -21,6 +22,7 @@ class WatchList(objectstore.ObjectStoreMixin):
def __new__(cls, *args, **kwargs):
if cls._instance is None:
cls._instance = super().__new__(cls)
cls._instance.lock = threading.RLock()
return cls._instance
@trace.no_trace

View File

@ -7,6 +7,7 @@ import logging
import re
import textwrap
import threading
from concurrent.futures import ThreadPoolExecutor, as_completed
import pluggy
from oslo_config import cfg
@ -49,6 +50,7 @@ class APRSDPluginSpec:
class APRSDPluginBase(metaclass=abc.ABCMeta):
"""The base class for all APRSD Plugins."""
_counter_lock = threading.Lock()
config = None
rx_count = 0
tx_count = 0
@ -106,10 +108,12 @@ class APRSDPluginBase(metaclass=abc.ABCMeta):
return []
def rx_inc(self):
self.rx_count += 1
with self._counter_lock:
self.rx_count += 1
def tx_inc(self):
self.tx_count += 1
with self._counter_lock:
self.tx_count += 1
def stop_threads(self):
"""Stop any threads this plugin might have created."""
@ -513,13 +517,90 @@ class PluginManager:
LOG.info('Completed Plugin Loading.')
def run(self, packet: packets.MessagePacket):
"""Execute all the plugins run method."""
with self.lock:
return self._pluggy_pm.hook.filter(packet=packet)
"""Execute all plugins in parallel.
Plugins are executed concurrently using ThreadPoolExecutor to improve
performance, especially when plugins perform I/O operations (API calls,
subprocess calls, etc.). Each plugin's filter() method is called in
parallel, and results are collected as they complete.
Returns:
tuple: (results, handled) where:
- results: list of non-NULL plugin results
- handled: bool indicating if any plugin processed the message
(even if it returned NULL_MESSAGE)
"""
plugins = list(self._pluggy_pm.get_plugins())
if not plugins:
return ([], False)
results = []
handled = False
# Execute all plugins in parallel
with ThreadPoolExecutor(max_workers=len(plugins)) as executor:
future_to_plugin = {
executor.submit(plugin.filter, packet=packet): plugin
for plugin in plugins
}
for future in as_completed(future_to_plugin):
plugin = future_to_plugin[future]
try:
result = future.result()
# Track if any plugin processed the message (even if NULL_MESSAGE)
if result is not None:
handled = True
# Only include non-NULL results
if result and result is not packets.NULL_MESSAGE:
results.append(result)
except Exception as ex:
LOG.error(
'Plugin {} failed to process packet: {}'.format(
plugin.__class__.__name__,
ex,
),
)
LOG.exception(ex)
return (results, handled)
def run_watchlist(self, packet: packets.Packet):
with self.lock:
return self._watchlist_pm.hook.filter(packet=packet)
"""Execute all watchlist plugins in parallel.
Watchlist plugins are executed concurrently using ThreadPoolExecutor
to improve performance when multiple watchlist plugins are registered.
"""
plugins = list(self._watchlist_pm.get_plugins())
if not plugins:
return []
results = []
# Execute all plugins in parallel
with ThreadPoolExecutor(max_workers=len(plugins)) as executor:
future_to_plugin = {
executor.submit(plugin.filter, packet=packet): plugin
for plugin in plugins
}
for future in as_completed(future_to_plugin):
plugin = future_to_plugin[future]
try:
result = future.result()
# Only include non-NULL results
if result and result is not packets.NULL_MESSAGE:
results.append(result)
except Exception as ex:
LOG.error(
'Watchlist plugin {} failed to process packet: {}'.format(
plugin.__class__.__name__,
ex,
),
)
LOG.exception(ex)
return results
def stop(self):
"""Stop all threads created by all plugins."""

View File

@ -66,8 +66,8 @@ def fetch_openweathermap(api_key, lat, lon, units='metric', exclude=None):
exclude = 'minutely,hourly,daily,alerts'
try:
url = (
"https://api.openweathermap.org/data/3.0/onecall?"
"lat={}&lon={}&appid={}&units={}&exclude={}".format(
'https://api.openweathermap.org/data/3.0/onecall?'
'lat={}&lon={}&appid={}&units={}&exclude={}'.format(
lat,
lon,
api_key,

View File

@ -4,20 +4,19 @@ import time
from aprsd import plugin
from aprsd.utils import trace
LOG = logging.getLogger("APRSD")
LOG = logging.getLogger('APRSD')
class PingPlugin(plugin.APRSDRegexCommandPluginBase):
"""Ping."""
command_regex = r"^([p]|[p]\s|ping)"
command_name = "ping"
short_description = "reply with a Pong!"
command_regex = r'^([p]|[p]\s|ping)'
command_name = 'ping'
short_description = 'reply with a Pong!'
@trace.trace
def process(self, packet):
LOG.info("PingPlugin")
LOG.info('PingPlugin')
# fromcall = packet.get("from")
# message = packet.get("message_text", None)
# ack = packet.get("msgNo", "0")
@ -26,6 +25,6 @@ class PingPlugin(plugin.APRSDRegexCommandPluginBase):
m = stm.tm_min
s = stm.tm_sec
reply = (
"Pong! " + str(h).zfill(2) + ":" + str(m).zfill(2) + ":" + str(s).zfill(2)
'Pong! ' + str(h).zfill(2) + ':' + str(m).zfill(2) + ':' + str(s).zfill(2)
)
return reply.rstrip()

View File

@ -1,25 +1,24 @@
import logging
import re
from oslo_config import cfg
import pytz
from oslo_config import cfg
from tzlocal import get_localzone
from aprsd import packets, plugin, plugin_utils
from aprsd.utils import fuzzy, trace
CONF = cfg.CONF
LOG = logging.getLogger("APRSD")
LOG = logging.getLogger('APRSD')
class TimePlugin(plugin.APRSDRegexCommandPluginBase):
"""Time command."""
# Look for t or t<space> or T<space> or time
command_regex = r"^([t]|[t]\s|time)"
command_name = "time"
short_description = "What is the current local time."
command_regex = r'^([t]|[t]\s|time)'
command_name = 'time'
short_description = 'What is the current local time.'
def _get_local_tz(self):
lz = get_localzone()
@ -33,12 +32,12 @@ class TimePlugin(plugin.APRSDRegexCommandPluginBase):
gmt_t = pytz.utc.localize(utcnow)
local_t = gmt_t.astimezone(localzone)
local_short_str = local_t.strftime("%H:%M %Z")
local_hour = local_t.strftime("%H")
local_min = local_t.strftime("%M")
local_short_str = local_t.strftime('%H:%M %Z')
local_hour = local_t.strftime('%H')
local_min = local_t.strftime('%M')
cur_time = fuzzy(int(local_hour), int(local_min), 1)
reply = "{} ({})".format(
reply = '{} ({})'.format(
cur_time,
local_short_str,
)
@ -47,7 +46,7 @@ class TimePlugin(plugin.APRSDRegexCommandPluginBase):
@trace.trace
def process(self, packet: packets.Packet):
LOG.info("TIME COMMAND")
LOG.info('TIME COMMAND')
# So we can mock this in unit tests
localzone = self._get_local_tz()
return self.build_date_str(localzone)
@ -56,8 +55,8 @@ class TimePlugin(plugin.APRSDRegexCommandPluginBase):
class TimeOWMPlugin(TimePlugin, plugin.APRSFIKEYMixin):
"""OpenWeatherMap based timezone fetching."""
command_regex = r"^([t]|[t]\s|time)"
command_name = "time"
command_regex = r'^([t]|[t]\s|time)'
command_name = 'time'
short_description = "Current time of GPS beacon's timezone. Uses OpenWeatherMap"
def setup(self):
@ -70,7 +69,7 @@ class TimeOWMPlugin(TimePlugin, plugin.APRSFIKEYMixin):
# ack = packet.get("msgNo", "0")
# optional second argument is a callsign to search
a = re.search(r"^.*\s+(.*)", message)
a = re.search(r'^.*\s+(.*)', message)
if a is not None:
searchcall = a.group(1)
searchcall = searchcall.upper()
@ -82,34 +81,34 @@ class TimeOWMPlugin(TimePlugin, plugin.APRSFIKEYMixin):
try:
aprs_data = plugin_utils.get_aprs_fi(api_key, searchcall)
except Exception as ex:
LOG.error(f"Failed to fetch aprs.fi data {ex}")
return "Failed to fetch location"
LOG.error(f'Failed to fetch aprs.fi data {ex}')
return 'Failed to fetch location'
LOG.debug(f"LocationPlugin: aprs_data = {aprs_data}")
if not len(aprs_data["entries"]):
LOG.debug(f'LocationPlugin: aprs_data = {aprs_data}')
if not len(aprs_data['entries']):
LOG.error("Didn't get any entries from aprs.fi")
return "Failed to fetch aprs.fi location"
return 'Failed to fetch aprs.fi location'
lat = aprs_data["entries"][0]["lat"]
lon = aprs_data["entries"][0]["lng"]
lat = aprs_data['entries'][0]['lat']
lon = aprs_data['entries'][0]['lng']
try:
self.config.exists(
["services", "openweathermap", "apiKey"],
['services', 'openweathermap', 'apiKey'],
)
except Exception as ex:
LOG.error(f"Failed to find config openweathermap:apiKey {ex}")
return "No openweathermap apiKey found"
LOG.error(f'Failed to find config openweathermap:apiKey {ex}')
return 'No openweathermap apiKey found'
api_key = self.config["services"]["openweathermap"]["apiKey"]
api_key = self.config['services']['openweathermap']['apiKey']
try:
results = plugin_utils.fetch_openweathermap(api_key, lat, lon)
except Exception as ex:
LOG.error(f"Couldn't fetch openweathermap api '{ex}'")
# default to UTC
localzone = pytz.timezone("UTC")
localzone = pytz.timezone('UTC')
else:
tzone = results["timezone"]
tzone = results['timezone']
localzone = pytz.timezone(tzone)
return self.build_date_str(localzone)

View File

@ -1,31 +1,32 @@
import logging
import aprsd
from aprsd import plugin
from aprsd import conf, plugin
from aprsd.stats import collector
LOG = logging.getLogger("APRSD")
LOG = logging.getLogger('APRSD')
class VersionPlugin(plugin.APRSDRegexCommandPluginBase):
"""Version of APRSD Plugin."""
command_regex = r"^([v]|[v]\s|version)"
command_name = "version"
short_description = "What is the APRSD Version"
command_regex = r'^([v]|[v]\s|version)'
command_name = 'version'
short_description = 'What is the APRSD Version'
# message_number:time combos so we don't resend the same email in
# five mins {int:int}
email_sent_dict = {}
def process(self, packet):
LOG.info("Version COMMAND")
LOG.info('Version COMMAND')
# fromcall = packet.get("from")
# message = packet.get("message_text", None)
# ack = packet.get("msgNo", "0")
s = collector.Collector().collect()
return "APRSD ver:{} uptime:{}".format(
owner = conf.CONF.owner_callsign or '-'
return 'APRSD ver:{} uptime:{} owner:{}'.format(
aprsd.__version__,
s["APRSDStats"]["uptime"],
s['APRSDStats']['uptime'],
owner,
)

View File

@ -4,7 +4,6 @@ from aprsd.packets import packet_list, seen_list, tracker, watch_list
from aprsd.stats import app, collector
from aprsd.threads import aprsd
# Create the collector and register all the objects
# that APRSD has that implement the stats protocol
stats_collector = collector.Collector()

View File

@ -7,7 +7,6 @@ import aprsd
from aprsd import utils
from aprsd.log import log as aprsd_log
CONF = cfg.CONF
@ -37,13 +36,13 @@ class APRSDStats:
if serializable:
uptime = str(uptime)
stats = {
"version": aprsd.__version__,
"uptime": uptime,
"callsign": CONF.callsign,
"memory_current": int(current),
"memory_current_str": utils.human_size(current),
"memory_peak": int(peak),
"memory_peak_str": utils.human_size(peak),
"loging_queue": qsize,
'version': aprsd.__version__,
'uptime': uptime,
'callsign': CONF.callsign,
'memory_current': int(current),
'memory_current_str': utils.human_size(current),
'memory_peak': int(peak),
'memory_peak_str': utils.human_size(peak),
'loging_queue': qsize,
}
return stats

View File

@ -107,7 +107,7 @@ class APRSDThreadList:
'name': th.name,
'class': th.__class__.__name__,
'alive': th.is_alive(),
'age': th.loop_age(),
'age': age,
'loop_count': th.loop_count,
}
return stats
@ -118,7 +118,9 @@ class APRSDThreadList:
@wrapt.synchronized(lock)
def remove(self, thread_obj):
self.threads_list.remove(thread_obj)
"""Remove a thread from the list if it exists."""
if thread_obj in self.threads_list:
self.threads_list.remove(thread_obj)
@wrapt.synchronized(lock)
def stop_all(self):

View File

@ -13,7 +13,7 @@ from aprsd.threads import APRSDThread, APRSDThreadList
from aprsd.utils import keepalive_collector
CONF = cfg.CONF
LOG = logging.getLogger("APRSD")
LOG = logging.getLogger('APRSD')
LOGU = logger
@ -23,8 +23,8 @@ class KeepAliveThread(APRSDThread):
def __init__(self):
tracemalloc.start()
super().__init__("KeepAlive")
max_timeout = {"hours": 0.0, "minutes": 2, "seconds": 0}
super().__init__('KeepAlive')
max_timeout = {'hours': 0.0, 'minutes': 2, 'seconds': 0}
self.max_delta = datetime.timedelta(**max_timeout)
def loop(self):
@ -35,58 +35,58 @@ class KeepAliveThread(APRSDThread):
now = datetime.datetime.now()
if (
"APRSClientStats" in stats_json
and stats_json["APRSClientStats"].get("transport") == "aprsis"
'APRSClientStats' in stats_json
and stats_json['APRSClientStats'].get('transport') == 'aprsis'
):
if stats_json["APRSClientStats"].get("server_keepalive"):
if stats_json['APRSClientStats'].get('server_keepalive'):
last_msg_time = utils.strfdelta(
now - stats_json["APRSClientStats"]["server_keepalive"]
now - stats_json['APRSClientStats']['server_keepalive']
)
else:
last_msg_time = "N/A"
last_msg_time = 'N/A'
else:
last_msg_time = "N/A"
last_msg_time = 'N/A'
tracked_packets = stats_json["PacketTrack"]["total_tracked"]
tracked_packets = stats_json['PacketTrack']['total_tracked']
tx_msg = 0
rx_msg = 0
if "PacketList" in stats_json:
msg_packets = stats_json["PacketList"].get("MessagePacket")
if 'PacketList' in stats_json:
msg_packets = stats_json['PacketList'].get('MessagePacket')
if msg_packets:
tx_msg = msg_packets.get("tx", 0)
rx_msg = msg_packets.get("rx", 0)
tx_msg = msg_packets.get('tx', 0)
rx_msg = msg_packets.get('rx', 0)
keepalive = (
"{} - Uptime {} RX:{} TX:{} Tracker:{} Msgs TX:{} RX:{} "
"Last:{} - RAM Current:{} Peak:{} Threads:{} LoggingQueue:{}"
'{} - Uptime {} RX:{} TX:{} Tracker:{} Msgs TX:{} RX:{} '
'Last:{} - RAM Current:{} Peak:{} Threads:{} LoggingQueue:{}'
).format(
stats_json["APRSDStats"]["callsign"],
stats_json["APRSDStats"]["uptime"],
stats_json['APRSDStats']['callsign'],
stats_json['APRSDStats']['uptime'],
pl.total_rx(),
pl.total_tx(),
tracked_packets,
tx_msg,
rx_msg,
last_msg_time,
stats_json["APRSDStats"]["memory_current_str"],
stats_json["APRSDStats"]["memory_peak_str"],
stats_json['APRSDStats']['memory_current_str'],
stats_json['APRSDStats']['memory_peak_str'],
len(thread_list),
aprsd_log.logging_queue.qsize(),
)
LOG.info(keepalive)
if "APRSDThreadList" in stats_json:
thread_list = stats_json["APRSDThreadList"]
if 'APRSDThreadList' in stats_json:
thread_list = stats_json['APRSDThreadList']
for thread_name in thread_list:
thread = thread_list[thread_name]
alive = thread["alive"]
age = thread["age"]
key = thread["name"]
alive = thread['alive']
age = thread['age']
key = thread['name']
if not alive:
LOG.error(f"Thread {thread}")
LOG.error(f'Thread {thread}')
thread_hex = f"fg {utils.hex_from_name(key)}"
t_name = f"<{thread_hex}>{key:<15}</{thread_hex}>"
thread_msg = f"{t_name} Alive? {str(alive): <5} {str(age): <20}"
thread_hex = f'fg {utils.hex_from_name(key)}'
t_name = f'<{thread_hex}>{key:<15}</{thread_hex}>'
thread_msg = f'{t_name} Alive? {str(alive): <5} {str(age): <20}'
LOGU.opt(colors=True).info(thread_msg)
# LOG.info(f"{key: <15} Alive? {str(alive): <5} {str(age): <20}")

View File

@ -8,7 +8,7 @@ import aprsd
from aprsd import threads as aprsd_threads
CONF = cfg.CONF
LOG = logging.getLogger("APRSD")
LOG = logging.getLogger('APRSD')
class APRSRegistryThread(aprsd_threads.APRSDThread):
@ -17,39 +17,39 @@ class APRSRegistryThread(aprsd_threads.APRSDThread):
_loop_cnt: int = 1
def __init__(self):
super().__init__("APRSRegistryThread")
super().__init__('APRSRegistryThread')
self._loop_cnt = 1
if not CONF.aprs_registry.enabled:
LOG.error(
"APRS Registry is not enabled. ",
'APRS Registry is not enabled. ',
)
LOG.error(
"APRS Registry thread is STOPPING.",
'APRS Registry thread is STOPPING.',
)
self.stop()
LOG.info(
"APRS Registry thread is running and will send "
f"info every {CONF.aprs_registry.frequency_seconds} seconds "
f"to {CONF.aprs_registry.registry_url}.",
'APRS Registry thread is running and will send '
f'info every {CONF.aprs_registry.frequency_seconds} seconds '
f'to {CONF.aprs_registry.registry_url}.',
)
def loop(self):
# Only call the registry every N seconds
if self._loop_cnt % CONF.aprs_registry.frequency_seconds == 0:
info = {
"callsign": CONF.callsign,
"description": CONF.aprs_registry.description,
"service_website": CONF.aprs_registry.service_website,
"software": f"APRSD version {aprsd.__version__} "
"https://github.com/craigerl/aprsd",
'callsign': CONF.callsign,
'description': CONF.aprs_registry.description,
'service_website': CONF.aprs_registry.service_website,
'software': f'APRSD version {aprsd.__version__} '
'https://github.com/craigerl/aprsd',
}
try:
requests.post(
f"{CONF.aprs_registry.registry_url}",
f'{CONF.aprs_registry.registry_url}',
json=info,
)
except Exception as e:
LOG.error(f"Failed to send registry info: {e}")
LOG.error(f'Failed to send registry info: {e}')
time.sleep(1)
self._loop_cnt += 1

View File

@ -8,7 +8,7 @@ from oslo_config import cfg
from aprsd import packets, plugin
from aprsd.client.client import APRSDClient
from aprsd.packets import collector, filter
from aprsd.packets import collector, core, filter
from aprsd.packets import log as packet_log
from aprsd.threads import APRSDThread, tx
@ -17,12 +17,11 @@ LOG = logging.getLogger('APRSD')
class APRSDRXThread(APRSDThread):
"""Main Class to connect to an APRS Client and recieve packets.
"""
Thread to receive packets from the APRS Client and put them on the packet queue.
A packet is received in the main loop and then sent to the
process_packet method, which sends the packet through the collector
to track the packet for stats, and then put into the packet queue
for processing in a separate thread.
Args:
packet_queue: The queue to put the packets in.
"""
_client = None
@ -34,7 +33,12 @@ class APRSDRXThread(APRSDThread):
pkt_count = 0
def __init__(self, packet_queue):
def __init__(self, packet_queue: queue.Queue):
"""Initialize the APRSDRXThread.
Args:
packet_queue: The queue to put the packets in.
"""
super().__init__('RX_PKT')
self.packet_queue = packet_queue
@ -67,7 +71,7 @@ class APRSDRXThread(APRSDThread):
# https://github.com/rossengeorgiev/aprs-python/pull/56
self._client.consumer(
self.process_packet,
raw=False,
raw=True,
)
except (
aprslib.exceptions.ConnectionDrop,
@ -87,63 +91,38 @@ class APRSDRXThread(APRSDThread):
return True
def process_packet(self, *args, **kwargs):
packet = self._client.decode_packet(*args, **kwargs)
if not packet:
LOG.error(
'No packet received from decode_packet. Most likely a failure to parse'
)
"""Put the raw packet on the queue.
The processing of the packet will happen in a separate thread.
"""
if not args:
LOG.warning('No frame received to process?!?!')
return
self.pkt_count += 1
packet_log.log(packet, packet_count=self.pkt_count)
pkt_list = packets.PacketList()
if isinstance(packet, packets.AckPacket):
# We don't need to drop AckPackets, those should be
# processed.
self.packet_queue.put(packet)
else:
# Make sure we aren't re-processing the same packet
# For RF based APRS Clients we can get duplicate packets
# So we need to track them and not process the dupes.
found = False
try:
# Find the packet in the list of already seen packets
# Based on the packet.key
found = pkt_list.find(packet)
if not packet.msgNo:
# If the packet doesn't have a message id
# then there is no reliable way to detect
# if it's a dupe, so we just pass it on.
# it shouldn't get acked either.
found = False
except KeyError:
found = False
if not found:
# We haven't seen this packet before, so we process it.
collector.PacketCollector().rx(packet)
self.packet_queue.put(packet)
elif packet.timestamp - found.timestamp < CONF.packet_dupe_timeout:
# If the packet came in within N seconds of the
# Last time seeing the packet, then we drop it as a dupe.
LOG.warning(
f'Packet {packet.from_call}:{packet.msgNo} already tracked, dropping.'
)
else:
LOG.warning(
f'Packet {packet.from_call}:{packet.msgNo} already tracked '
f'but older than {CONF.packet_dupe_timeout} seconds. processing.',
)
collector.PacketCollector().rx(packet)
self.packet_queue.put(packet)
self.packet_queue.put(args[0])
class APRSDFilterThread(APRSDThread):
def __init__(self, thread_name, packet_queue):
"""
Thread to filter packets on the packet queue.
Args:
thread_name: The name of the thread.
packet_queue: The queue to get the packets from.
"""
def __init__(self, thread_name: str, packet_queue: queue.Queue):
"""Initialize the APRSDFilterThread.
Args:
thread_name: The name of the thread.
packet_queue: The queue to get the packets from.
"""
super().__init__(thread_name)
self.packet_queue = packet_queue
self.packet_count = 0
self._client = APRSDClient()
def filter_packet(self, packet):
def filter_packet(self, packet: type[core.Packet]) -> type[core.Packet] | None:
# Do any packet filtering prior to processing
if not filter.PacketFilter().filter(packet):
return None
@ -156,14 +135,27 @@ class APRSDFilterThread(APRSDThread):
doesn't want to log packets.
"""
packet_log.log(packet)
packet_log.log(packet, packet_count=self.packet_count)
def loop(self):
try:
packet = self.packet_queue.get(timeout=1)
pkt = self.packet_queue.get(timeout=1)
self.packet_count += 1
# We use the client here, because the specific
# driver may need to decode the packet differently.
packet = self._client.decode_packet(pkt)
if not packet:
# We mark this as debug, since there are so many
# packets that are on the APRS network, and we don't
# want to spam the logs with this.
LOG.debug(f'Packet failed to parse. "{pkt}"')
return True
self.print_packet(packet)
if packet:
if self.filter_packet(packet):
# The packet has passed all filters, so we collect it.
# and process it.
collector.PacketCollector().rx(packet)
self.process_packet(packet)
except queue.Empty:
pass
@ -182,7 +174,7 @@ class APRSDProcessPacketThread(APRSDFilterThread):
will ack a message before sending the packet to the subclass
for processing."""
def __init__(self, packet_queue):
def __init__(self, packet_queue: queue.Queue):
super().__init__('ProcessPKT', packet_queue=packet_queue)
if not CONF.enable_sending_ack_packets:
LOG.warning(
@ -283,7 +275,10 @@ class APRSDProcessPacketThread(APRSDFilterThread):
class APRSDPluginProcessPacketThread(APRSDProcessPacketThread):
"""Process the packet through the plugin manager.
This is the main aprsd server plugin processing thread."""
This is the main aprsd server plugin processing thread.
Args:
packet_queue: The queue to get the packets from.
"""
def process_other_packet(self, packet, for_us=False):
pm = plugin.PluginManager()
@ -322,12 +317,16 @@ class APRSDPluginProcessPacketThread(APRSDProcessPacketThread):
pm = plugin.PluginManager()
try:
results = pm.run(packet)
replied = False
results, handled = pm.run(packet)
# Check if any plugin replied (results may be unordered due to parallel execution)
replied = any(
result and result is not packets.NULL_MESSAGE for result in results
)
LOG.debug(f'Replied: {replied}, Handled: {handled}')
for reply in results:
LOG.debug(f'Reply: {reply}')
if isinstance(reply, list):
# one of the plugins wants to send multiple messages
replied = True
for subreply in reply:
LOG.debug(f"Sending '{subreply}'")
if isinstance(subreply, packets.Packet):
@ -343,13 +342,13 @@ class APRSDPluginProcessPacketThread(APRSDProcessPacketThread):
elif isinstance(reply, packets.Packet):
# We have a message based object.
tx.send(reply)
replied = True
else:
replied = True
# A plugin can return a null message flag which signals
# us that they processed the message correctly, but have
# nothing to reply with, so we avoid replying with a
# usage string
# Note: NULL_MESSAGE results are already filtered out
# in PluginManager.run(), so we can safely send this
if reply is not packets.NULL_MESSAGE:
LOG.debug(f"Sending '{reply}'")
tx.send(
@ -362,7 +361,9 @@ class APRSDPluginProcessPacketThread(APRSDProcessPacketThread):
# If the message was for us and we didn't have a
# response, then we send a usage statement.
if to_call == CONF.callsign and not replied:
# Only send "Unknown command!" if no plugin handled the message.
# If a plugin returned NULL_MESSAGE, it handled it and we shouldn't reply.
if to_call == CONF.callsign and not replied and not handled:
# Tailor the messages accordingly
if CONF.load_help_plugin:
LOG.warning('Sending help!')

View File

@ -1,19 +1,26 @@
import logging
import threading
import time
from loguru import logger
from oslo_config import cfg
from aprsd.packets import seen_list
from aprsd.stats import collector
from aprsd.threads import APRSDThread
from aprsd.utils import objectstore
CONF = cfg.CONF
LOG = logging.getLogger('APRSD')
LOGU = logger
class StatsStore(objectstore.ObjectStoreMixin):
    """Container to save the stats from the collector."""

    def __init__(self):
        # ObjectStoreMixin declares ``lock = None`` and requires the
        # subclass to create it (see the mixin's comment); RLock chosen
        # here — presumably so mixin helpers can re-acquire it. TODO confirm.
        self.lock = threading.RLock()

    def add(self, stats: dict):
        """Replace the stored snapshot with the latest collected stats."""
        with self.lock:
            self.data = stats
@ -37,3 +44,109 @@ class APRSDStatsStoreThread(APRSDThread):
time.sleep(1)
return True
class StatsLogThread(APRSDThread):
    """Log the stats from the PacketList.

    Every ``period`` loop iterations (one iteration is ~1 second because
    loop() sleeps 1s per call) this thread emits a colorized report via
    loguru: RX rate and totals, uptime, per-packet-type counts, and the
    top 10 callsigns seen.
    """

    def __init__(self):
        super().__init__('PacketStatsLog')
        # Total RX count at the previous report; used to compute the delta.
        self._last_total_rx = 0
        # Number of loop() iterations (~seconds) between reports.
        self.period = 10
        # Wall-clock start time, used for the uptime display.
        self.start_time = time.time()

    def loop(self):
        """Emit the periodic stats report.

        Returns:
            True, so the APRSDThread run loop keeps calling us.
        """
        if self.loop_count % self.period == 0:
            # log the stats every 10 seconds
            stats_json = collector.Collector().collect(serializable=True)
            stats = stats_json['PacketList']
            total_rx = stats['rx']
            rx_delta = total_rx - self._last_total_rx
            rate = rx_delta / self.period
            # Get unique callsigns count from SeenList stats
            seen_list_instance = seen_list.SeenList()
            # stats() returns data while holding lock internally, so copy it immediately
            seen_list_stats = seen_list_instance.stats()
            # Persisting the seen list to disk is a side effect of reporting.
            seen_list_instance.save()
            # Copy the stats to avoid holding references to locked data
            seen_list_stats = seen_list_stats.copy()
            unique_callsigns_count = len(seen_list_stats)
            # Calculate uptime
            elapsed = time.time() - self.start_time
            elapsed_minutes = elapsed / 60
            elapsed_hours = elapsed / 3600
            # Log summary stats
            LOGU.opt(colors=True).info(
                f'<green>RX Rate: {rate:.2f} pps</green> '
                f'<yellow>Total RX: {total_rx}</yellow> '
                f'<red>RX Last {self.period} secs: {rx_delta}</red> '
            )
            LOGU.opt(colors=True).info(
                f'<cyan>Uptime: {elapsed:.0f}s ({elapsed_minutes:.1f}m / {elapsed_hours:.2f}h)</cyan> '
                f'<magenta>Unique Callsigns: {unique_callsigns_count}</magenta>',
            )
            self._last_total_rx = total_rx
            # Log individual type stats, sorted by RX count (descending)
            sorted_types = sorted(
                stats['types'].items(), key=lambda x: x[1]['rx'], reverse=True
            )
            for k, v in sorted_types:
                # Calculate percentage of this packet type compared to total RX
                percentage = (v['rx'] / total_rx * 100) if total_rx > 0 else 0.0
                # Format values first, then apply colors
                packet_type_str = f'{k:<15}'
                rx_count_str = f'{v["rx"]:6d}'
                tx_count_str = f'{v["tx"]:6d}'
                percentage_str = f'{percentage:5.1f}%'
                # Use different colors for RX count based on threshold (matching mqtt_injest.py)
                rx_color_tag = (
                    'green' if v['rx'] > 100 else 'yellow' if v['rx'] > 10 else 'red'
                )
                LOGU.opt(colors=True).info(
                    f'  <cyan>{packet_type_str}</cyan>: '
                    f'<{rx_color_tag}>RX: {rx_count_str}</{rx_color_tag}> '
                    f'<red>TX: {tx_count_str}</red> '
                    f'<magenta>({percentage_str})</magenta>',
                )
            # Extract callsign counts from seen_list stats
            callsign_counts = {}
            for callsign, data in seen_list_stats.items():
                if isinstance(data, dict) and 'count' in data:
                    callsign_counts[callsign] = data['count']
            # Sort callsigns by packet count (descending) and get top 10
            sorted_callsigns = sorted(
                callsign_counts.items(), key=lambda x: x[1], reverse=True
            )[:10]
            # Log top 10 callsigns
            if sorted_callsigns:
                LOGU.opt(colors=True).info(
                    '<cyan>Top 10 Callsigns by Packet Count:</cyan>'
                )
                total_ranks = len(sorted_callsigns)
                for rank, (callsign, count) in enumerate(sorted_callsigns, 1):
                    # Calculate percentage of this callsign compared to total RX
                    percentage = (count / total_rx * 100) if total_rx > 0 else 0.0
                    # Use different colors based on rank: most packets (rank 1) = red,
                    # least packets (last rank) = green, middle = yellow
                    if rank == 1:
                        count_color_tag = 'red'
                    elif rank == total_ranks:
                        count_color_tag = 'green'
                    else:
                        count_color_tag = 'yellow'
                    LOGU.opt(colors=True).info(
                        f'  <cyan>{rank:2d}.</cyan> '
                        f'<white>{callsign:<12}</white>: '
                        f'<{count_color_tag}>{count:6d} packets</{count_color_tag}> '
                        f'<magenta>({percentage:5.1f}%)</magenta>',
                    )
        time.sleep(1)
        return True

View File

@ -1,6 +1,7 @@
import logging
import threading
import time
from concurrent.futures import ThreadPoolExecutor
import wrapt
from oslo_config import cfg
@ -39,6 +40,11 @@ msg_throttle_decorator = decorator.ThrottleDecorator(throttle=msg_t)
ack_throttle_decorator = decorator.ThrottleDecorator(throttle=ack_t)
s_lock = threading.Lock()
# Global scheduler instances (singletons)
_packet_scheduler = None
_ack_scheduler = None
_scheduler_lock = threading.Lock()
@wrapt.synchronized(s_lock)
@msg_throttle_decorator.sleep_and_retry
@ -62,8 +68,15 @@ def send(packet: core.Packet, direct=False, aprs_client=None):
@msg_throttle_decorator.sleep_and_retry
def _send_packet(packet: core.Packet, direct=False, aprs_client=None):
if not direct:
thread = SendPacketThread(packet=packet)
thread.start()
# Use threadpool scheduler instead of creating individual threads
scheduler = _get_packet_scheduler()
if scheduler and scheduler.is_alive():
# Scheduler will handle the packet
pass
else:
# Fallback to old method if scheduler not available
thread = SendPacketThread(packet=packet)
thread.start()
else:
_send_direct(packet, aprs_client=aprs_client)
@ -71,12 +84,20 @@ def _send_packet(packet: core.Packet, direct=False, aprs_client=None):
@ack_throttle_decorator.sleep_and_retry
def _send_ack(packet: core.AckPacket, direct=False, aprs_client=None):
if not direct:
thread = SendAckThread(packet=packet)
thread.start()
# Use threadpool scheduler instead of creating individual threads
scheduler = _get_ack_scheduler()
if scheduler and scheduler.is_alive():
# Scheduler will handle the packet
pass
else:
# Fallback to old method if scheduler not available
thread = SendAckThread(packet=packet)
thread.start()
else:
_send_direct(packet, aprs_client=aprs_client)
@msg_throttle_decorator.sleep_and_retry
def _send_direct(packet, aprs_client=None):
if aprs_client:
cl = aprs_client
@ -94,6 +115,220 @@ def _send_direct(packet, aprs_client=None):
return True
def _get_packet_scheduler():
    """Return the singleton packet send scheduler, (re)starting it if needed."""
    global _packet_scheduler
    with _scheduler_lock:
        scheduler = _packet_scheduler
        # Create a fresh scheduler when none exists yet, or when a
        # previous one has died, so callers always get a live thread.
        if scheduler is None or not scheduler.is_alive():
            scheduler = PacketSendSchedulerThread()
            scheduler.start()
            _packet_scheduler = scheduler
        return scheduler
def _get_ack_scheduler():
    """Return the singleton ack send scheduler, (re)starting it if needed."""
    global _ack_scheduler
    with _scheduler_lock:
        scheduler = _ack_scheduler
        # Create a fresh scheduler when none exists yet, or when a
        # previous one has died, so callers always get a live thread.
        if scheduler is None or not scheduler.is_alive():
            scheduler = AckSendSchedulerThread()
            scheduler.start()
            _ack_scheduler = scheduler
        return scheduler
def _send_packet_worker(msg_no: str):
    """Worker function for threadpool to send a packet.

    This function checks if the packet needs to be sent and sends it if conditions are met.
    Returns True if packet should continue to be tracked, False if done.

    Args:
        msg_no: PacketTrack key of the packet to (re)send.
    """
    pkt_tracker = tracker.PacketTrack()
    packet = pkt_tracker.get(msg_no)
    if not packet:
        # Packet was acked and removed from tracker
        return False

    if packet.send_count >= packet.retry_count:
        # Reached max retry count
        LOG.info(
            f'{packet.__class__.__name__} '
            f'({packet.msgNo}) '
            'Message Send Complete. Max attempts reached'
            f' {packet.retry_count}',
        )
        pkt_tracker.remove(packet.msgNo)
        return False

    # Check if it's time to send
    send_now = False
    if packet.last_send_time:
        now = int(round(time.time()))
        # Linear backoff: 31s after the first send, 62s after the
        # second, and so on.
        sleeptime = (packet.send_count + 1) * 31
        delta = now - packet.last_send_time
        if delta > sleeptime:
            send_now = True
    else:
        # Never sent before: send immediately.
        send_now = True

    if send_now:
        # Stamp the send time *before* attempting the send so a slow or
        # failing _send_direct() does not cause an immediate re-try on
        # the next scheduler pass.
        packet.last_send_time = int(round(time.time()))
        sent = False
        try:
            sent = _send_direct(packet)
        except Exception as ex:
            LOG.error(f'Failed to send packet: {packet}')
            LOG.error(ex)
        else:
            # Only count attempts that actually went out on the wire.
            if sent:
                packet.send_count += 1

    return True
def _send_ack_worker(msg_no: str, max_retries: int):
    """Worker function for threadpool to send an ack packet.

    This function checks if the ack needs to be sent and sends it if
    conditions are met.

    Args:
        msg_no: PacketTrack key of the AckPacket to (re)send.
        max_retries: Maximum number of send attempts before giving up.

    Returns:
        True if ack should continue to be tracked, False if done.
    """
    pkt_tracker = tracker.PacketTrack()
    packet = pkt_tracker.get(msg_no)
    if not packet:
        # Packet was removed from tracker
        return False

    if packet.send_count >= max_retries:
        LOG.debug(
            f'{packet.__class__.__name__}'
            f'({packet.msgNo}) '
            'Send Complete. Max attempts reached'
            f' {max_retries}',
        )
        # Remove the finished ack from the tracker.  Acks are never
        # acked back by the remote side, so nothing else ever removes
        # them; without this they would sit in PacketTrack forever
        # (mirrors the cleanup done in _send_packet_worker).
        pkt_tracker.remove(packet.msgNo)
        return False

    # Check if it's time to send
    send_now = False
    if packet.last_send_time:
        now = int(round(time.time()))
        # Fixed 31 second gap between ack retransmissions.
        sleep_time = 31
        delta = now - packet.last_send_time
        if delta > sleep_time:
            send_now = True
    else:
        # No previous send time, send immediately
        send_now = True

    if send_now:
        sent = False
        try:
            sent = _send_direct(packet)
        except Exception:
            LOG.error(f'Failed to send packet: {packet}')
        else:
            # Only count and timestamp attempts that actually went out.
            if sent:
                packet.send_count += 1
                packet.last_send_time = int(round(time.time()))

    return True
class PacketSendSchedulerThread(aprsd_threads.APRSDThread):
    """Scheduler thread that uses a threadpool to send packets.

    This thread periodically checks all packets in PacketTrack and submits
    send tasks to a threadpool executor, avoiding the need to create a
    separate thread for each packet.
    """

    def __init__(self, max_workers=5):
        """Initialize the scheduler.

        Args:
            max_workers: Size of the send worker threadpool.
        """
        super().__init__('PacketSendSchedulerThread')
        self.executor = ThreadPoolExecutor(
            max_workers=max_workers, thread_name_prefix='PacketSendWorker'
        )
        self.max_workers = max_workers

    def loop(self):
        """Check all tracked packets and submit send tasks to threadpool.

        Returns:
            True, so the APRSDThread run loop keeps calling us.
        """
        pkt_tracker = tracker.PacketTrack()

        # Snapshot the keys up front; workers may remove entries while
        # we iterate.
        for msg_no in list(pkt_tracker.keys()):
            packet = pkt_tracker.get(msg_no)
            if not packet:
                # Packet was acked, skip it
                continue

            # Skip AckPackets - they're handled by AckSendSchedulerThread
            if isinstance(packet, core.AckPacket):
                continue

            # Submit unconditionally: the worker checks the retry timing
            # itself and, once max retries is reached, logs completion and
            # removes the packet from the tracker.  (Previously packets at
            # the retry limit were skipped here, so the worker never ran
            # for them and they stayed in the tracker, rescanned every
            # second, forever.)
            self.executor.submit(_send_packet_worker, msg_no)

        time.sleep(1)  # Check every second
        return True

    def _cleanup(self):
        """Cleanup threadpool executor on thread shutdown."""
        LOG.debug('Shutting down PacketSendSchedulerThread executor')
        self.executor.shutdown(wait=True)
class AckSendSchedulerThread(aprsd_threads.APRSDThread):
    """Scheduler thread that uses a threadpool to send ack packets.

    This thread periodically checks all ack packets in PacketTrack and submits
    send tasks to a threadpool executor, avoiding the need to create a
    separate thread for each ack.
    """

    def __init__(self, max_workers=3):
        """Initialize the scheduler.

        Args:
            max_workers: Size of the ack send worker threadpool.
        """
        super().__init__('AckSendSchedulerThread')
        self.executor = ThreadPoolExecutor(
            max_workers=max_workers, thread_name_prefix='AckSendWorker'
        )
        self.max_workers = max_workers
        # Total number of times an ack is transmitted before giving up.
        self.max_retries = CONF.default_ack_send_count

    def loop(self):
        """Check all tracked ack packets and submit send tasks to threadpool.

        Returns:
            True, so the APRSDThread run loop keeps calling us.
        """
        pkt_tracker = tracker.PacketTrack()

        # Snapshot the keys up front; entries may be removed while we
        # iterate.
        for msg_no in list(pkt_tracker.keys()):
            packet = pkt_tracker.get(msg_no)
            if not packet:
                # Packet was removed, skip it
                continue

            # Only process AckPackets
            if not isinstance(packet, core.AckPacket):
                continue

            if packet.send_count >= self.max_retries:
                # Max retries reached.  Remove the ack from the tracker
                # here: acks are never acked back by the remote side, and
                # since we stop submitting them to the worker at this
                # point, nothing else would ever clean them up -- they
                # would be rescanned every second forever.
                pkt_tracker.remove(msg_no)
                continue

            # Submit send task to threadpool
            self.executor.submit(_send_ack_worker, msg_no, self.max_retries)

        time.sleep(1)  # Check every second
        return True

    def _cleanup(self):
        """Cleanup threadpool executor on thread shutdown."""
        LOG.debug('Shutting down AckSendSchedulerThread executor')
        self.executor.shutdown(wait=True)
class SendPacketThread(aprsd_threads.APRSDThread):
loop_count: int = 1

View File

@ -3,7 +3,6 @@ import threading
import wrapt
MAX_PACKET_ID = 9999

View File

@ -26,42 +26,42 @@ def fuzzy(hour, minute, degree=1):
When degree = 2, time is in quantum of 15 minutes."""
if degree <= 0 or degree > 2:
print("Please use a degree of 1 or 2. Using fuzziness degree=1")
print('Please use a degree of 1 or 2. Using fuzziness degree=1')
degree = 1
begin = "It's "
f0 = "almost "
f1 = "exactly "
f2 = "around "
f0 = 'almost '
f1 = 'exactly '
f2 = 'around '
b0 = " past "
b1 = " to "
b0 = ' past '
b1 = ' to '
hourlist = (
"One",
"Two",
"Three",
"Four",
"Five",
"Six",
"Seven",
"Eight",
"Nine",
"Ten",
"Eleven",
"Twelve",
'One',
'Two',
'Three',
'Four',
'Five',
'Six',
'Seven',
'Eight',
'Nine',
'Ten',
'Eleven',
'Twelve',
)
s1 = s2 = s3 = s4 = ""
s1 = s2 = s3 = s4 = ''
base = 5
if degree == 1:
base = 5
val = ("Five", "Ten", "Quarter", "Twenty", "Twenty-Five", "Half")
val = ('Five', 'Ten', 'Quarter', 'Twenty', 'Twenty-Five', 'Half')
elif degree == 2:
base = 15
val = ("Quarter", "Half")
val = ('Quarter', 'Half')
# to find whether we have to use 'almost', 'exactly' or 'around'
dmin = minute % base
@ -86,11 +86,11 @@ def fuzzy(hour, minute, degree=1):
if minute <= base / 2:
# Case like "It's around/exactly Ten"
s2 = s3 = ""
s2 = s3 = ''
s4 = hourlist[hour - 12 - 1]
elif minute >= 60 - base / 2:
# Case like "It's almost Ten"
s2 = s3 = ""
s2 = s3 = ''
s4 = hourlist[hour - 12]
else:
# Other cases with all words, like "It's around Quarter past One"
@ -114,22 +114,22 @@ def main():
try:
deg = int(sys.argv[1])
except Exception:
print("Please use a degree of 1 or 2. Using fuzziness degree=1")
print('Please use a degree of 1 or 2. Using fuzziness degree=1')
if len(sys.argv) >= 3:
tm = sys.argv[2].split(":")
tm = sys.argv[2].split(':')
try:
h = int(tm[0])
m = int(tm[1])
if h < 0 or h > 23 or m < 0 or m > 59:
raise Exception
except Exception:
print("Bad time entered. Using the system time.")
print('Bad time entered. Using the system time.')
h = stm.tm_hour
m = stm.tm_min
print(fuzzy(h, m, deg))
return
if __name__ == "__main__":
if __name__ == '__main__':
main()

View File

@ -10,40 +10,40 @@ class EnhancedJSONEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, datetime.datetime):
args = (
"year",
"month",
"day",
"hour",
"minute",
"second",
"microsecond",
'year',
'month',
'day',
'hour',
'minute',
'second',
'microsecond',
)
return {
"__type__": "datetime.datetime",
"args": [getattr(obj, a) for a in args],
'__type__': 'datetime.datetime',
'args': [getattr(obj, a) for a in args],
}
elif isinstance(obj, datetime.date):
args = ("year", "month", "day")
args = ('year', 'month', 'day')
return {
"__type__": "datetime.date",
"args": [getattr(obj, a) for a in args],
'__type__': 'datetime.date',
'args': [getattr(obj, a) for a in args],
}
elif isinstance(obj, datetime.time):
args = ("hour", "minute", "second", "microsecond")
args = ('hour', 'minute', 'second', 'microsecond')
return {
"__type__": "datetime.time",
"args": [getattr(obj, a) for a in args],
'__type__': 'datetime.time',
'args': [getattr(obj, a) for a in args],
}
elif isinstance(obj, datetime.timedelta):
args = ("days", "seconds", "microseconds")
args = ('days', 'seconds', 'microseconds')
return {
"__type__": "datetime.timedelta",
"args": [getattr(obj, a) for a in args],
'__type__': 'datetime.timedelta',
'args': [getattr(obj, a) for a in args],
}
elif isinstance(obj, decimal.Decimal):
return {
"__type__": "decimal.Decimal",
"args": [str(obj)],
'__type__': 'decimal.Decimal',
'args': [str(obj)],
}
else:
return super().default(obj)
@ -76,10 +76,10 @@ class EnhancedJSONDecoder(json.JSONDecoder):
)
def object_hook(self, d):
if "__type__" not in d:
if '__type__' not in d:
return d
o = sys.modules[__name__]
for e in d["__type__"].split("."):
for e in d['__type__'].split('.'):
o = getattr(o, e)
args, kwargs = d.get("args", ()), d.get("kwargs", {})
args, kwargs = d.get('args', ()), d.get('kwargs', {})
return o(*args, **kwargs)

View File

@ -2,7 +2,6 @@ import logging
import os
import pathlib
import pickle
import threading
from oslo_config import cfg
@ -25,8 +24,8 @@ class ObjectStoreMixin:
aprsd server -f (flush) will wipe all saved objects.
"""
def __init__(self):
self.lock = threading.RLock()
# Child class must create the lock.
lock = None
def __len__(self):
with self.lock:
@ -94,29 +93,31 @@ class ObjectStoreMixin:
def load(self):
if not CONF.enable_save:
return
if os.path.exists(self._save_filename()):
try:
with open(self._save_filename(), 'rb') as fp:
raw = pickle.load(fp)
if raw:
self.data = raw
LOG.debug(
f'{self.__class__.__name__}::Loaded {len(self)} entries from disk.',
)
else:
LOG.debug(f'{self.__class__.__name__}::No data to load.')
except (pickle.UnpicklingError, Exception) as ex:
LOG.error(f'Failed to UnPickle {self._save_filename()}')
LOG.error(ex)
self.data = {}
else:
LOG.debug(f'{self.__class__.__name__}::No save file found.')
with self.lock:
if os.path.exists(self._save_filename()):
try:
with open(self._save_filename(), 'rb') as fp:
raw = pickle.load(fp)
if raw:
self.data = raw
LOG.debug(
f'{self.__class__.__name__}::Loaded {len(self)} entries from disk.',
)
else:
LOG.debug(f'{self.__class__.__name__}::No data to load.')
except (pickle.UnpicklingError, Exception) as ex:
LOG.error(f'Failed to UnPickle {self._save_filename()}')
LOG.error(ex)
self.data = {}
else:
LOG.debug(f'{self.__class__.__name__}::No save file found.')
def flush(self):
"""Nuke the old pickle file that stored the old results from last aprsd run."""
if not CONF.enable_save:
return
if os.path.exists(self._save_filename()):
pathlib.Path(self._save_filename()).unlink()
with self.lock:
self.data = {}
if os.path.exists(self._save_filename()):
pathlib.Path(self._save_filename()).unlink()
with self.lock:
self.data = {}

View File

@ -60,18 +60,27 @@ def get_module_info(package_name, module_name, module_path):
for path, _subdirs, files in os.walk(dir_path):
for name in files:
if fnmatch.fnmatch(name, pattern):
module = smuggle(f'{path}/{name}')
for mem_name, obj in inspect.getmembers(module):
if inspect.isclass(obj) and is_plugin(obj):
obj_list.append(
{
'package': package_name,
'name': mem_name,
'obj': obj,
'version': obj.version,
'path': f'{".".join([module_name, obj.__name__])}',
},
)
# Skip __init__.py files as they often have relative imports
# that don't work when imported directly via smuggle
if name == '__init__.py':
continue
try:
module = smuggle(f'{path}/{name}')
for mem_name, obj in inspect.getmembers(module):
if inspect.isclass(obj) and is_plugin(obj):
obj_list.append(
{
'package': package_name,
'name': mem_name,
'obj': obj,
'version': obj.version,
'path': f'{".".join([module_name, obj.__name__])}',
},
)
except (ImportError, SyntaxError, AttributeError) as e:
# Skip files that can't be imported (relative imports, syntax errors, etc.)
LOG.debug(f'Could not import {path}/{name}: {e}')
continue
return obj_list

View File

@ -45,4 +45,4 @@ export COLUMNS=200
#exec uwsgi --http :8000 --gevent 1000 --http-websockets --master -w aprsd.wsgi --callable app
#exec aprsd listen -c $APRSD_CONFIG --loglevel ${LOG_LEVEL} ${APRSD_LOAD_PLUGINS} ${APRSD_LISTEN_FILTER}
#
uv run aprsd admin web -c $APRSD_CONFIG --loglevel ${LOG_LEVEL}
uv run aprsd admin web -c $APRSD_CONFIG --loglevel ${LOG_LEVEL}

View File

@ -157,10 +157,9 @@ Sample config file
# useful for a direwolf KISS aprs connection only. (boolean value)
#enabled = true
# APRS Username (string value)
#login = NOCALL
# The callsign in [DEFAULT] is used as the APRS-IS login.
# APRS Password Get the passcode for your callsign here:
# APRS Password for the callsign in [DEFAULT]. Get the passcode here:
# https://apps.magicbug.co.uk/passcode (string value)
#password = <None>

View File

@ -126,7 +126,6 @@ on creating your own plugins.
2025-12-10 14:30:05.259 | MainThread | DEBUG | aprs_registry.service_website = None | oslo_config.cfg:log_opt_values:2824
2025-12-10 14:30:05.259 | MainThread | DEBUG | aprs_network.enabled = True | oslo_config.cfg:log_opt_values:2824
2025-12-10 14:30:05.260 | MainThread | DEBUG | aprs_network.host = 155.138.131.1 | oslo_config.cfg:log_opt_values:2824
2025-12-10 14:30:05.260 | MainThread | DEBUG | aprs_network.login = WB4BOR-1 | oslo_config.cfg:log_opt_values:2824
2025-12-10 14:30:05.260 | MainThread | DEBUG | aprs_network.password = **** | oslo_config.cfg:log_opt_values:2824
2025-12-10 14:30:05.260 | MainThread | DEBUG | aprs_network.port = 14580 | oslo_config.cfg:log_opt_values:2824
2025-12-10 14:30:05.260 | MainThread | DEBUG | kiss_serial.baudrate = 9600 | oslo_config.cfg:log_opt_values:2824

View File

@ -2,18 +2,18 @@ import logging
from aprsd import packets, plugin
LOG = logging.getLogger("APRSD")
LOG = logging.getLogger('APRSD')
class HelloPlugin(plugin.APRSDRegexCommandPluginBase):
"""Hello World."""
version = "1.0"
version = '1.0'
# matches any string starting with h or H
command_regex = "^[hH]"
command_name = "hello"
command_regex = '^[hH]'
command_name = 'hello'
def process(self, packet: packets.MessagePacket):
LOG.info("HelloPlugin")
LOG.info('HelloPlugin')
reply = f"Hello '{packet.from_call}'"
return reply

View File

@ -105,6 +105,8 @@ classifiers = [
[tool.setuptools.dynamic]
dependencies = {file = ["./requirements.txt"]}
optional-dependencies.dev = {file = ["./requirements-dev.txt"]}
optional-dependencies.tests = {file = ["./requirements-tests.txt"]}
optional-dependencies.type = {file = ["./requirements-type.txt"]}
# List additional groups of dependencies here (e.g. development
# dependencies). Users will be able to install these using the "extras"

View File

@ -2,9 +2,24 @@ build
pip
pip-tools
pre-commit
pre-commit-uv>=4.1.1
tox
tox-uv
wheel
# Testing
pytest
pytest-cov
# Linting and formatting
ruff
# Type checking
mypy
types-pytz
types-requests
types-tzlocal
# Twine is used for uploading packages to pypi
# but it induces an install of cryptography
# This is sucky for rpi systems.

View File

@ -6,22 +6,40 @@ cfgv==3.5.0 # via pre-commit
chardet==5.2.0 # via tox
click==8.3.1 # via pip-tools
colorama==0.4.6 # via tox
coverage==7.13.1 # via pytest-cov
distlib==0.4.0 # via virtualenv
exceptiongroup==1.3.1 # via pytest
filelock==3.20.0 # via tox, virtualenv
identify==2.6.15 # via pre-commit
iniconfig==2.3.0 # via pytest
librt==0.7.8 # via mypy
mypy==1.19.1 # via -r requirements-dev.in
mypy-extensions==1.1.0 # via mypy
nodeenv==1.9.1 # via pre-commit
packaging==25.0 # via build, pyproject-api, tox
packaging==25.0 # via build, pyproject-api, pytest, tox, tox-uv
pathspec==1.0.3 # via mypy
pip==25.3 # via pip-tools, -r requirements-dev.in
pip-tools==7.5.2 # via -r requirements-dev.in
platformdirs==4.5.1 # via tox, virtualenv
pluggy==1.6.0 # via tox
pre-commit==4.5.0 # via -r requirements-dev.in
pluggy==1.6.0 # via pytest, pytest-cov, tox
pre-commit==4.5.0 # via pre-commit-uv, -r requirements-dev.in
pre-commit-uv==4.2.0 # via -r requirements-dev.in
pygments==2.19.2 # via pytest
pyproject-api==1.10.0 # via tox
pyproject-hooks==1.2.0 # via build, pip-tools
pytest==9.0.2 # via pytest-cov, -r requirements-dev.in
pytest-cov==7.0.0 # via -r requirements-dev.in
pyyaml==6.0.3 # via pre-commit
ruff==0.14.13 # via -r requirements-dev.in
setuptools==80.9.0 # via pip-tools
tomli==2.3.0 # via build, pip-tools, pyproject-api, tox
tox==4.32.0 # via -r requirements-dev.in
typing-extensions==4.15.0 # via tox, virtualenv
tomli==2.4.0 # via build, coverage, mypy, pip-tools, pyproject-api, pytest, tox, tox-uv
tox==4.32.0 # via tox-uv, -r requirements-dev.in
tox-uv==1.29.0 # via -r requirements-dev.in
types-pytz==2025.2.0.20251108 # via types-tzlocal, -r requirements-dev.in
types-requests==2.32.4.20260107 # via -r requirements-dev.in
types-tzlocal==5.1.0.1 # via -r requirements-dev.in
typing-extensions==4.15.0 # via exceptiongroup, mypy, tox, virtualenv
urllib3==2.6.2 # via types-requests
uv==0.9.26 # via pre-commit-uv, tox-uv
virtualenv==20.35.4 # via pre-commit, tox
wheel==0.45.1 # via pip-tools, -r requirements-dev.in

View File

@ -1,4 +1,5 @@
aprslib>=0.7.0
#aprslib>=0.7.0
git+https://github.com/hemna/aprs-python.git@telemetry
click
dataclasses-json
haversine

View File

@ -1,6 +1,6 @@
# This file was autogenerated by uv via the following command:
# uv pip compile --resolver backtracking --annotation-style=line requirements.in -o requirements.txt
aprslib==0.7.2 # via -r requirements.in
aprslib @ git+https://github.com/hemna/aprs-python.git@09cd7a2829a2e9d28ee1566881c843cc4769e590 # via -r requirements.in
attrs==25.4.0 # via ax253, kiss3, rush
ax253==0.1.5.post1 # via kiss3
bitarray==3.8.0 # via ax253, kiss3

View File

@ -14,5 +14,4 @@
# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
import setuptools
setuptools.setup()

View File

@ -18,9 +18,9 @@ class TestAPRSISDriver(unittest.TestCase):
self.conf_patcher = mock.patch('aprsd.client.drivers.aprsis.CONF')
self.mock_conf = self.conf_patcher.start()
# Configure APRS-IS settings
# Configure APRS-IS settings. callsign in [DEFAULT] is used as APRS-IS login.
self.mock_conf.aprs_network.enabled = True
self.mock_conf.aprs_network.login = 'TEST'
self.mock_conf.callsign = 'TEST'
self.mock_conf.aprs_network.password = '12345'
self.mock_conf.aprs_network.host = 'rotate.aprs.net'
self.mock_conf.aprs_network.port = 14580
@ -97,16 +97,26 @@ class TestAPRSISDriver(unittest.TestCase):
def test_is_configured_true(self):
"""Test is_configured returns True when properly configured."""
with mock.patch.object(APRSISDriver, 'is_enabled', return_value=True):
self.mock_conf.aprs_network.login = 'TEST'
self.mock_conf.callsign = 'TEST'
self.mock_conf.aprs_network.password = '12345'
self.mock_conf.aprs_network.host = 'rotate.aprs.net'
self.assertTrue(APRSISDriver.is_configured())
def test_is_configured_no_login(self):
"""Test is_configured raises exception when login not set."""
def test_is_configured_no_callsign(self):
"""Test is_configured raises exception when callsign not set or NOCALL."""
with mock.patch.object(APRSISDriver, 'is_enabled', return_value=True):
self.mock_conf.aprs_network.login = None
self.mock_conf.callsign = None
with self.assertRaises(exception.MissingConfigOptionException):
APRSISDriver.is_configured()
def test_is_configured_callsign_nocall(self):
"""Test is_configured raises exception when callsign is NOCALL."""
with mock.patch.object(APRSISDriver, 'is_enabled', return_value=True):
self.mock_conf.callsign = 'NOCALL'
self.mock_conf.aprs_network.password = '12345'
self.mock_conf.aprs_network.host = 'rotate.aprs.net'
with self.assertRaises(exception.MissingConfigOptionException):
APRSISDriver.is_configured()
@ -114,7 +124,7 @@ class TestAPRSISDriver(unittest.TestCase):
def test_is_configured_no_password(self):
"""Test is_configured raises exception when password not set."""
with mock.patch.object(APRSISDriver, 'is_enabled', return_value=True):
self.mock_conf.aprs_network.login = 'TEST'
self.mock_conf.callsign = 'TEST'
self.mock_conf.aprs_network.password = None
with self.assertRaises(exception.MissingConfigOptionException):
@ -123,7 +133,7 @@ class TestAPRSISDriver(unittest.TestCase):
def test_is_configured_no_host(self):
"""Test is_configured raises exception when host not set."""
with mock.patch.object(APRSISDriver, 'is_enabled', return_value=True):
self.mock_conf.aprs_network.login = 'TEST'
self.mock_conf.callsign = 'TEST'
self.mock_conf.aprs_network.password = '12345'
self.mock_conf.aprs_network.host = None
@ -197,9 +207,9 @@ class TestAPRSISDriver(unittest.TestCase):
self.driver.setup_connection()
# Check client created with correct parameters
# Check client created with correct parameters (callsign is APRS-IS login)
self.mock_aprslib.assert_called_once_with(
self.mock_conf.aprs_network.login,
self.mock_conf.callsign,
passwd=self.mock_conf.aprs_network.password,
host=self.mock_conf.aprs_network.host,
port=self.mock_conf.aprs_network.port,

View File

@ -112,7 +112,7 @@ class TestKISSDriver(unittest.TestCase):
mock_parse.return_value = mock_aprs_data
mock_factory.return_value = mock_packet
result = self.driver.decode_packet(frame=frame)
result = self.driver.decode_packet(frame)
self.assertEqual(result, mock_packet)
mock_parse.assert_called_with(str(frame))
@ -131,7 +131,7 @@ class TestKISSDriver(unittest.TestCase):
mock_parse.side_effect = Exception('Parse error')
with mock.patch('aprsd.client.drivers.kiss_common.LOG') as mock_log:
result = self.driver.decode_packet(frame=frame)
result = self.driver.decode_packet(frame)
self.assertIsNone(result)
mock_log.error.assert_called()
@ -154,7 +154,7 @@ class TestKISSDriver(unittest.TestCase):
mock_parse.return_value = mock_aprs_data
mock_factory.return_value = third_party
result = self.driver.decode_packet(frame=frame)
result = self.driver.decode_packet(frame)
self.assertEqual(result, third_party.subpacket)
def test_consumer_not_connected(self):

View File

@ -339,7 +339,7 @@ class TestTCPKISSDriver(unittest.TestCase):
with mock.patch(
'aprsd.client.drivers.tcpkiss.core.factory', return_value=mock_packet
) as mock_factory:
result = self.driver.decode_packet(frame=mock_frame)
result = self.driver.decode_packet(mock_frame)
mock_parse.assert_called_once_with(str(mock_frame))
mock_factory.assert_called_once_with(mock_aprs_data)
@ -362,7 +362,7 @@ class TestTCPKISSDriver(unittest.TestCase):
'aprsd.client.drivers.kiss_common.aprslib.parse',
side_effect=Exception('Test error'),
) as mock_parse:
result = self.driver.decode_packet(frame=mock_frame)
result = self.driver.decode_packet(mock_frame)
mock_parse.assert_called_once()
self.assertIsNone(result)
@ -389,7 +389,7 @@ class TestTCPKISSDriver(unittest.TestCase):
self.driver.consumer(mock_callback)
mock_read_frame.assert_called_once()
mock_callback.assert_called_once_with(frame=mock_frame)
mock_callback.assert_called_once_with(mock_frame)
@mock.patch('aprsd.client.drivers.tcpkiss.LOG')
def test_read_frame_success(self, mock_log):

View File

@ -26,11 +26,11 @@ class TestDriverRegistry(unittest.TestCase):
mock_instance.is_enabled.return_value = False
mock_instance.is_configured.return_value = False
# Mock CONF to prevent password check
# Mock CONF to prevent password/callsign check
self.conf_patcher = mock.patch('aprsd.client.drivers.aprsis.CONF')
mock_conf = self.conf_patcher.start()
mock_conf.aprs_network.password = 'dummy'
mock_conf.aprs_network.login = 'dummy'
mock_conf.callsign = 'dummy'
# Patch the register method to skip Protocol check for MockClientDriver
self._original_register = self.registry.register

View File

@ -5,14 +5,13 @@ from unittest import mock
from click.testing import CliRunner
from oslo_config import cfg
from aprsd import conf # noqa : F401
from aprsd.cmds import send_message # noqa
from aprsd.main import cli
from .. import fake
CONF = cfg.CONF
F = t.TypeVar("F", bound=t.Callable[..., t.Any])
F = t.TypeVar('F', bound=t.Callable[..., t.Any])
class TestSendMessageCommand(unittest.TestCase):
@ -21,44 +20,44 @@ class TestSendMessageCommand(unittest.TestCase):
CONF.trace_enabled = False
CONF.watch_list.packet_keep_count = 1
if login:
CONF.aprs_network.login = login
CONF.callsign = login
if password:
CONF.aprs_network.password = password
# CONF.aprsd_admin_extension.user = "admin"
# CONF.aprsd_admin_extension.password = "password"
@mock.patch("aprsd.log.log.setup_logging")
@mock.patch('aprsd.log.log.setup_logging')
def test_no_tocallsign(self, mock_logging):
"""Make sure we get an error if there is no tocallsign."""
self.config_and_init(
login="something",
password="another",
login='something',
password='another',
)
runner = CliRunner()
result = runner.invoke(
cli,
["send-message"],
['send-message'],
catch_exceptions=False,
)
assert result.exit_code == 2
assert "Error: Missing argument 'TOCALLSIGN'" in result.output
@mock.patch("aprsd.log.log.setup_logging")
@mock.patch('aprsd.log.log.setup_logging')
def test_no_command(self, mock_logging):
"""Make sure we get an error if there is no command."""
self.config_and_init(
login="something",
password="another",
login='something',
password='another',
)
runner = CliRunner()
result = runner.invoke(
cli,
["send-message", "WB4BOR"],
['send-message', 'WB4BOR'],
catch_exceptions=False,
)
assert result.exit_code == 2

View File

@ -1,9 +1,9 @@
from aprsd import plugin, threads
from aprsd.packets import core
FAKE_MESSAGE_TEXT = "fake MeSSage"
FAKE_FROM_CALLSIGN = "KFAKE"
FAKE_TO_CALLSIGN = "KMINE"
FAKE_MESSAGE_TEXT = 'fake MeSSage'
FAKE_FROM_CALLSIGN = 'KFAKE'
FAKE_TO_CALLSIGN = 'KMINE'
def fake_packet(
@ -15,22 +15,40 @@ def fake_packet(
response=None,
):
packet_dict = {
"from": fromcall,
"addresse": tocall,
"to": tocall,
"format": message_format,
"raw": "",
'from': fromcall,
'addresse': tocall,
'to': tocall,
'format': message_format,
'raw': '',
}
if message:
packet_dict["message_text"] = message
packet_dict['message_text'] = message
if msg_number:
packet_dict["msgNo"] = str(msg_number)
packet_dict['msgNo'] = str(msg_number)
if response:
packet_dict["response"] = response
packet_dict['response'] = response
return core.factory(packet_dict)
packet = core.factory(packet_dict)
# Call prepare to build the raw data
packet.prepare()
return packet
def fake_gps_packet():
    """Create a properly prepared GPSPacket for testing."""
    gps = core.GPSPacket(
        from_call=FAKE_FROM_CALLSIGN,
        to_call=FAKE_TO_CALLSIGN,
        latitude=37.7749,
        longitude=-122.4194,
        symbol='>',
        comment='Test GPS comment',
    )
    # prepare() builds the raw APRS payload so the packet is usable as-is.
    gps.prepare()
    return gps
def fake_ack_packet():
@ -41,7 +59,7 @@ def fake_ack_packet():
class FakeBaseNoThreadsPlugin(plugin.APRSDPluginBase):
version = "1.0"
version = '1.0'
def setup(self):
self.enabled = True
@ -50,19 +68,19 @@ class FakeBaseNoThreadsPlugin(plugin.APRSDPluginBase):
return None
def process(self, packet):
return "process"
return 'process'
class FakeThread(threads.APRSDThread):
def __init__(self):
super().__init__("FakeThread")
super().__init__('FakeThread')
def loop(self):
return False
class FakeBaseThreadsPlugin(plugin.APRSDPluginBase):
version = "1.0"
version = '1.0'
def setup(self):
self.enabled = True
@ -71,16 +89,16 @@ class FakeBaseThreadsPlugin(plugin.APRSDPluginBase):
return None
def process(self, packet):
return "process"
return 'process'
def create_threads(self):
return FakeThread()
class FakeRegexCommandPlugin(plugin.APRSDRegexCommandPluginBase):
version = "1.0"
command_regex = "^[fF]"
command_name = "fake"
version = '1.0'
command_regex = '^[fF]'
command_name = 'fake'
def process(self, packet):
return FAKE_MESSAGE_TEXT

View File

@ -80,19 +80,16 @@ class TestDupePacketFilter(unittest.TestCase):
packet.processed = True
packet.timestamp = 1000
with mock.patch(
'aprsd.packets.filters.dupe_filter.packets.PacketList'
) as mock_list:
mock_list_instance = mock.MagicMock()
found_packet = fake.fake_packet(msg_number='123')
found_packet.timestamp = 1050 # Within 60 second timeout
mock_list_instance.find.return_value = found_packet
mock_list.return_value = mock_list_instance
mock_list_instance = mock.MagicMock()
found_packet = fake.fake_packet(msg_number='123')
found_packet.timestamp = 1050 # Within 60 second timeout
mock_list_instance.find.return_value = found_packet
self.filter.pl = mock_list_instance
with mock.patch('aprsd.packets.filters.dupe_filter.LOG') as mock_log:
result = self.filter.filter(packet)
self.assertIsNone(result) # Should be dropped
mock_log.warning.assert_called()
with mock.patch('aprsd.packets.filters.dupe_filter.LOG') as mock_log:
result = self.filter.filter(packet)
self.assertIsNone(result) # Should be dropped
mock_log.warning.assert_called()
def test_filter_duplicate_after_timeout(self):
"""Test filter() with duplicate after timeout."""
@ -105,16 +102,13 @@ class TestDupePacketFilter(unittest.TestCase):
packet.processed = True
packet.timestamp = 2000
with mock.patch(
'aprsd.packets.filters.dupe_filter.packets.PacketList'
) as mock_list:
mock_list_instance = mock.MagicMock()
found_packet = fake.fake_packet(msg_number='123')
found_packet.timestamp = 1000 # More than 60 seconds ago
mock_list_instance.find.return_value = found_packet
mock_list.return_value = mock_list_instance
mock_list_instance = mock.MagicMock()
found_packet = fake.fake_packet(msg_number='123')
found_packet.timestamp = 1000 # More than 60 seconds ago
mock_list_instance.find.return_value = found_packet
self.filter.pl = mock_list_instance
with mock.patch('aprsd.packets.filters.dupe_filter.LOG') as mock_log:
result = self.filter.filter(packet)
self.assertEqual(result, packet) # Should pass
mock_log.warning.assert_called()
with mock.patch('aprsd.packets.filters.dupe_filter.LOG') as mock_log:
result = self.filter.filter(packet)
self.assertEqual(result, packet) # Should pass
mock_log.warning.assert_called()

View File

@ -0,0 +1,76 @@
import json
import unittest
import aprslib
from aprsd import packets
from tests import fake
class TestAckPacket(unittest.TestCase):
    """Test AckPacket JSON serialization."""

    def _build_ack(self):
        # Shared fixture: a minimal AckPacket with a known msgNo.
        return packets.AckPacket(
            from_call=fake.FAKE_FROM_CALLSIGN,
            to_call=fake.FAKE_TO_CALLSIGN,
            msgNo='123',
        )

    def test_ack_packet_to_json(self):
        """Test AckPacket.to_json() method."""
        serialized = self._build_ack().to_json()
        self.assertIsInstance(serialized, str)
        data = json.loads(serialized)
        self.assertEqual(data['_type'], 'AckPacket')
        self.assertEqual(data['from_call'], fake.FAKE_FROM_CALLSIGN)
        self.assertEqual(data['to_call'], fake.FAKE_TO_CALLSIGN)
        self.assertEqual(data['msgNo'], '123')

    def test_ack_packet_from_dict(self):
        """Test AckPacket.from_dict() method."""
        source = {
            '_type': 'AckPacket',
            'from_call': fake.FAKE_FROM_CALLSIGN,
            'to_call': fake.FAKE_TO_CALLSIGN,
            'msgNo': '123',
        }
        pkt = packets.AckPacket.from_dict(source)
        self.assertIsInstance(pkt, packets.AckPacket)
        self.assertEqual(pkt.from_call, fake.FAKE_FROM_CALLSIGN)
        self.assertEqual(pkt.to_call, fake.FAKE_TO_CALLSIGN)
        self.assertEqual(pkt.msgNo, '123')

    def test_ack_packet_round_trip(self):
        """Test AckPacket round-trip: to_json -> from_dict."""
        original = self._build_ack()
        restored = packets.AckPacket.from_dict(json.loads(original.to_json()))
        self.assertEqual(restored.from_call, original.from_call)
        self.assertEqual(restored.to_call, original.to_call)
        self.assertEqual(restored.msgNo, original.msgNo)
        self.assertEqual(restored._type, original._type)

    def test_ack_packet_from_raw_string(self):
        """Test AckPacket creation from raw APRS string."""
        raw = 'KFAKE>APZ100::KMINE :ack123'
        parsed = aprslib.parse(raw)
        # aprslib might not set format/response correctly, so set them manually
        parsed['format'] = 'message'
        parsed['response'] = 'ack'
        pkt = packets.factory(parsed)
        self.assertIsInstance(pkt, packets.AckPacket)
        # Serialize, then rebuild through the factory.
        serialized = pkt.to_json()
        self.assertIsInstance(serialized, str)
        data = json.loads(serialized)
        self.assertEqual(data['_type'], 'AckPacket')
        restored = packets.factory(data)
        self.assertIsInstance(restored, packets.AckPacket)
        self.assertEqual(restored.from_call, pkt.from_call)
        self.assertEqual(restored.to_call, pkt.to_call)
        self.assertEqual(restored.msgNo, pkt.msgNo)

View File

@ -0,0 +1,98 @@
import json
import unittest
import aprslib
from aprsd import packets
from tests import fake
class TestBeaconPacket(unittest.TestCase):
    """Test BeaconPacket JSON serialization."""

    def _build_beacon(self):
        # Shared fixture: a BeaconPacket with fixed position and comment.
        return packets.BeaconPacket(
            from_call=fake.FAKE_FROM_CALLSIGN,
            to_call=fake.FAKE_TO_CALLSIGN,
            latitude=37.7749,
            longitude=-122.4194,
            symbol='>',
            symbol_table='/',
            comment='Test beacon comment',
        )

    def test_beacon_packet_to_json(self):
        """Test BeaconPacket.to_json() method."""
        serialized = self._build_beacon().to_json()
        self.assertIsInstance(serialized, str)
        data = json.loads(serialized)
        self.assertEqual(data['_type'], 'BeaconPacket')
        self.assertEqual(data['from_call'], fake.FAKE_FROM_CALLSIGN)
        self.assertEqual(data['to_call'], fake.FAKE_TO_CALLSIGN)
        self.assertEqual(data['latitude'], 37.7749)
        self.assertEqual(data['longitude'], -122.4194)
        self.assertEqual(data['symbol'], '>')
        self.assertEqual(data['symbol_table'], '/')
        self.assertEqual(data['comment'], 'Test beacon comment')

    def test_beacon_packet_from_dict(self):
        """Test BeaconPacket.from_dict() method."""
        source = {
            '_type': 'BeaconPacket',
            'from_call': fake.FAKE_FROM_CALLSIGN,
            'to_call': fake.FAKE_TO_CALLSIGN,
            'latitude': 37.7749,
            'longitude': -122.4194,
            'symbol': '>',
            'symbol_table': '/',
            'comment': 'Test beacon comment',
        }
        pkt = packets.BeaconPacket.from_dict(source)
        self.assertIsInstance(pkt, packets.BeaconPacket)
        self.assertEqual(pkt.from_call, fake.FAKE_FROM_CALLSIGN)
        self.assertEqual(pkt.to_call, fake.FAKE_TO_CALLSIGN)
        self.assertEqual(pkt.latitude, 37.7749)
        self.assertEqual(pkt.longitude, -122.4194)
        self.assertEqual(pkt.symbol, '>')
        self.assertEqual(pkt.symbol_table, '/')
        self.assertEqual(pkt.comment, 'Test beacon comment')

    def test_beacon_packet_round_trip(self):
        """Test BeaconPacket round-trip: to_json -> from_dict."""
        original = self._build_beacon()
        restored = packets.BeaconPacket.from_dict(json.loads(original.to_json()))
        for attr in (
            'from_call',
            'to_call',
            'latitude',
            'longitude',
            'symbol',
            'symbol_table',
            'comment',
            '_type',
        ):
            self.assertEqual(getattr(restored, attr), getattr(original, attr))

    def test_beacon_packet_from_raw_string(self):
        """Test BeaconPacket creation from raw APRS string."""
        # Use a format that aprslib can parse correctly
        raw = 'kd8mey-10>APRS,TCPIP*,qAC,T2SYDNEY:=4247.80N/08539.00WrPHG1210/Making 220 Great Again Allstar# 552191'
        pkt = packets.factory(aprslib.parse(raw))
        self.assertIsInstance(pkt, packets.BeaconPacket)
        serialized = pkt.to_json()
        self.assertIsInstance(serialized, str)
        data = json.loads(serialized)
        self.assertEqual(data['_type'], 'BeaconPacket')
        # Rebuild through the factory and verify the position survives.
        restored = packets.factory(data)
        self.assertIsInstance(restored, packets.BeaconPacket)
        self.assertEqual(restored.from_call, pkt.from_call)
        self.assertEqual(restored.latitude, pkt.latitude)
        self.assertEqual(restored.longitude, pkt.longitude)

View File

@ -0,0 +1,75 @@
import json
import unittest
import aprslib
from aprsd import packets
from tests import fake
class TestBulletinPacket(unittest.TestCase):
    """Test BulletinPacket JSON serialization."""

    def _build_bulletin(self):
        # Shared fixture: a minimal bulletin with bulletin id '1'.
        return packets.BulletinPacket(
            from_call=fake.FAKE_FROM_CALLSIGN,
            message_text='Test bulletin message',
            bid='1',
        )

    def test_bulletin_packet_to_json(self):
        """Test BulletinPacket.to_json() method."""
        serialized = self._build_bulletin().to_json()
        self.assertIsInstance(serialized, str)
        data = json.loads(serialized)
        self.assertEqual(data['_type'], 'BulletinPacket')
        self.assertEqual(data['from_call'], fake.FAKE_FROM_CALLSIGN)
        self.assertEqual(data['message_text'], 'Test bulletin message')
        self.assertEqual(data['bid'], '1')

    def test_bulletin_packet_from_dict(self):
        """Test BulletinPacket.from_dict() method."""
        source = {
            '_type': 'BulletinPacket',
            'from_call': fake.FAKE_FROM_CALLSIGN,
            'message_text': 'Test bulletin message',
            'bid': '1',
        }
        pkt = packets.BulletinPacket.from_dict(source)
        self.assertIsInstance(pkt, packets.BulletinPacket)
        self.assertEqual(pkt.from_call, fake.FAKE_FROM_CALLSIGN)
        self.assertEqual(pkt.message_text, 'Test bulletin message')
        self.assertEqual(pkt.bid, '1')

    def test_bulletin_packet_round_trip(self):
        """Test BulletinPacket round-trip: to_json -> from_dict."""
        original = self._build_bulletin()
        restored = packets.BulletinPacket.from_dict(json.loads(original.to_json()))
        for attr in ('from_call', 'message_text', 'bid', '_type'):
            self.assertEqual(getattr(restored, attr), getattr(original, attr))

    def test_bulletin_packet_from_raw_string(self):
        """Test BulletinPacket creation from raw APRS string."""
        raw = 'KFAKE>APZ100::BLN1 :Test bulletin message'
        parsed = aprslib.parse(raw)
        # aprslib might not set format correctly, so set it manually
        parsed['format'] = 'bulletin'
        pkt = packets.factory(parsed)
        self.assertIsInstance(pkt, packets.BulletinPacket)
        serialized = pkt.to_json()
        self.assertIsInstance(serialized, str)
        data = json.loads(serialized)
        self.assertEqual(data['_type'], 'BulletinPacket')
        # Rebuild through the factory and verify the payload survives.
        restored = packets.factory(data)
        self.assertIsInstance(restored, packets.BulletinPacket)
        self.assertEqual(restored.from_call, pkt.from_call)
        self.assertEqual(restored.message_text, pkt.message_text)
        self.assertEqual(restored.bid, pkt.bid)

View File

@ -0,0 +1,109 @@
import json
import unittest
import aprslib
from aprsd import packets
from tests import fake
class TestGPSPacket(unittest.TestCase):
    """Test GPSPacket JSON serialization."""

    def _build_gps(self, **extra):
        # Shared fixture: GPSPacket with fixed position/altitude.  Extra
        # kwargs let individual tests add fields such as speed/course.
        return packets.GPSPacket(
            from_call=fake.FAKE_FROM_CALLSIGN,
            to_call=fake.FAKE_TO_CALLSIGN,
            latitude=37.7749,
            longitude=-122.4194,
            altitude=100.0,
            symbol='>',
            symbol_table='/',
            comment='Test GPS comment',
            **extra,
        )

    def test_gps_packet_to_json(self):
        """Test GPSPacket.to_json() method."""
        serialized = self._build_gps().to_json()
        self.assertIsInstance(serialized, str)
        data = json.loads(serialized)
        self.assertEqual(data['_type'], 'GPSPacket')
        self.assertEqual(data['from_call'], fake.FAKE_FROM_CALLSIGN)
        self.assertEqual(data['to_call'], fake.FAKE_TO_CALLSIGN)
        self.assertEqual(data['latitude'], 37.7749)
        self.assertEqual(data['longitude'], -122.4194)
        self.assertEqual(data['altitude'], 100.0)
        self.assertEqual(data['symbol'], '>')
        self.assertEqual(data['symbol_table'], '/')
        self.assertEqual(data['comment'], 'Test GPS comment')

    def test_gps_packet_from_dict(self):
        """Test GPSPacket.from_dict() method."""
        source = {
            '_type': 'GPSPacket',
            'from_call': fake.FAKE_FROM_CALLSIGN,
            'to_call': fake.FAKE_TO_CALLSIGN,
            'latitude': 37.7749,
            'longitude': -122.4194,
            'altitude': 100.0,
            'symbol': '>',
            'symbol_table': '/',
            'comment': 'Test GPS comment',
        }
        pkt = packets.GPSPacket.from_dict(source)
        self.assertIsInstance(pkt, packets.GPSPacket)
        self.assertEqual(pkt.from_call, fake.FAKE_FROM_CALLSIGN)
        self.assertEqual(pkt.to_call, fake.FAKE_TO_CALLSIGN)
        self.assertEqual(pkt.latitude, 37.7749)
        self.assertEqual(pkt.longitude, -122.4194)
        self.assertEqual(pkt.altitude, 100.0)
        self.assertEqual(pkt.symbol, '>')
        self.assertEqual(pkt.symbol_table, '/')
        self.assertEqual(pkt.comment, 'Test GPS comment')

    def test_gps_packet_round_trip(self):
        """Test GPSPacket round-trip: to_json -> from_dict."""
        original = self._build_gps(speed=25.5, course=180)
        restored = packets.GPSPacket.from_dict(json.loads(original.to_json()))
        for attr in (
            'from_call',
            'to_call',
            'latitude',
            'longitude',
            'altitude',
            'symbol',
            'symbol_table',
            'comment',
            'speed',
            'course',
            '_type',
        ):
            self.assertEqual(getattr(restored, attr), getattr(original, attr))

    def test_gps_packet_from_raw_string(self):
        """Test GPSPacket creation from raw APRS string."""
        raw = 'KFAKE>APZ100,WIDE2-1:!3742.00N/12225.00W>Test GPS comment'
        pkt = packets.factory(aprslib.parse(raw))
        # GPS packets are typically created as BeaconPacket or other types,
        # so only verify the position data when it is present.
        self.assertIsNotNone(pkt)
        if hasattr(pkt, 'latitude') and hasattr(pkt, 'longitude'):
            serialized = pkt.to_json()
            self.assertIsInstance(serialized, str)
            data = json.loads(serialized)
            self.assertIn('latitude', data)
            self.assertIn('longitude', data)
            # Round trip through the factory keeps the coordinates.
            restored = packets.factory(data)
            self.assertEqual(restored.latitude, pkt.latitude)
            self.assertEqual(restored.longitude, pkt.longitude)

208
tests/packets/test_log.py Normal file
View File

@ -0,0 +1,208 @@
import unittest
from unittest import mock
from aprsd import packets
from aprsd.packets import log
from tests import fake
class TestPacketLog(unittest.TestCase):
    """Unit tests for the packet logging functions."""

    def setUp(self):
        """Patch out loggers, distance helpers and configuration."""
        # Loguru-based logger that produces the formatted packet output.
        self.loguru_opt_mock = mock.patch('aprsd.packets.log.LOGU.opt').start()
        self.loguru_info_mock = self.loguru_opt_mock.return_value.info
        # Stdlib logger, haversine distance helper, utils and config.
        self.logging_mock = mock.patch('aprsd.packets.log.LOG').start()
        self.haversine_mock = mock.patch('aprsd.packets.log.haversine').start()
        self.utils_mock = mock.patch('aprsd.packets.log.utils').start()
        self.conf_mock = mock.patch('aprsd.packets.log.CONF').start()
        # Default configuration values for the tests.
        self.conf_mock.enable_packet_logging = True
        self.conf_mock.log_packet_format = (
            'multiline'  # Changed from 'compact' to 'multiline'
        )
        self.conf_mock.default_ack_send_count = 3
        self.conf_mock.default_packet_send_count = 5
        self.conf_mock.latitude = 37.7749
        self.conf_mock.longitude = -122.4194
        # Canned return values for the bearing/distance helpers.
        self.utils_mock.calculate_initial_compass_bearing.return_value = 45.0
        self.utils_mock.degrees_to_cardinal.return_value = 'NE'
        self.haversine_mock.return_value = 10.5
        # Packets built by the fake helpers carry real raw data, so
        # packet.raw needs no mocking here.

    def tearDown(self):
        """Undo every patch started in setUp()."""
        mock.patch.stopall()

    def _make_gps(self):
        # Shared fixture: GPSPacket with a fixed position and comment.
        return packets.GPSPacket(
            from_call=fake.FAKE_FROM_CALLSIGN,
            to_call=fake.FAKE_TO_CALLSIGN,
            latitude=37.7749,
            longitude=-122.4194,
            symbol='>',
            comment='Test GPS comment',
        )

    def test_log_multiline_with_ack_packet(self):
        """Test log_multiline with an AckPacket."""
        pkt = fake.fake_ack_packet()
        pkt.send_count = 1
        log.log_multiline(pkt, tx=True, header=True)
        # Only the loguru path fires; LOG.debug is no longer used here.
        self.loguru_opt_mock.assert_called_once()
        self.loguru_info_mock.assert_called_once()

    def test_log_multiline_with_gps_packet(self):
        """Test log_multiline with a GPSPacket."""
        pkt = self._make_gps()
        pkt.send_count = 2
        log.log_multiline(pkt, tx=False, header=True)
        self.loguru_opt_mock.assert_called_once()
        self.loguru_info_mock.assert_called_once()

    def test_log_multiline_disabled_logging(self):
        """Test log_multiline when packet logging is disabled."""
        self.conf_mock.enable_packet_logging = False
        pkt = fake.fake_packet()
        pkt.send_count = 0
        log.log_multiline(pkt, tx=False, header=True)
        # Nothing should be emitted while logging is disabled.
        self.loguru_opt_mock.assert_not_called()
        self.logging_mock.debug.assert_not_called()

    def test_log_multiline_compact_format(self):
        """Test log_multiline when log format is compact."""
        self.conf_mock.log_packet_format = 'compact'
        pkt = fake.fake_packet()
        pkt.send_count = 0
        log.log_multiline(pkt, tx=False, header=True)
        # Compact format makes log_multiline a no-op.
        self.loguru_opt_mock.assert_not_called()
        self.logging_mock.debug.assert_not_called()

    def test_log_with_compact_format(self):
        """Test log function with compact format."""
        self.conf_mock.log_packet_format = 'compact'
        pkt = fake.fake_packet()
        pkt.send_count = 1
        log.log(pkt, tx=True, header=True, packet_count=1)
        self.loguru_opt_mock.assert_called_once()

    def test_log_with_multiline_format(self):
        """Test log function with multiline format."""
        self.conf_mock.log_packet_format = 'multiline'
        pkt = fake.fake_packet()
        pkt.send_count = 1
        log.log(pkt, tx=True, header=True, packet_count=1)
        self.loguru_opt_mock.assert_called_once()

    def test_log_with_gps_packet_distance(self):
        """Test log function with GPS packet that includes distance info."""
        pkt = self._make_gps()
        pkt.send_count = 2
        log.log(pkt, tx=False, header=True)
        self.loguru_opt_mock.assert_called_once()

    def test_log_with_disabled_logging(self):
        """Test log function when packet logging is disabled."""
        self.conf_mock.enable_packet_logging = False
        pkt = fake.fake_packet()
        pkt.send_count = 0
        log.log(pkt, tx=False, header=True, force_log=False)
        self.loguru_opt_mock.assert_not_called()

    def test_log_with_force_log(self):
        """Test log function with force_log=True even when logging is disabled."""
        self.conf_mock.enable_packet_logging = False
        pkt = fake.fake_packet()
        pkt.send_count = 0
        # force_log=True must override the disabled config flag.
        log.log(pkt, tx=False, header=True, force_log=True)
        self.loguru_opt_mock.assert_called_once()

    def test_log_with_different_packet_types(self):
        """Test log function with different packet types."""
        # MessagePacket first.
        msg_pkt = fake.fake_packet()
        msg_pkt.send_count = 1
        log.log(msg_pkt, tx=False, header=True)
        self.loguru_opt_mock.assert_called_once()
        # Then an AckPacket after resetting the mock.
        self.loguru_opt_mock.reset_mock()
        ack_pkt = fake.fake_ack_packet()
        ack_pkt.send_count = 2
        log.log(ack_pkt, tx=True, header=True)
        self.loguru_opt_mock.assert_called_once()

View File

@ -0,0 +1,80 @@
import json
import unittest
import aprslib
from aprsd import packets
from tests import fake
class TestMessagePacket(unittest.TestCase):
    """Test MessagePacket JSON serialization."""

    def _build_message(self):
        # Shared fixture: a MessagePacket with a known body and msgNo.
        return packets.MessagePacket(
            from_call=fake.FAKE_FROM_CALLSIGN,
            to_call=fake.FAKE_TO_CALLSIGN,
            message_text='Test message',
            msgNo='123',
        )

    def test_message_packet_to_json(self):
        """Test MessagePacket.to_json() method."""
        serialized = self._build_message().to_json()
        self.assertIsInstance(serialized, str)
        data = json.loads(serialized)
        self.assertEqual(data['_type'], 'MessagePacket')
        self.assertEqual(data['from_call'], fake.FAKE_FROM_CALLSIGN)
        self.assertEqual(data['to_call'], fake.FAKE_TO_CALLSIGN)
        self.assertEqual(data['message_text'], 'Test message')
        self.assertEqual(data['msgNo'], '123')

    def test_message_packet_from_dict(self):
        """Test MessagePacket.from_dict() method."""
        source = {
            '_type': 'MessagePacket',
            'from_call': fake.FAKE_FROM_CALLSIGN,
            'to_call': fake.FAKE_TO_CALLSIGN,
            'message_text': 'Test message',
            'msgNo': '123',
        }
        pkt = packets.MessagePacket.from_dict(source)
        self.assertIsInstance(pkt, packets.MessagePacket)
        self.assertEqual(pkt.from_call, fake.FAKE_FROM_CALLSIGN)
        self.assertEqual(pkt.to_call, fake.FAKE_TO_CALLSIGN)
        self.assertEqual(pkt.message_text, 'Test message')
        self.assertEqual(pkt.msgNo, '123')

    def test_message_packet_round_trip(self):
        """Test MessagePacket round-trip: to_json -> from_dict."""
        original = self._build_message()
        restored = packets.MessagePacket.from_dict(json.loads(original.to_json()))
        for attr in ('from_call', 'to_call', 'message_text', 'msgNo', '_type'):
            self.assertEqual(getattr(restored, attr), getattr(original, attr))

    def test_message_packet_from_raw_string(self):
        """Test MessagePacket creation from raw APRS string."""
        raw = 'KM6LYW>APZ100::WB4BOR :Test message{123'
        pkt = packets.factory(aprslib.parse(raw))
        self.assertIsInstance(pkt, packets.MessagePacket)
        serialized = pkt.to_json()
        self.assertIsInstance(serialized, str)
        data = json.loads(serialized)
        self.assertEqual(data['_type'], 'MessagePacket')
        # Rebuild through the factory and verify the fields survive.
        restored = packets.factory(data)
        self.assertIsInstance(restored, packets.MessagePacket)
        self.assertEqual(restored.from_call, pkt.from_call)
        self.assertEqual(restored.to_call, pkt.to_call)
        self.assertEqual(restored.message_text, pkt.message_text)
        self.assertEqual(restored.msgNo, pkt.msgNo)

View File

@ -0,0 +1,107 @@
import json
import unittest
import aprslib
from aprsd import packets
from tests import fake
class TestMicEPacket(unittest.TestCase):
    """Test MicEPacket JSON serialization."""

    def _build_mice(self):
        # Shared fixture: MicEPacket with position, motion and telemetry.
        return packets.MicEPacket(
            from_call=fake.FAKE_FROM_CALLSIGN,
            to_call=fake.FAKE_TO_CALLSIGN,
            latitude=37.7749,
            longitude=-122.4194,
            speed=25.5,
            course=180,
            mbits='test',
            mtype='test_type',
            telemetry={'key': 'value'},
        )

    def test_mice_packet_to_json(self):
        """Test MicEPacket.to_json() method."""
        serialized = self._build_mice().to_json()
        self.assertIsInstance(serialized, str)
        data = json.loads(serialized)
        self.assertEqual(data['_type'], 'MicEPacket')
        self.assertEqual(data['from_call'], fake.FAKE_FROM_CALLSIGN)
        self.assertEqual(data['to_call'], fake.FAKE_TO_CALLSIGN)
        self.assertEqual(data['latitude'], 37.7749)
        self.assertEqual(data['longitude'], -122.4194)
        self.assertEqual(data['speed'], 25.5)
        self.assertEqual(data['course'], 180)
        self.assertEqual(data['mbits'], 'test')
        self.assertEqual(data['mtype'], 'test_type')

    def test_mice_packet_from_dict(self):
        """Test MicEPacket.from_dict() method."""
        source = {
            '_type': 'MicEPacket',
            'from_call': fake.FAKE_FROM_CALLSIGN,
            'to_call': fake.FAKE_TO_CALLSIGN,
            'latitude': 37.7749,
            'longitude': -122.4194,
            'speed': 25.5,
            'course': 180,
            'mbits': 'test',
            'mtype': 'test_type',
            'telemetry': {'key': 'value'},
        }
        pkt = packets.MicEPacket.from_dict(source)
        self.assertIsInstance(pkt, packets.MicEPacket)
        self.assertEqual(pkt.from_call, fake.FAKE_FROM_CALLSIGN)
        self.assertEqual(pkt.to_call, fake.FAKE_TO_CALLSIGN)
        self.assertEqual(pkt.latitude, 37.7749)
        self.assertEqual(pkt.longitude, -122.4194)
        self.assertEqual(pkt.speed, 25.5)
        self.assertEqual(pkt.course, 180)
        self.assertEqual(pkt.mbits, 'test')
        self.assertEqual(pkt.mtype, 'test_type')

    def test_mice_packet_round_trip(self):
        """Test MicEPacket round-trip: to_json -> from_dict."""
        original = self._build_mice()
        restored = packets.MicEPacket.from_dict(json.loads(original.to_json()))
        for attr in (
            'from_call',
            'to_call',
            'latitude',
            'longitude',
            'speed',
            'course',
            'mbits',
            'mtype',
            '_type',
        ):
            self.assertEqual(getattr(restored, attr), getattr(original, attr))

    def test_mice_packet_from_raw_string(self):
        """Test MicEPacket creation from raw APRS string."""
        raw = 'kh2sr-15>S7TSYR,WIDE1-1,WIDE2-1,qAO,KO6KL-1:`1`7\x1c\x1c.#/`"4,}QuirkyQRP 4.6V 35.3C S06'
        pkt = packets.factory(aprslib.parse(raw))
        self.assertIsInstance(pkt, packets.MicEPacket)
        serialized = pkt.to_json()
        self.assertIsInstance(serialized, str)
        data = json.loads(serialized)
        self.assertEqual(data['_type'], 'MicEPacket')
        # Rebuild through the factory and verify key fields survive.
        restored = packets.factory(data)
        self.assertIsInstance(restored, packets.MicEPacket)
        self.assertEqual(restored.from_call, pkt.from_call)
        if hasattr(pkt, 'latitude') and pkt.latitude:
            self.assertEqual(restored.latitude, pkt.latitude)
            self.assertEqual(restored.longitude, pkt.longitude)

View File

@ -0,0 +1,122 @@
import json
import unittest
import aprslib
from aprsd import packets
from tests import fake
class TestObjectPacket(unittest.TestCase):
    """Test ObjectPacket JSON serialization."""

    # Constructor kwargs for the packet under test; the JSON keys and the
    # attribute names match these one-for-one, so the tests iterate the map.
    KWARGS = {
        'from_call': fake.FAKE_FROM_CALLSIGN,
        'to_call': fake.FAKE_TO_CALLSIGN,
        'latitude': 37.7749,
        'longitude': -122.4194,
        'symbol': 'r',
        'symbol_table': '/',
        'comment': 'Test object comment',
        'alive': True,
        'speed': 25.5,
        'course': 180,
    }

    def test_object_packet_to_json(self):
        """Test ObjectPacket.to_json() method."""
        packet = packets.ObjectPacket(**self.KWARGS)
        serialized = packet.to_json()
        self.assertIsInstance(serialized, str)
        decoded = json.loads(serialized)
        self.assertEqual(decoded['_type'], 'ObjectPacket')
        for field, expected in self.KWARGS.items():
            self.assertEqual(decoded[field], expected)

    def test_object_packet_from_dict(self):
        """Test ObjectPacket.from_dict() method."""
        packet = packets.ObjectPacket.from_dict(
            dict(self.KWARGS, _type='ObjectPacket')
        )
        self.assertIsInstance(packet, packets.ObjectPacket)
        for field, expected in self.KWARGS.items():
            self.assertEqual(getattr(packet, field), expected)

    def test_object_packet_round_trip(self):
        """Test ObjectPacket round-trip: to_json -> from_dict."""
        original = packets.ObjectPacket(**self.KWARGS)
        restored = packets.ObjectPacket.from_dict(json.loads(original.to_json()))
        for field in list(self.KWARGS) + ['_type']:
            self.assertEqual(getattr(restored, field), getattr(original, field))

    def test_object_packet_from_raw_string(self):
        """Test ObjectPacket creation from raw APRS string."""
        # Use a working object packet example from the codebase
        packet_raw = (
            'REPEAT>APZ100:;K4CQ *301301z3735.11N/07903.08Wr145.490MHz T136 -060'
        )
        parsed = aprslib.parse(packet_raw)
        # aprslib might not set format correctly, so set it manually
        parsed['format'] = 'object'
        packet = packets.factory(parsed)
        self.assertIsInstance(packet, packets.ObjectPacket)
        # Test to_json
        serialized = packet.to_json()
        self.assertIsInstance(serialized, str)
        decoded = json.loads(serialized)
        self.assertEqual(decoded['_type'], 'ObjectPacket')
        # Test from_dict round trip
        restored = packets.factory(decoded)
        self.assertIsInstance(restored, packets.ObjectPacket)
        self.assertEqual(restored.from_call, packet.from_call)
        self.assertEqual(restored.to_call, packet.to_call)
        # Position may be absent depending on what aprslib decoded.
        if getattr(packet, 'latitude', None):
            self.assertEqual(restored.latitude, packet.latitude)
            self.assertEqual(restored.longitude, packet.longitude)

View File

@ -0,0 +1,75 @@
import json
import unittest
import aprslib
from aprsd import packets
from tests import fake
class TestPacket(unittest.TestCase):
    """Test Packet base class JSON serialization."""

    def test_packet_to_json(self):
        """Test Packet.to_json() method."""
        packet = packets.Packet(
            from_call=fake.FAKE_FROM_CALLSIGN,
            to_call=fake.FAKE_TO_CALLSIGN,
            msgNo='123',
        )
        serialized = packet.to_json()
        self.assertIsInstance(serialized, str)
        # Verify it's valid JSON and every field survived serialization.
        decoded = json.loads(serialized)
        self.assertEqual(decoded['from_call'], fake.FAKE_FROM_CALLSIGN)
        self.assertEqual(decoded['to_call'], fake.FAKE_TO_CALLSIGN)
        self.assertEqual(decoded['msgNo'], '123')

    def test_packet_from_dict(self):
        """Test Packet.from_dict() method."""
        source = {
            '_type': 'Packet',
            'from_call': fake.FAKE_FROM_CALLSIGN,
            'to_call': fake.FAKE_TO_CALLSIGN,
            'msgNo': '123',
        }
        packet = packets.Packet.from_dict(source)
        self.assertIsInstance(packet, packets.Packet)
        for attr in ('from_call', 'to_call', 'msgNo'):
            self.assertEqual(getattr(packet, attr), source[attr])

    def test_packet_round_trip(self):
        """Test Packet round-trip: to_json -> from_dict."""
        original = packets.Packet(
            from_call=fake.FAKE_FROM_CALLSIGN,
            to_call=fake.FAKE_TO_CALLSIGN,
            msgNo='123',
            addresse=fake.FAKE_TO_CALLSIGN,
        )
        restored = packets.Packet.from_dict(json.loads(original.to_json()))
        for attr in ('from_call', 'to_call', 'msgNo', 'addresse'):
            self.assertEqual(getattr(restored, attr), getattr(original, attr))

    def test_packet_from_raw_string(self):
        """Test Packet creation from raw APRS string."""
        # Note: Base Packet is rarely used directly, but we can test with a simple message
        packet_raw = 'KFAKE>APZ100::KMINE :Test message{123'
        parsed = aprslib.parse(packet_raw)
        # aprslib might not set format correctly, so set it manually
        parsed['format'] = 'message'
        packet = packets.factory(parsed)
        self.assertIsInstance(packet, packets.MessagePacket)
        # Test to_json
        serialized = packet.to_json()
        self.assertIsInstance(serialized, str)
        decoded = json.loads(serialized)
        self.assertIn('from_call', decoded)
        # Test from_dict round trip
        restored = packets.factory(decoded)
        self.assertEqual(restored.from_call, packet.from_call)
        self.assertEqual(restored.to_call, packet.to_call)

View File

@ -0,0 +1,76 @@
import json
import unittest
import aprslib
from aprsd import packets
from tests import fake
class TestRejectPacket(unittest.TestCase):
    """Test RejectPacket JSON serialization."""

    # Constructor kwargs for the packet under test.  ``response`` is part of
    # the packet but, as in the original coverage, it is not asserted after
    # serialization.
    KWARGS = {
        'from_call': fake.FAKE_FROM_CALLSIGN,
        'to_call': fake.FAKE_TO_CALLSIGN,
        'msgNo': '123',
        'response': 'rej',
    }
    CHECKED = ('from_call', 'to_call', 'msgNo')

    def test_reject_packet_to_json(self):
        """Test RejectPacket.to_json() method."""
        packet = packets.RejectPacket(**self.KWARGS)
        serialized = packet.to_json()
        self.assertIsInstance(serialized, str)
        decoded = json.loads(serialized)
        self.assertEqual(decoded['_type'], 'RejectPacket')
        for field in self.CHECKED:
            self.assertEqual(decoded[field], self.KWARGS[field])

    def test_reject_packet_from_dict(self):
        """Test RejectPacket.from_dict() method."""
        packet = packets.RejectPacket.from_dict(
            dict(self.KWARGS, _type='RejectPacket')
        )
        self.assertIsInstance(packet, packets.RejectPacket)
        for field in self.CHECKED:
            self.assertEqual(getattr(packet, field), self.KWARGS[field])

    def test_reject_packet_round_trip(self):
        """Test RejectPacket round-trip: to_json -> from_dict."""
        original = packets.RejectPacket(**self.KWARGS)
        restored = packets.RejectPacket.from_dict(json.loads(original.to_json()))
        for field in self.CHECKED + ('_type',):
            self.assertEqual(getattr(restored, field), getattr(original, field))

    def test_reject_packet_from_raw_string(self):
        """Test RejectPacket creation from raw APRS string."""
        packet_raw = 'HB9FDL-1>APK102,HB9FM-4*,WIDE2,qAR,HB9FEF-11::REPEAT :rej4139'
        packet = packets.factory(aprslib.parse(packet_raw))
        self.assertIsInstance(packet, packets.RejectPacket)
        # Test to_json
        serialized = packet.to_json()
        self.assertIsInstance(serialized, str)
        decoded = json.loads(serialized)
        self.assertEqual(decoded['_type'], 'RejectPacket')
        # Test from_dict round trip
        restored = packets.factory(decoded)
        self.assertIsInstance(restored, packets.RejectPacket)
        for field in self.CHECKED:
            self.assertEqual(getattr(restored, field), getattr(packet, field))

View File

@ -0,0 +1,93 @@
import json
import unittest
import aprslib
from aprsd import packets
from tests import fake
class TestStatusPacket(unittest.TestCase):
    """Test StatusPacket JSON serialization."""

    # Constructor kwargs for the packet under test; JSON keys and attribute
    # names match these one-for-one, so the tests iterate the map.
    KWARGS = {
        'from_call': fake.FAKE_FROM_CALLSIGN,
        'to_call': fake.FAKE_TO_CALLSIGN,
        'status': 'Test status message',
        'msgNo': '123',
        'messagecapable': True,
        'comment': 'Test comment',
    }

    def test_status_packet_to_json(self):
        """Test StatusPacket.to_json() method."""
        packet = packets.StatusPacket(**self.KWARGS)
        serialized = packet.to_json()
        self.assertIsInstance(serialized, str)
        decoded = json.loads(serialized)
        self.assertEqual(decoded['_type'], 'StatusPacket')
        for field, expected in self.KWARGS.items():
            self.assertEqual(decoded[field], expected)

    def test_status_packet_from_dict(self):
        """Test StatusPacket.from_dict() method."""
        packet = packets.StatusPacket.from_dict(
            dict(self.KWARGS, _type='StatusPacket')
        )
        self.assertIsInstance(packet, packets.StatusPacket)
        for field, expected in self.KWARGS.items():
            self.assertEqual(getattr(packet, field), expected)

    def test_status_packet_round_trip(self):
        """Test StatusPacket round-trip: to_json -> from_dict."""
        original = packets.StatusPacket(**self.KWARGS)
        restored = packets.StatusPacket.from_dict(json.loads(original.to_json()))
        for field in list(self.KWARGS) + ['_type']:
            self.assertEqual(getattr(restored, field), getattr(original, field))

    def test_status_packet_from_raw_string(self):
        """Test StatusPacket creation from raw APRS string."""
        packet_raw = 'KFAKE>APZ100::KMINE :Test status message{123'
        parsed = aprslib.parse(packet_raw)
        # aprslib might not set format correctly, so set it manually
        parsed['format'] = 'status'
        packet = packets.factory(parsed)
        self.assertIsInstance(packet, packets.StatusPacket)
        # Test to_json
        serialized = packet.to_json()
        self.assertIsInstance(serialized, str)
        decoded = json.loads(serialized)
        self.assertEqual(decoded['_type'], 'StatusPacket')
        # Test from_dict round trip
        restored = packets.factory(decoded)
        self.assertIsInstance(restored, packets.StatusPacket)
        self.assertEqual(restored.from_call, packet.from_call)
        self.assertEqual(restored.to_call, packet.to_call)
        self.assertEqual(restored.status, packet.status)

View File

@ -0,0 +1,115 @@
import json
import unittest
import aprslib
from aprsd import packets
from aprsd.packets.core import TelemetryPacket
from tests import fake
class TestTelemetryPacket(unittest.TestCase):
    """Test TelemetryPacket JSON serialization."""

    # Full constructor kwargs.  Only the keys in CHECKED are asserted after
    # serialization, matching the original coverage (telemetry / tPARM /
    # tUNIT are supplied but not round-trip checked).
    KWARGS = {
        'from_call': fake.FAKE_FROM_CALLSIGN,
        'to_call': fake.FAKE_TO_CALLSIGN,
        'latitude': 37.7749,
        'longitude': -122.4194,
        'speed': 25.5,
        'course': 180,
        'mbits': 'test',
        'mtype': 'test_type',
        'telemetry': {'key': 'value'},
        'tPARM': ['parm1', 'parm2'],
        'tUNIT': ['unit1', 'unit2'],
    }
    CHECKED = (
        'from_call', 'to_call', 'latitude', 'longitude',
        'speed', 'course', 'mbits', 'mtype',
    )

    def test_telemetry_packet_to_json(self):
        """Test TelemetryPacket.to_json() method."""
        packet = TelemetryPacket(**self.KWARGS)
        serialized = packet.to_json()
        self.assertIsInstance(serialized, str)
        decoded = json.loads(serialized)
        self.assertEqual(decoded['_type'], 'TelemetryPacket')
        for field in self.CHECKED:
            self.assertEqual(decoded[field], self.KWARGS[field])

    def test_telemetry_packet_from_dict(self):
        """Test TelemetryPacket.from_dict() method."""
        packet = TelemetryPacket.from_dict(
            dict(self.KWARGS, _type='TelemetryPacket')
        )
        self.assertIsInstance(packet, TelemetryPacket)
        for field in self.CHECKED:
            self.assertEqual(getattr(packet, field), self.KWARGS[field])

    def test_telemetry_packet_round_trip(self):
        """Test TelemetryPacket round-trip: to_json -> from_dict."""
        original = TelemetryPacket(**self.KWARGS)
        restored = TelemetryPacket.from_dict(json.loads(original.to_json()))
        for field in self.CHECKED + ('_type',):
            self.assertEqual(getattr(restored, field), getattr(original, field))

    def test_telemetry_packet_from_raw_string(self):
        """Test TelemetryPacket creation from raw APRS string."""
        # Telemetry packets are less common, using a Mic-E with telemetry as example
        packet_raw = (
            "KD9YIL>T0PX9W,WIDE1-1,WIDE2-1,qAO,NU9R-10:`sB,l#P>/'\"6+}|#*%U'a|!whl!|3"
        )
        packet = packets.factory(aprslib.parse(packet_raw))
        # This might be MicEPacket or TelemetryPacket depending on content
        self.assertIsNotNone(packet)
        # Test to_json
        serialized = packet.to_json()
        self.assertIsInstance(serialized, str)
        decoded = json.loads(serialized)
        # Test from_dict round trip
        restored = packets.factory(decoded)
        self.assertEqual(restored.from_call, packet.from_call)
        if getattr(packet, 'telemetry', None):
            self.assertIsNotNone(restored.telemetry)

View File

@ -0,0 +1,99 @@
import json
import unittest
import aprslib
from aprsd import packets
from tests import fake
class TestThirdPartyPacket(unittest.TestCase):
    """Test ThirdPartyPacket JSON serialization."""

    @staticmethod
    def _make_subpacket():
        """Build the MessagePacket wrapped inside the third-party frame."""
        return packets.MessagePacket(
            from_call='SUB',
            to_call='TARGET',
            message_text='Sub message',
        )

    def test_thirdparty_packet_to_json(self):
        """Test ThirdPartyPacket.to_json() method."""
        packet = packets.ThirdPartyPacket(
            from_call=fake.FAKE_FROM_CALLSIGN,
            to_call=fake.FAKE_TO_CALLSIGN,
            subpacket=self._make_subpacket(),
        )
        serialized = packet.to_json()
        self.assertIsInstance(serialized, str)
        decoded = json.loads(serialized)
        self.assertEqual(decoded['_type'], 'ThirdPartyPacket')
        self.assertEqual(decoded['from_call'], fake.FAKE_FROM_CALLSIGN)
        self.assertEqual(decoded['to_call'], fake.FAKE_TO_CALLSIGN)
        # subpacket should be serialized as a dict
        self.assertIn('subpacket', decoded)
        self.assertIsInstance(decoded['subpacket'], dict)

    def test_thirdparty_packet_from_dict(self):
        """Test ThirdPartyPacket.from_dict() method."""
        packet = packets.ThirdPartyPacket.from_dict({
            '_type': 'ThirdPartyPacket',
            'from_call': fake.FAKE_FROM_CALLSIGN,
            'to_call': fake.FAKE_TO_CALLSIGN,
            'subpacket': {
                '_type': 'MessagePacket',
                'from_call': 'SUB',
                'to_call': 'TARGET',
                'message_text': 'Sub message',
            },
        })
        self.assertIsInstance(packet, packets.ThirdPartyPacket)
        self.assertEqual(packet.from_call, fake.FAKE_FROM_CALLSIGN)
        self.assertEqual(packet.to_call, fake.FAKE_TO_CALLSIGN)
        self.assertIsNotNone(packet.subpacket)
        self.assertIsInstance(packet.subpacket, packets.MessagePacket)

    def test_thirdparty_packet_round_trip(self):
        """Test ThirdPartyPacket round-trip: to_json -> from_dict."""
        original = packets.ThirdPartyPacket(
            from_call=fake.FAKE_FROM_CALLSIGN,
            to_call=fake.FAKE_TO_CALLSIGN,
            subpacket=self._make_subpacket(),
        )
        restored = packets.ThirdPartyPacket.from_dict(
            json.loads(original.to_json())
        )
        for attr in ('from_call', 'to_call', '_type'):
            self.assertEqual(getattr(restored, attr), getattr(original, attr))
        # Verify subpacket was restored
        self.assertIsNotNone(restored.subpacket)
        self.assertIsInstance(restored.subpacket, packets.MessagePacket)
        for attr in ('from_call', 'to_call', 'message_text'):
            self.assertEqual(
                getattr(restored.subpacket, attr),
                getattr(original.subpacket, attr),
            )

    def test_thirdparty_packet_from_raw_string(self):
        """Test ThirdPartyPacket creation from raw APRS string."""
        packet_raw = 'GTOWN>APDW16,WIDE1-1,WIDE2-1:}KM6LYW-9>APZ100,TCPIP,GTOWN*::KM6LYW :KM6LYW: 19 Miles SW'
        packet = packets.factory(aprslib.parse(packet_raw))
        self.assertIsInstance(packet, packets.ThirdPartyPacket)
        # Test to_json
        serialized = packet.to_json()
        self.assertIsInstance(serialized, str)
        decoded = json.loads(serialized)
        self.assertEqual(decoded['_type'], 'ThirdPartyPacket')
        # Test from_dict round trip
        restored = packets.factory(decoded)
        self.assertIsInstance(restored, packets.ThirdPartyPacket)
        self.assertEqual(restored.from_call, packet.from_call)
        self.assertIsNotNone(restored.subpacket)
        self.assertEqual(restored.subpacket.from_call, packet.subpacket.from_call)

View File

@ -0,0 +1,82 @@
import json
import unittest
import aprslib
from aprsd import packets
from tests import fake
class TestUnknownPacket(unittest.TestCase):
    """Test UnknownPacket JSON serialization."""

    def test_unknown_packet_to_json(self):
        """Test UnknownPacket.to_json() method."""
        packet = packets.UnknownPacket(
            from_call=fake.FAKE_FROM_CALLSIGN,
            to_call=fake.FAKE_TO_CALLSIGN,
            format='unknown_format',
            packet_type='unknown',
            unknown_fields={'extra_field': 'extra_value'},
        )
        serialized = packet.to_json()
        self.assertIsInstance(serialized, str)
        decoded = json.loads(serialized)
        expected = {
            '_type': 'UnknownPacket',
            'from_call': fake.FAKE_FROM_CALLSIGN,
            'to_call': fake.FAKE_TO_CALLSIGN,
            'format': 'unknown_format',
            'packet_type': 'unknown',
        }
        for field, value in expected.items():
            self.assertEqual(decoded[field], value)

    def test_unknown_packet_from_dict(self):
        """Test UnknownPacket.from_dict() method."""
        # Note: unknown extra keys ride along at the top level of the dict.
        packet = packets.UnknownPacket.from_dict({
            '_type': 'UnknownPacket',
            'from_call': fake.FAKE_FROM_CALLSIGN,
            'to_call': fake.FAKE_TO_CALLSIGN,
            'format': 'unknown_format',
            'packet_type': 'unknown',
            'extra_field': 'extra_value',
        })
        self.assertIsInstance(packet, packets.UnknownPacket)
        self.assertEqual(packet.from_call, fake.FAKE_FROM_CALLSIGN)
        self.assertEqual(packet.to_call, fake.FAKE_TO_CALLSIGN)
        self.assertEqual(packet.format, 'unknown_format')
        self.assertEqual(packet.packet_type, 'unknown')

    def test_unknown_packet_round_trip(self):
        """Test UnknownPacket round-trip: to_json -> from_dict."""
        original = packets.UnknownPacket(
            from_call=fake.FAKE_FROM_CALLSIGN,
            to_call=fake.FAKE_TO_CALLSIGN,
            format='unknown_format',
            packet_type='unknown',
            unknown_fields={'extra_field': 'extra_value'},
        )
        restored = packets.UnknownPacket.from_dict(json.loads(original.to_json()))
        for attr in ('from_call', 'to_call', 'format', 'packet_type', '_type'):
            self.assertEqual(getattr(restored, attr), getattr(original, attr))

    def test_unknown_packet_from_raw_string(self):
        """Test UnknownPacket creation from raw APRS string."""
        # Use a packet format that might not be recognized
        packet_raw = 'KFAKE>APZ100:>Unknown format data'
        packet = packets.factory(aprslib.parse(packet_raw))
        # This might be UnknownPacket or another type depending on parsing
        self.assertIsNotNone(packet)
        # Test to_json
        serialized = packet.to_json()
        self.assertIsInstance(serialized, str)
        decoded = json.loads(serialized)
        # Test from_dict round trip
        restored = packets.factory(decoded)
        self.assertEqual(restored.from_call, packet.from_call)
        if isinstance(packet, packets.UnknownPacket):
            self.assertIsInstance(restored, packets.UnknownPacket)

View File

@ -0,0 +1,151 @@
import json
import unittest
import aprslib
from aprsd import packets
from tests import fake
class TestWeatherPacket(unittest.TestCase):
    """Test WeatherPacket JSON serialization."""

    # Constructor kwargs for the packet under test.  ``symbol_table`` is
    # supplied but, as in the original coverage, not asserted afterwards.
    KWARGS = {
        'from_call': fake.FAKE_FROM_CALLSIGN,
        'to_call': fake.FAKE_TO_CALLSIGN,
        'latitude': 37.7749,
        'longitude': -122.4194,
        'symbol': '_',
        'symbol_table': '/',
        'wind_speed': 10.5,
        'wind_direction': 180,
        'wind_gust': 15.0,
        'temperature': 72.5,
        'rain_1h': 0.1,
        'rain_24h': 0.5,
        'rain_since_midnight': 0.3,
        'humidity': 65,
        'pressure': 1013.25,
        'comment': 'Test weather comment',
    }
    CHECKED = tuple(k for k in KWARGS if k != 'symbol_table')

    def test_weather_packet_to_json(self):
        """Test WeatherPacket.to_json() method."""
        packet = packets.WeatherPacket(**self.KWARGS)
        serialized = packet.to_json()
        self.assertIsInstance(serialized, str)
        decoded = json.loads(serialized)
        self.assertEqual(decoded['_type'], 'WeatherPacket')
        for field in self.CHECKED:
            self.assertEqual(decoded[field], self.KWARGS[field])

    def test_weather_packet_from_dict(self):
        """Test WeatherPacket.from_dict() method."""
        packet = packets.WeatherPacket.from_dict(
            dict(self.KWARGS, _type='WeatherPacket')
        )
        self.assertIsInstance(packet, packets.WeatherPacket)
        for field in self.CHECKED:
            self.assertEqual(getattr(packet, field), self.KWARGS[field])

    def test_weather_packet_round_trip(self):
        """Test WeatherPacket round-trip: to_json -> from_dict."""
        original = packets.WeatherPacket(**self.KWARGS)
        restored = packets.WeatherPacket.from_dict(json.loads(original.to_json()))
        for field in self.CHECKED + ('_type',):
            self.assertEqual(getattr(restored, field), getattr(original, field))

    def test_weather_packet_from_raw_string(self):
        """Test WeatherPacket creation from raw APRS string."""
        packet_raw = 'FW9222>APRS,TCPXX*,qAX,CWOP-6:@122025z2953.94N/08423.77W_232/003g006t084r000p032P000h80b10157L745.DsWLL'
        packet = packets.factory(aprslib.parse(packet_raw))
        self.assertIsInstance(packet, packets.WeatherPacket)
        # Test to_json
        serialized = packet.to_json()
        self.assertIsInstance(serialized, str)
        decoded = json.loads(serialized)
        self.assertEqual(decoded['_type'], 'WeatherPacket')
        # Test from_dict round trip
        restored = packets.factory(decoded)
        self.assertIsInstance(restored, packets.WeatherPacket)
        for attr in (
            'from_call', 'temperature', 'humidity',
            'pressure', 'wind_speed', 'wind_direction',
        ):
            self.assertEqual(getattr(restored, attr), getattr(packet, attr))

View File

@ -7,29 +7,28 @@ from aprsd.plugins import fortune as fortune_plugin
from .. import fake, test_plugin
CONF = cfg.CONF
class TestFortunePlugin(test_plugin.TestPlugin):
@mock.patch("shutil.which")
@mock.patch('shutil.which')
def test_fortune_fail(self, mock_which):
mock_which.return_value = None
fortune = fortune_plugin.FortunePlugin()
expected = "FortunePlugin isn't enabled"
packet = fake.fake_packet(message="fortune")
packet = fake.fake_packet(message='fortune')
actual = fortune.filter(packet)
self.assertEqual(expected, actual)
@mock.patch("subprocess.check_output")
@mock.patch("shutil.which")
@mock.patch('subprocess.check_output')
@mock.patch('shutil.which')
def test_fortune_success(self, mock_which, mock_output):
mock_which.return_value = "/usr/bin/games/fortune"
mock_output.return_value = "Funny fortune"
mock_which.return_value = '/usr/bin/games/fortune'
mock_output.return_value = 'Funny fortune'
CONF.callsign = fake.FAKE_TO_CALLSIGN
fortune = fortune_plugin.FortunePlugin()
expected = "Funny fortune"
packet = fake.fake_packet(message="fortune")
expected = 'Funny fortune'
packet = fake.fake_packet(message='fortune')
actual = fortune.filter(packet)
self.assertEqual(expected, actual)

View File

@ -65,7 +65,6 @@ class TestWatchListPlugin(test_plugin.TestPlugin):
watchlist_callsigns=DEFAULT_WATCHLIST_CALLSIGNS,
):
CONF.callsign = self.fromcall
CONF.aprs_network.login = self.fromcall
CONF.aprs_fi.apiKey = 'something'
# Add mock password
CONF.aprs_network.password = '12345'

View File

@ -0,0 +1,85 @@
import os
import unittest
from aprsd import plugin
from aprsd.utils import package
class TestPackage(unittest.TestCase):
    """Exercise the aprsd.utils.package introspection helpers."""

    def test_plugin_type(self):
        # Each plugin base class maps to a short human-readable type name.
        for cls, expected in (
            (plugin.APRSDRegexCommandPluginBase, 'RegexCommand'),
            (plugin.APRSDWatchListPluginBase, 'WatchList'),
            (plugin.APRSDPluginBase, 'APRSDPluginBase'),
        ):
            self.assertEqual(package.plugin_type(cls), expected)

    def test_is_plugin(self):
        class TestPlugin(plugin.APRSDPluginBase):
            def setup(self):
                pass

            def filter(self, packet):
                pass

            def process(self, packet):
                pass

        class NonPlugin:
            pass

        self.assertTrue(package.is_plugin(TestPlugin))
        self.assertFalse(package.is_plugin(NonPlugin))

    def test_walk_package(self):
        import aprsd.utils

        # walk_package returns an iterator, so we just check it's not None
        self.assertIsNotNone(package.walk_package(aprsd.utils))

    def test_get_module_info(self):
        # Test with a specific, limited directory to avoid hanging
        # Use the aprsd/utils directory which is small and safe
        import aprsd.utils

        module_path = os.path.dirname(aprsd.utils.__file__)
        module_info = package.get_module_info('aprsd.utils', 'package', module_path)
        # The result should be a list (even if empty)
        self.assertIsInstance(module_info, list)

    def test_is_aprsd_package(self):
        self.assertTrue(package.is_aprsd_package('aprsd_plugin'))
        self.assertFalse(package.is_aprsd_package('other'))

    def test_is_aprsd_extension(self):
        self.assertTrue(package.is_aprsd_extension('aprsd_extension_plugin'))
        self.assertFalse(package.is_aprsd_extension('other'))

    def test_get_installed_aprsd_items(self):
        plugins, extensions = package.get_installed_aprsd_items()
        self.assertIsNotNone(plugins)
        self.assertIsNotNone(extensions)

    def test_get_installed_plugins(self):
        self.assertIsNotNone(package.get_installed_plugins())

    def test_get_installed_extensions(self):
        self.assertIsNotNone(package.get_installed_extensions())

    def test_get_pypi_packages(self):
        self.assertIsNotNone(package.get_pypi_packages())

    def test_log_installed_extensions_and_plugins(self):
        # Just ensure the logging helper runs without raising.
        package.log_installed_extensions_and_plugins()
if __name__ == '__main__':
unittest.main()

View File

@ -7,12 +7,11 @@ from aprsd.plugins import ping as ping_plugin
from .. import fake, test_plugin
CONF = cfg.CONF
class TestPingPlugin(test_plugin.TestPlugin):
@mock.patch("time.localtime")
@mock.patch('time.localtime')
def test_ping(self, mock_time):
fake_time = mock.MagicMock()
h = fake_time.tm_hour = 16
@ -24,7 +23,7 @@ class TestPingPlugin(test_plugin.TestPlugin):
ping = ping_plugin.PingPlugin()
packet = fake.fake_packet(
message="location",
message='location',
msg_number=1,
)
@ -33,16 +32,16 @@ class TestPingPlugin(test_plugin.TestPlugin):
def ping_str(h, m, s):
return (
"Pong! "
'Pong! '
+ str(h).zfill(2)
+ ":"
+ ':'
+ str(m).zfill(2)
+ ":"
+ ':'
+ str(s).zfill(2)
)
packet = fake.fake_packet(
message="Ping",
message='Ping',
msg_number=1,
)
actual = ping.filter(packet)
@ -50,7 +49,7 @@ class TestPingPlugin(test_plugin.TestPlugin):
self.assertEqual(expected, actual)
packet = fake.fake_packet(
message="ping",
message='ping',
msg_number=1,
)
actual = ping.filter(packet)

View File

@ -12,26 +12,26 @@ CONF = cfg.CONF
class TestTimePlugins(test_plugin.TestPlugin):
@mock.patch("aprsd.plugins.time.TimePlugin._get_local_tz")
@mock.patch("aprsd.plugins.time.TimePlugin._get_utcnow")
@mock.patch('aprsd.plugins.time.TimePlugin._get_local_tz')
@mock.patch('aprsd.plugins.time.TimePlugin._get_utcnow')
def test_time(self, mock_utcnow, mock_localtz):
utcnow = pytz.datetime.datetime.utcnow()
mock_utcnow.return_value = utcnow
tz = pytz.timezone("US/Pacific")
tz = pytz.timezone('US/Pacific')
mock_localtz.return_value = tz
gmt_t = pytz.utc.localize(utcnow)
local_t = gmt_t.astimezone(tz)
fake_time = mock.MagicMock()
h = int(local_t.strftime("%H"))
m = int(local_t.strftime("%M"))
h = int(local_t.strftime('%H'))
m = int(local_t.strftime('%M'))
fake_time.tm_sec = 13
CONF.callsign = fake.FAKE_TO_CALLSIGN
time = time_plugin.TimePlugin()
packet = fake.fake_packet(
message="location",
message='location',
msg_number=1,
)
@ -41,11 +41,11 @@ class TestTimePlugins(test_plugin.TestPlugin):
cur_time = fuzzy(h, m, 1)
packet = fake.fake_packet(
message="time",
message='time',
msg_number=1,
)
local_short_str = local_t.strftime("%H:%M %Z")
expected = "{} ({})".format(
local_short_str = local_t.strftime('%H:%M %Z')
expected = '{} ({})'.format(
cur_time,
local_short_str,
)

View File

@ -40,8 +40,9 @@ class TestVersionPlugin(test_plugin.TestPlugin):
}
}
expected = f'APRSD ver:{aprsd.__version__} uptime:00:00:00'
CONF.callsign = fake.FAKE_TO_CALLSIGN
CONF.owner_callsign = None
expected = f'APRSD ver:{aprsd.__version__} uptime:00:00:00 owner:-'
version = version_plugin.VersionPlugin()
version.enabled = True
@ -62,3 +63,22 @@ class TestVersionPlugin(test_plugin.TestPlugin):
# Verify the mock was called exactly once
mock_collector_instance.collect.assert_called_once()
@mock.patch('aprsd.stats.collector.Collector')
def test_version_shows_owner_callsign_when_set(self, mock_collector_class):
mock_collector_instance = mock_collector_class.return_value
mock_collector_instance.collect.return_value = {
'APRSDStats': {'uptime': '01:23:45'},
}
CONF.callsign = fake.FAKE_TO_CALLSIGN
CONF.owner_callsign = 'K0WN3R'
version = version_plugin.VersionPlugin()
version.enabled = True
packet = fake.fake_packet(message='version', msg_number=1)
actual = version.filter(packet)
self.assertEqual(
actual,
f'APRSD ver:{aprsd.__version__} uptime:01:23:45 owner:K0WN3R',
)

View File

@ -18,89 +18,89 @@ class TestUSWeatherPlugin(test_plugin.TestPlugin):
CONF.callsign = fake.FAKE_TO_CALLSIGN
wx = weather_plugin.USWeatherPlugin()
expected = "USWeatherPlugin isn't enabled"
packet = fake.fake_packet(message="weather")
packet = fake.fake_packet(message='weather')
actual = wx.filter(packet)
self.assertEqual(expected, actual)
@mock.patch("aprsd.plugin_utils.get_aprs_fi")
@mock.patch('aprsd.plugin_utils.get_aprs_fi')
def test_failed_aprs_fi_location(self, mock_check):
# When the aprs.fi api key isn't set, then
# the Plugin will be disabled.
mock_check.side_effect = Exception
CONF.aprs_fi.apiKey = "abc123"
CONF.aprs_fi.apiKey = 'abc123'
CONF.callsign = fake.FAKE_TO_CALLSIGN
wx = weather_plugin.USWeatherPlugin()
expected = "Failed to fetch aprs.fi location"
packet = fake.fake_packet(message="weather")
expected = 'Failed to fetch aprs.fi location'
packet = fake.fake_packet(message='weather')
actual = wx.filter(packet)
self.assertEqual(expected, actual)
@mock.patch("aprsd.plugin_utils.get_aprs_fi")
@mock.patch('aprsd.plugin_utils.get_aprs_fi')
def test_failed_aprs_fi_location_no_entries(self, mock_check):
# When the aprs.fi api key isn't set, then
# the Plugin will be disabled.
mock_check.return_value = {"entries": []}
CONF.aprs_fi.apiKey = "abc123"
mock_check.return_value = {'entries': []}
CONF.aprs_fi.apiKey = 'abc123'
CONF.callsign = fake.FAKE_TO_CALLSIGN
wx = weather_plugin.USWeatherPlugin()
wx.enabled = True
expected = "Failed to fetch aprs.fi location"
packet = fake.fake_packet(message="weather")
expected = 'Failed to fetch aprs.fi location'
packet = fake.fake_packet(message='weather')
actual = wx.filter(packet)
self.assertEqual(expected, actual)
@mock.patch("aprsd.plugin_utils.get_aprs_fi")
@mock.patch("aprsd.plugin_utils.get_weather_gov_for_gps")
@mock.patch('aprsd.plugin_utils.get_aprs_fi')
@mock.patch('aprsd.plugin_utils.get_weather_gov_for_gps')
def test_unknown_gps(self, mock_weather, mock_check_aprs):
# When the aprs.fi api key isn't set, then
# the LocationPlugin will be disabled.
mock_check_aprs.return_value = {
"entries": [
'entries': [
{
"lat": 10,
"lng": 11,
"lasttime": 10,
'lat': 10,
'lng': 11,
'lasttime': 10,
},
],
}
mock_weather.side_effect = Exception
CONF.aprs_fi.apiKey = "abc123"
CONF.aprs_fi.apiKey = 'abc123'
CONF.callsign = fake.FAKE_TO_CALLSIGN
wx = weather_plugin.USWeatherPlugin()
wx.enabled = True
expected = "Unable to get weather"
packet = fake.fake_packet(message="weather")
expected = 'Unable to get weather'
packet = fake.fake_packet(message='weather')
actual = wx.filter(packet)
self.assertEqual(expected, actual)
@mock.patch("aprsd.plugin_utils.get_aprs_fi")
@mock.patch("aprsd.plugin_utils.get_weather_gov_for_gps")
@mock.patch('aprsd.plugin_utils.get_aprs_fi')
@mock.patch('aprsd.plugin_utils.get_weather_gov_for_gps')
def test_working(self, mock_weather, mock_check_aprs):
# When the aprs.fi api key isn't set, then
# the LocationPlugin will be disabled.
mock_check_aprs.return_value = {
"entries": [
'entries': [
{
"lat": 10,
"lng": 11,
"lasttime": 10,
'lat': 10,
'lng': 11,
'lasttime': 10,
},
],
}
mock_weather.return_value = {
"currentobservation": {"Temp": "400"},
"data": {
"temperature": ["10", "11"],
"weather": ["test", "another"],
'currentobservation': {'Temp': '400'},
'data': {
'temperature': ['10', '11'],
'weather': ['test', 'another'],
},
"time": {"startPeriodName": ["ignored", "sometime"]},
'time': {'startPeriodName': ['ignored', 'sometime']},
}
CONF.aprs_fi.apiKey = "abc123"
CONF.aprs_fi.apiKey = 'abc123'
CONF.callsign = fake.FAKE_TO_CALLSIGN
wx = weather_plugin.USWeatherPlugin()
wx.enabled = True
expected = "400F(10F/11F) test. sometime, another."
packet = fake.fake_packet(message="weather")
expected = '400F(10F/11F) test. sometime, another.'
packet = fake.fake_packet(message='weather')
actual = wx.filter(packet)
self.assertEqual(expected, actual)
@ -112,93 +112,93 @@ class TestUSMetarPlugin(test_plugin.TestPlugin):
CONF.aprs_fi.apiKey = None
wx = weather_plugin.USMetarPlugin()
expected = "USMetarPlugin isn't enabled"
packet = fake.fake_packet(message="metar")
packet = fake.fake_packet(message='metar')
actual = wx.filter(packet)
self.assertEqual(expected, actual)
@mock.patch("aprsd.plugin_utils.get_aprs_fi")
@mock.patch('aprsd.plugin_utils.get_aprs_fi')
def test_failed_aprs_fi_location(self, mock_check):
# When the aprs.fi api key isn't set, then
# the Plugin will be disabled.
mock_check.side_effect = Exception
CONF.aprs_fi.apiKey = "abc123"
CONF.aprs_fi.apiKey = 'abc123'
CONF.callsign = fake.FAKE_TO_CALLSIGN
wx = weather_plugin.USMetarPlugin()
wx.enabled = True
expected = "Failed to fetch aprs.fi location"
packet = fake.fake_packet(message="metar")
expected = 'Failed to fetch aprs.fi location'
packet = fake.fake_packet(message='metar')
actual = wx.filter(packet)
self.assertEqual(expected, actual)
@mock.patch("aprsd.plugin_utils.get_aprs_fi")
@mock.patch('aprsd.plugin_utils.get_aprs_fi')
def test_failed_aprs_fi_location_no_entries(self, mock_check):
# When the aprs.fi api key isn't set, then
# the Plugin will be disabled.
mock_check.return_value = {"entries": []}
CONF.aprs_fi.apiKey = "abc123"
mock_check.return_value = {'entries': []}
CONF.aprs_fi.apiKey = 'abc123'
CONF.callsign = fake.FAKE_TO_CALLSIGN
wx = weather_plugin.USMetarPlugin()
wx.enabled = True
expected = "Failed to fetch aprs.fi location"
packet = fake.fake_packet(message="metar")
expected = 'Failed to fetch aprs.fi location'
packet = fake.fake_packet(message='metar')
actual = wx.filter(packet)
self.assertEqual(expected, actual)
@mock.patch("aprsd.plugin_utils.get_weather_gov_metar")
@mock.patch('aprsd.plugin_utils.get_weather_gov_metar')
def test_gov_metar_fetch_fails(self, mock_metar):
mock_metar.side_effect = Exception
CONF.aprs_fi.apiKey = "abc123"
CONF.aprs_fi.apiKey = 'abc123'
CONF.callsign = fake.FAKE_TO_CALLSIGN
wx = weather_plugin.USMetarPlugin()
wx.enabled = True
expected = "Unable to find station METAR"
packet = fake.fake_packet(message="metar KPAO")
expected = 'Unable to find station METAR'
packet = fake.fake_packet(message='metar KPAO')
actual = wx.filter(packet)
self.assertEqual(expected, actual)
@mock.patch("aprsd.plugin_utils.get_weather_gov_metar")
@mock.patch('aprsd.plugin_utils.get_weather_gov_metar')
def test_airport_works(self, mock_metar):
class Response:
text = '{"properties": {"rawMessage": "BOGUSMETAR"}}'
mock_metar.return_value = Response()
CONF.aprs_fi.apiKey = "abc123"
CONF.aprs_fi.apiKey = 'abc123'
CONF.callsign = fake.FAKE_TO_CALLSIGN
wx = weather_plugin.USMetarPlugin()
wx.enabled = True
expected = "BOGUSMETAR"
packet = fake.fake_packet(message="metar KPAO")
expected = 'BOGUSMETAR'
packet = fake.fake_packet(message='metar KPAO')
actual = wx.filter(packet)
self.assertEqual(expected, actual)
@mock.patch("aprsd.plugin_utils.get_weather_gov_metar")
@mock.patch("aprsd.plugin_utils.get_aprs_fi")
@mock.patch("aprsd.plugin_utils.get_weather_gov_for_gps")
@mock.patch('aprsd.plugin_utils.get_weather_gov_metar')
@mock.patch('aprsd.plugin_utils.get_aprs_fi')
@mock.patch('aprsd.plugin_utils.get_weather_gov_for_gps')
def test_metar_works(self, mock_wx_for_gps, mock_check_aprs, mock_metar):
mock_wx_for_gps.return_value = {
"location": {"metar": "BOGUSMETAR"},
'location': {'metar': 'BOGUSMETAR'},
}
class Response:
text = '{"properties": {"rawMessage": "BOGUSMETAR"}}'
mock_check_aprs.return_value = {
"entries": [
'entries': [
{
"lat": 10,
"lng": 11,
"lasttime": 10,
'lat': 10,
'lng': 11,
'lasttime': 10,
},
],
}
mock_metar.return_value = Response()
CONF.aprs_fi.apiKey = "abc123"
CONF.aprs_fi.apiKey = 'abc123'
CONF.callsign = fake.FAKE_TO_CALLSIGN
wx = weather_plugin.USMetarPlugin()
wx.enabled = True
expected = "BOGUSMETAR"
packet = fake.fake_packet(message="metar")
expected = 'BOGUSMETAR'
packet = fake.fake_packet(message='metar')
actual = wx.filter(packet)
self.assertEqual(expected, actual)

View File

@ -20,18 +20,18 @@ class TestPacketBase(unittest.TestCase):
message_format=core.PACKET_TYPE_MESSAGE,
):
packet_dict = {
"from": from_call,
"addresse": to_call,
"to": to_call,
"format": message_format,
"raw": "",
'from': from_call,
'addresse': to_call,
'to': to_call,
'format': message_format,
'raw': '',
}
if message:
packet_dict["message_text"] = message
packet_dict['message_text'] = message
if msg_number:
packet_dict["msgNo"] = str(msg_number)
packet_dict['msgNo'] = str(msg_number)
return packet_dict
@ -52,7 +52,7 @@ class TestPacketBase(unittest.TestCase):
self.assertEqual(
fake.FAKE_FROM_CALLSIGN,
pkt.get("from_call"),
pkt.get('from_call'),
)
def test_packet_factory(self):
@ -64,21 +64,21 @@ class TestPacketBase(unittest.TestCase):
self.assertEqual(fake.FAKE_TO_CALLSIGN, pkt.to_call)
self.assertEqual(fake.FAKE_TO_CALLSIGN, pkt.addresse)
pkt_dict["symbol"] = "_"
pkt_dict["weather"] = {
"wind_gust": 1.11,
"temperature": 32.01,
"humidity": 85,
"pressure": 1095.12,
"comment": "Home!",
pkt_dict['symbol'] = '_'
pkt_dict['weather'] = {
'wind_gust': 1.11,
'temperature': 32.01,
'humidity': 85,
'pressure': 1095.12,
'comment': 'Home!',
}
pkt_dict["format"] = core.PACKET_TYPE_UNCOMPRESSED
pkt_dict['format'] = core.PACKET_TYPE_UNCOMPRESSED
pkt = packets.factory(pkt_dict)
self.assertIsInstance(pkt, packets.WeatherPacket)
@mock.patch("aprsd.packets.core.GPSPacket._build_time_zulu")
@mock.patch('aprsd.packets.core.GPSPacket._build_time_zulu')
def test_packet_format_rain_1h(self, mock_time_zulu):
mock_time_zulu.return_value = "221450"
mock_time_zulu.return_value = '221450'
wx = packets.WeatherPacket(
from_call=fake.FAKE_FROM_CALLSIGN,
@ -87,58 +87,58 @@ class TestPacketBase(unittest.TestCase):
)
wx.prepare()
expected = "KFAKE>KMINE,WIDE1-1,WIDE2-1:@221450z0.0/0.0_000/000g000t000r000p000P000h00b00000"
expected = 'KFAKE>KMINE,WIDE1-1,WIDE2-1:@221450z0.0/0.0_000/000g000t000r000p000P000h00b00000'
self.assertEqual(expected, wx.raw)
rain_location = 59
self.assertEqual(rain_location, wx.raw.find("r000"))
self.assertEqual(rain_location, wx.raw.find('r000'))
wx.rain_1h = 1.11
wx.prepare()
expected = "KFAKE>KMINE,WIDE1-1,WIDE2-1:@221450z0.0/0.0_000/000g000t000r111p000P000h00b00000"
expected = 'KFAKE>KMINE,WIDE1-1,WIDE2-1:@221450z0.0/0.0_000/000g000t000r111p000P000h00b00000'
self.assertEqual(expected, wx.raw)
wx.rain_1h = 0.01
wx.prepare()
expected = "KFAKE>KMINE,WIDE1-1,WIDE2-1:@221450z0.0/0.0_000/000g000t000r001p000P000h00b00000"
expected = 'KFAKE>KMINE,WIDE1-1,WIDE2-1:@221450z0.0/0.0_000/000g000t000r001p000P000h00b00000'
self.assertEqual(expected, wx.raw)
def test_beacon_factory(self):
"""Test to ensure a beacon packet is created."""
packet_raw = (
"WB4BOR-12>APZ100,WIDE2-1:@161647z3724.15N107847.58W$ APRSD WebChat"
'WB4BOR-12>APZ100,WIDE2-1:@161647z3724.15N107847.58W$ APRSD WebChat'
)
packet_dict = aprslib.parse(packet_raw)
packet = packets.factory(packet_dict)
self.assertIsInstance(packet, packets.BeaconPacket)
packet_raw = "kd8mey-10>APRS,TCPIP*,qAC,T2SYDNEY:=4247.80N/08539.00WrPHG1210/Making 220 Great Again Allstar# 552191"
packet_raw = 'kd8mey-10>APRS,TCPIP*,qAC,T2SYDNEY:=4247.80N/08539.00WrPHG1210/Making 220 Great Again Allstar# 552191'
packet_dict = aprslib.parse(packet_raw)
packet = packets.factory(packet_dict)
self.assertIsInstance(packet, packets.BeaconPacket)
def test_reject_factory(self):
"""Test to ensure a reject packet is created."""
packet_raw = "HB9FDL-1>APK102,HB9FM-4*,WIDE2,qAR,HB9FEF-11::REPEAT :rej4139"
packet_raw = 'HB9FDL-1>APK102,HB9FM-4*,WIDE2,qAR,HB9FEF-11::REPEAT :rej4139'
packet_dict = aprslib.parse(packet_raw)
packet = packets.factory(packet_dict)
self.assertIsInstance(packet, packets.RejectPacket)
self.assertEqual("4139", packet.msgNo)
self.assertEqual("HB9FDL-1", packet.from_call)
self.assertEqual("REPEAT", packet.to_call)
self.assertEqual("reject", packet.packet_type)
self.assertEqual('4139', packet.msgNo)
self.assertEqual('HB9FDL-1', packet.from_call)
self.assertEqual('REPEAT', packet.to_call)
self.assertEqual('reject', packet.packet_type)
self.assertIsNone(packet.payload)
def test_thirdparty_factory(self):
"""Test to ensure a third party packet is created."""
packet_raw = "GTOWN>APDW16,WIDE1-1,WIDE2-1:}KM6LYW-9>APZ100,TCPIP,GTOWN*::KM6LYW :KM6LYW: 19 Miles SW"
packet_raw = 'GTOWN>APDW16,WIDE1-1,WIDE2-1:}KM6LYW-9>APZ100,TCPIP,GTOWN*::KM6LYW :KM6LYW: 19 Miles SW'
packet_dict = aprslib.parse(packet_raw)
packet = packets.factory(packet_dict)
self.assertIsInstance(packet, packets.ThirdPartyPacket)
def test_weather_factory(self):
"""Test to ensure a weather packet is created."""
packet_raw = "FW9222>APRS,TCPXX*,qAX,CWOP-6:@122025z2953.94N/08423.77W_232/003g006t084r000p032P000h80b10157L745.DsWLL"
packet_raw = 'FW9222>APRS,TCPXX*,qAX,CWOP-6:@122025z2953.94N/08423.77W_232/003g006t084r000p032P000h80b10157L745.DsWLL'
packet_dict = aprslib.parse(packet_raw)
packet = packets.factory(packet_dict)
self.assertIsInstance(packet, packets.WeatherPacket)
@ -178,7 +178,7 @@ class TestPacketBase(unittest.TestCase):
)
expected = (
f"{fake.FAKE_FROM_CALLSIGN}>APZ100::{fake.FAKE_TO_CALLSIGN:<9}:ack123"
f'{fake.FAKE_FROM_CALLSIGN}>APZ100::{fake.FAKE_TO_CALLSIGN:<9}:ack123'
)
self.assertEqual(expected, str(ack))
@ -191,7 +191,7 @@ class TestPacketBase(unittest.TestCase):
)
expected = (
f"{fake.FAKE_FROM_CALLSIGN}>APZ100::{fake.FAKE_TO_CALLSIGN:<9}:rej123"
f'{fake.FAKE_FROM_CALLSIGN}>APZ100::{fake.FAKE_TO_CALLSIGN:<9}:rej123'
)
self.assertEqual(expected, str(reject))
@ -200,20 +200,20 @@ class TestPacketBase(unittest.TestCase):
lat = 28.123456
lon = -80.123456
ts = 1711219496.6426
comment = "My Beacon Comment"
comment = 'My Beacon Comment'
packet = packets.BeaconPacket(
from_call=fake.FAKE_FROM_CALLSIGN,
to_call=fake.FAKE_TO_CALLSIGN,
latitude=lat,
longitude=lon,
timestamp=ts,
symbol=">",
symbol='>',
comment=comment,
)
expected_lat = aprslib_util.latitude_to_ddm(lat)
expected_lon = aprslib_util.longitude_to_ddm(lon)
expected = f"KFAKE>APZ100:@231844z{expected_lat}/{expected_lon}>{comment}"
expected = f'KFAKE>APZ100:@231844z{expected_lat}/{expected_lon}>{comment}'
self.assertEqual(expected, str(packet))
def test_beacon_format_no_comment(self):
@ -227,13 +227,13 @@ class TestPacketBase(unittest.TestCase):
latitude=lat,
longitude=lon,
timestamp=ts,
symbol=">",
symbol='>',
)
empty_comment = "APRSD Beacon"
empty_comment = 'APRSD Beacon'
expected_lat = aprslib_util.latitude_to_ddm(lat)
expected_lon = aprslib_util.longitude_to_ddm(lon)
expected = f"KFAKE>APZ100:@231844z{expected_lat}/{expected_lon}>{empty_comment}"
expected = f'KFAKE>APZ100:@231844z{expected_lat}/{expected_lon}>{empty_comment}'
self.assertEqual(expected, str(packet))
def test_bulletin_format(self):
@ -242,32 +242,32 @@ class TestPacketBase(unittest.TestCase):
bid = 0
packet = packets.BulletinPacket(
from_call=fake.FAKE_FROM_CALLSIGN,
message_text="My Bulletin Message",
message_text='My Bulletin Message',
bid=0,
)
expected = (
f"{fake.FAKE_FROM_CALLSIGN}>APZ100::BLN{bid:<9}:{packet.message_text}"
f'{fake.FAKE_FROM_CALLSIGN}>APZ100::BLN{bid:<9}:{packet.message_text}'
)
self.assertEqual(expected, str(packet))
# bulletin id = 1
bid = 1
txt = "((((((( CX2SA - Salto Uruguay ))))))) http://www.cx2sa.org"
txt = '((((((( CX2SA - Salto Uruguay ))))))) http://www.cx2sa.org'
packet = packets.BulletinPacket(
from_call=fake.FAKE_FROM_CALLSIGN,
message_text=txt,
bid=1,
)
expected = f"{fake.FAKE_FROM_CALLSIGN}>APZ100::BLN{bid:<9}:{txt}"
expected = f'{fake.FAKE_FROM_CALLSIGN}>APZ100::BLN{bid:<9}:{txt}'
self.assertEqual(expected, str(packet))
def test_message_format(self):
"""Test the message packet format."""
message = "My Message"
msgno = "ABX"
message = 'My Message'
msgno = 'ABX'
packet = packets.MessagePacket(
from_call=fake.FAKE_FROM_CALLSIGN,
to_call=fake.FAKE_TO_CALLSIGN,
@ -275,19 +275,19 @@ class TestPacketBase(unittest.TestCase):
msgNo=msgno,
)
expected = f"{fake.FAKE_FROM_CALLSIGN}>APZ100::{fake.FAKE_TO_CALLSIGN:<9}:{message}{{{msgno}"
expected = f'{fake.FAKE_FROM_CALLSIGN}>APZ100::{fake.FAKE_TO_CALLSIGN:<9}:{message}{{{msgno}'
self.assertEqual(expected, str(packet))
# test with bad words
# Currently fails with mixed case
message = "My cunt piss fuck shIt text"
exp_msg = "My **** **** **** **** text"
msgno = "ABX"
message = 'My cunt piss fuck shIt text'
exp_msg = 'My **** **** **** **** text'
msgno = 'ABX'
packet = packets.MessagePacket(
from_call=fake.FAKE_FROM_CALLSIGN,
to_call=fake.FAKE_TO_CALLSIGN,
message_text=message,
msgNo=msgno,
)
expected = f"{fake.FAKE_FROM_CALLSIGN}>APZ100::{fake.FAKE_TO_CALLSIGN:<9}:{exp_msg}{{{msgno}"
expected = f'{fake.FAKE_FROM_CALLSIGN}>APZ100::{fake.FAKE_TO_CALLSIGN:<9}:{exp_msg}{{{msgno}'
self.assertEqual(expected, str(packet))

View File

@ -38,7 +38,6 @@ class TestPluginManager(unittest.TestCase):
def config_and_init(self):
CONF.callsign = self.fromcall
CONF.aprs_network.login = fake.FAKE_TO_CALLSIGN
CONF.aprs_fi.apiKey = 'something'
CONF.enabled_plugins = 'aprsd.plugins.ping.PingPlugin'
CONF.enable_save = False
@ -115,7 +114,6 @@ class TestPlugin(unittest.TestCase):
def config_and_init(self):
CONF.callsign = self.fromcall
CONF.aprs_network.login = fake.FAKE_TO_CALLSIGN
CONF.aprs_fi.apiKey = 'something'
CONF.enabled_plugins = 'aprsd.plugins.ping.PingPlugin'
CONF.enable_save = False

View File

@ -154,21 +154,26 @@ class TestAPRSDRXThread(unittest.TestCase):
mock_list_instance.find.side_effect = KeyError('Not found')
mock_pkt_list.return_value = mock_list_instance
self.rx_thread.process_packet()
# Pass raw packet string as args[0]
self.rx_thread.process_packet(packet.raw)
self.assertEqual(self.rx_thread.pkt_count, 1)
self.assertFalse(self.packet_queue.empty())
# Verify the raw string is on the queue
queued_raw = self.packet_queue.get()
self.assertEqual(queued_raw, packet.raw)
def test_process_packet_no_packet(self):
"""Test process_packet() when decode returns None."""
"""Test process_packet() when no frame is received."""
mock_client = MockClientDriver()
mock_client._decode_packet_return = None
self.rx_thread._client = mock_client
self.rx_thread.pkt_count = 0
with mock.patch('aprsd.threads.rx.LOG') as mock_log:
# Call without args to trigger warning
self.rx_thread.process_packet()
mock_log.error.assert_called()
mock_log.warning.assert_called()
self.assertEqual(self.rx_thread.pkt_count, 0)
def test_process_packet_ack_packet(self):
@ -180,38 +185,39 @@ class TestAPRSDRXThread(unittest.TestCase):
self.rx_thread.pkt_count = 0
with mock.patch('aprsd.threads.rx.packet_log'):
self.rx_thread.process_packet()
# Pass raw packet string as args[0]
self.rx_thread.process_packet(packet.raw)
self.assertEqual(self.rx_thread.pkt_count, 1)
self.assertFalse(self.packet_queue.empty())
# Verify the raw string is on the queue
queued_raw = self.packet_queue.get()
self.assertEqual(queued_raw, packet.raw)
def test_process_packet_duplicate(self):
"""Test process_packet() with duplicate packet."""
from oslo_config import cfg
CONF = cfg.CONF
CONF.packet_dupe_timeout = 60
"""Test process_packet() with duplicate packet.
Note: The rx thread's process_packet() doesn't filter duplicates.
It puts all packets on the queue. Duplicate filtering happens
later in the filter thread.
"""
mock_client = MockClientDriver()
packet = fake.fake_packet(msg_number='123')
packet.processed = True
packet.timestamp = 1000
mock_client._decode_packet_return = packet
self.rx_thread._client = mock_client
self.rx_thread.pkt_count = 0
with mock.patch('aprsd.threads.rx.packet_log'):
with mock.patch('aprsd.threads.rx.packets.PacketList') as mock_pkt_list:
mock_list_instance = mock.MagicMock()
found_packet = fake.fake_packet(msg_number='123')
found_packet.timestamp = 1050 # Within timeout
mock_list_instance.find.return_value = found_packet
mock_pkt_list.return_value = mock_list_instance
with mock.patch('aprsd.threads.rx.LOG') as mock_log:
self.rx_thread.process_packet()
mock_log.warning.assert_called()
# Should not add to queue
self.assertTrue(self.packet_queue.empty())
# Pass raw packet string as args[0]
self.rx_thread.process_packet(packet.raw)
# The rx thread puts all packets on the queue regardless of duplicates
# Duplicate filtering happens in the filter thread
self.assertFalse(self.packet_queue.empty())
queued_raw = self.packet_queue.get()
# Verify the raw string is on the queue
self.assertEqual(queued_raw, packet.raw)
class TestAPRSDFilterThread(unittest.TestCase):
@ -266,10 +272,11 @@ class TestAPRSDFilterThread(unittest.TestCase):
def test_print_packet(self):
"""Test print_packet() method."""
packet = fake.fake_packet()
self.filter_thread.packet_count = 5 # Set a packet count
with mock.patch('aprsd.threads.rx.packet_log') as mock_log:
self.filter_thread.print_packet(packet)
mock_log.log.assert_called_with(packet)
mock_log.log.assert_called_with(packet, packet_count=5)
def test_loop_with_packet(self):
"""Test loop() with packet in queue."""

149
tests/threads/test_stats.py Normal file
View File

@ -0,0 +1,149 @@
import unittest
from unittest import mock
from aprsd.stats import collector
from aprsd.threads.stats import APRSDStatsStoreThread, StatsStore
class TestStatsStore(unittest.TestCase):
"""Unit tests for the StatsStore class."""
def test_init(self):
"""Test StatsStore initialization."""
ss = StatsStore()
self.assertIsNotNone(ss.lock)
self.assertFalse(hasattr(ss, 'data'))
def test_add(self):
"""Test add method."""
ss = StatsStore()
test_data = {'test': 'data'}
ss.add(test_data)
self.assertEqual(ss.data, test_data)
def test_add_concurrent(self):
"""Test add method with concurrent access."""
import threading
ss = StatsStore()
test_data = {'test': 'data'}
results = []
def add_data():
ss.add(test_data)
results.append(ss.data)
# Create multiple threads to test thread safety
threads = []
for _ in range(5):
t = threading.Thread(target=add_data)
threads.append(t)
t.start()
for t in threads:
t.join()
# All threads should have added the data
for result in results:
self.assertEqual(result, test_data)
class TestAPRSDStatsStoreThread(unittest.TestCase):
"""Unit tests for the APRSDStatsStoreThread class."""
def setUp(self):
"""Set up test fixtures."""
# Reset singleton instance
collector.Collector._instance = None
# Clear producers to start fresh
c = collector.Collector()
c.producers = []
def tearDown(self):
"""Clean up after tests."""
collector.Collector._instance = None
def test_init(self):
"""Test APRSDStatsStoreThread initialization."""
thread = APRSDStatsStoreThread()
self.assertEqual(thread.name, 'StatsStore')
self.assertEqual(thread.save_interval, 10)
self.assertTrue(hasattr(thread, 'loop_count'))
def test_loop_with_save(self):
"""Test loop method when save interval is reached."""
thread = APRSDStatsStoreThread()
# Mock the collector and save methods
with (
mock.patch('aprsd.stats.collector.Collector') as mock_collector_class,
mock.patch('aprsd.utils.objectstore.ObjectStoreMixin.save') as mock_save,
):
# Setup mock collector to return some stats
mock_collector_instance = mock.Mock()
mock_collector_instance.collect.return_value = {'test': 'data'}
mock_collector_class.return_value = mock_collector_instance
# Set loop_count to match save interval
thread.loop_count = 10
# Call loop
result = thread.loop()
# Should return True (continue looping)
self.assertTrue(result)
# Should have called collect and save
mock_collector_instance.collect.assert_called_once()
mock_save.assert_called_once()
def test_loop_without_save(self):
"""Test loop method when save interval is not reached."""
thread = APRSDStatsStoreThread()
# Mock the collector and save methods
with (
mock.patch('aprsd.stats.collector.Collector') as mock_collector_class,
mock.patch('aprsd.utils.objectstore.ObjectStoreMixin.save') as mock_save,
):
# Setup mock collector to return some stats
mock_collector_instance = mock.Mock()
mock_collector_instance.collect.return_value = {'test': 'data'}
mock_collector_class.return_value = mock_collector_instance
# Set loop_count to not match save interval
thread.loop_count = 1
# Call loop
result = thread.loop()
# Should return True (continue looping)
self.assertTrue(result)
# Should not have called save
mock_save.assert_not_called()
def test_loop_with_exception(self):
"""Test loop method when an exception occurs."""
thread = APRSDStatsStoreThread()
# Mock the collector to raise an exception
with mock.patch('aprsd.stats.collector.Collector') as mock_collector_class:
mock_collector_instance = mock.Mock()
mock_collector_instance.collect.side_effect = RuntimeError('Test exception')
mock_collector_class.return_value = mock_collector_instance
# Set loop_count to match save interval
thread.loop_count = 10
# Should raise the exception
with self.assertRaises(RuntimeError):
thread.loop()
# Removed test_loop_count_increment as it's not meaningful to test in isolation
# since the increment happens in the parent run() method, not in loop()
if __name__ == '__main__':
unittest.main()

View File

@ -15,10 +15,24 @@ class TestSendFunctions(unittest.TestCase):
"""Set up test fixtures."""
# Reset singleton instances
tracker.PacketTrack._instance = None
# Reset scheduler instances
tx._packet_scheduler = None
tx._ack_scheduler = None
def tearDown(self):
"""Clean up after tests."""
tracker.PacketTrack._instance = None
# Clean up schedulers
if tx._packet_scheduler:
tx._packet_scheduler.stop()
if tx._packet_scheduler.is_alive():
tx._packet_scheduler.join(timeout=1)
if tx._ack_scheduler:
tx._ack_scheduler.stop()
if tx._ack_scheduler.is_alive():
tx._ack_scheduler.join(timeout=1)
tx._packet_scheduler = None
tx._ack_scheduler = None
@mock.patch('aprsd.threads.tx.collector.PacketCollector')
@mock.patch('aprsd.threads.tx._send_packet')
@ -66,10 +80,28 @@ class TestSendFunctions(unittest.TestCase):
mock_log.info.assert_called()
mock_send_ack.assert_not_called()
@mock.patch('aprsd.threads.tx.SendPacketThread')
def test_send_packet_threaded(self, mock_thread_class):
"""Test _send_packet() with threading."""
@mock.patch('aprsd.threads.tx._get_packet_scheduler')
def test_send_packet_threaded(self, mock_get_scheduler):
"""Test _send_packet() uses scheduler."""
packet = fake.fake_packet()
mock_scheduler = mock.MagicMock()
mock_scheduler.is_alive.return_value = True
mock_get_scheduler.return_value = mock_scheduler
tx._send_packet(packet, direct=False)
mock_get_scheduler.assert_called()
# Scheduler should be alive and will handle the packet
self.assertTrue(mock_scheduler.is_alive())
@mock.patch('aprsd.threads.tx.SendPacketThread')
@mock.patch('aprsd.threads.tx._get_packet_scheduler')
def test_send_packet_fallback(self, mock_get_scheduler, mock_thread_class):
"""Test _send_packet() falls back to old method if scheduler not available."""
packet = fake.fake_packet()
mock_scheduler = mock.MagicMock()
mock_scheduler.is_alive.return_value = False
mock_get_scheduler.return_value = mock_scheduler
mock_thread = mock.MagicMock()
mock_thread_class.return_value = mock_thread
@ -85,10 +117,28 @@ class TestSendFunctions(unittest.TestCase):
tx._send_packet(packet, direct=True)
mock_send_direct.assert_called_with(packet, aprs_client=None)
@mock.patch('aprsd.threads.tx.SendAckThread')
def test_send_ack_threaded(self, mock_thread_class):
"""Test _send_ack() with threading."""
@mock.patch('aprsd.threads.tx._get_ack_scheduler')
def test_send_ack_threaded(self, mock_get_scheduler):
"""Test _send_ack() uses scheduler."""
packet = fake.fake_ack_packet()
mock_scheduler = mock.MagicMock()
mock_scheduler.is_alive.return_value = True
mock_get_scheduler.return_value = mock_scheduler
tx._send_ack(packet, direct=False)
mock_get_scheduler.assert_called()
# Scheduler should be alive and will handle the packet
self.assertTrue(mock_scheduler.is_alive())
@mock.patch('aprsd.threads.tx.SendAckThread')
@mock.patch('aprsd.threads.tx._get_ack_scheduler')
def test_send_ack_fallback(self, mock_get_scheduler, mock_thread_class):
"""Test _send_ack() falls back to old method if scheduler not available."""
packet = fake.fake_ack_packet()
mock_scheduler = mock.MagicMock()
mock_scheduler.is_alive.return_value = False
mock_get_scheduler.return_value = mock_scheduler
mock_thread = mock.MagicMock()
mock_thread_class.return_value = mock_thread
@ -146,6 +196,397 @@ class TestSendFunctions(unittest.TestCase):
self.assertFalse(result)
mock_log_error.error.assert_called()
@mock.patch('aprsd.threads.tx.PacketSendSchedulerThread')
def test_get_packet_scheduler_creates_new(self, mock_scheduler_class):
"""Test _get_packet_scheduler() creates new scheduler if none exists."""
tx._packet_scheduler = None
mock_scheduler = mock.MagicMock()
mock_scheduler_class.return_value = mock_scheduler
result = tx._get_packet_scheduler()
mock_scheduler_class.assert_called_once()
mock_scheduler.start.assert_called_once()
self.assertEqual(result, mock_scheduler)
@mock.patch('aprsd.threads.tx.PacketSendSchedulerThread')
def test_get_packet_scheduler_reuses_existing(self, mock_scheduler_class):
"""Test _get_packet_scheduler() reuses existing scheduler if alive."""
existing_scheduler = mock.MagicMock()
existing_scheduler.is_alive.return_value = True
tx._packet_scheduler = existing_scheduler
result = tx._get_packet_scheduler()
mock_scheduler_class.assert_not_called()
self.assertEqual(result, existing_scheduler)
@mock.patch('aprsd.threads.tx.PacketSendSchedulerThread')
def test_get_packet_scheduler_recreates_if_dead(self, mock_scheduler_class):
    """A cached-but-dead scheduler is replaced with a freshly started one."""
    stale = mock.MagicMock()
    stale.is_alive.return_value = False
    tx._packet_scheduler = stale

    replacement = mock.MagicMock()
    mock_scheduler_class.return_value = replacement

    result = tx._get_packet_scheduler()

    # The dead instance forces a rebuild; the replacement must be started.
    mock_scheduler_class.assert_called_once()
    replacement.start.assert_called_once()
    self.assertEqual(result, replacement)
@mock.patch('aprsd.threads.tx.AckSendSchedulerThread')
def test_get_ack_scheduler_creates_new(self, mock_scheduler_class):
    """With no ack scheduler cached, one is constructed and started."""
    tx._ack_scheduler = None
    fresh = mock.MagicMock()
    mock_scheduler_class.return_value = fresh

    result = tx._get_ack_scheduler()

    # A single ack scheduler is built, started, and handed back.
    mock_scheduler_class.assert_called_once()
    fresh.start.assert_called_once()
    self.assertEqual(result, fresh)
class TestPacketWorkers(unittest.TestCase):
    """Unit tests for worker functions used by threadpool."""

    def setUp(self):
        """Set up test fixtures."""
        # Drop the PacketTrack singleton so no state leaks between tests.
        tracker.PacketTrack._instance = None

    def tearDown(self):
        """Clean up after tests."""
        tracker.PacketTrack._instance = None

    @mock.patch('aprsd.threads.tx.tracker.PacketTrack')
    def test_send_packet_worker_packet_acked(self, mock_tracker_class):
        """Test _send_packet_worker() when packet is acked."""
        mock_tracker = mock.MagicMock()
        mock_tracker.get.return_value = None  # Packet removed = acked
        mock_tracker_class.return_value = mock_tracker
        result = tx._send_packet_worker('123')
        # False tells the scheduler no further sends are needed.
        self.assertFalse(result)

    @mock.patch('aprsd.threads.tx.tracker.PacketTrack')
    def test_send_packet_worker_max_retries(self, mock_tracker_class):
        """Test _send_packet_worker() when max retries reached."""
        mock_tracker = mock.MagicMock()
        tracked_packet = fake.fake_packet(msg_number='123')
        # send_count == retry_count: the packet has exhausted its retries.
        tracked_packet.send_count = 3
        tracked_packet.retry_count = 3
        mock_tracker.get.return_value = tracked_packet
        mock_tracker_class.return_value = mock_tracker
        with mock.patch('aprsd.threads.tx.LOG') as mock_log:
            result = tx._send_packet_worker('123')
            self.assertFalse(result)
            mock_log.info.assert_called()
            # Exhausted packets are dropped from the tracker.
            mock_tracker.remove.assert_called()

    @mock.patch('aprsd.threads.tx.tracker.PacketTrack')
    @mock.patch('aprsd.threads.tx._send_direct')
    def test_send_packet_worker_send_now(self, mock_send_direct, mock_tracker_class):
        """Test _send_packet_worker() when it's time to send."""
        mock_tracker = mock.MagicMock()
        tracked_packet = fake.fake_packet(msg_number='123')
        tracked_packet.send_count = 0
        tracked_packet.retry_count = 3
        # last_send_time of None means the packet has never been sent.
        tracked_packet.last_send_time = None
        mock_tracker.get.return_value = tracked_packet
        mock_tracker_class.return_value = mock_tracker
        mock_send_direct.return_value = True
        result = tx._send_packet_worker('123')
        self.assertTrue(result)
        mock_send_direct.assert_called()
        # A successful send bumps the packet's send counter.
        self.assertEqual(tracked_packet.send_count, 1)

    @mock.patch('aprsd.threads.tx.tracker.PacketTrack')
    @mock.patch('aprsd.threads.tx._send_direct')
    def test_send_packet_worker_send_failed(self, mock_send_direct, mock_tracker_class):
        """Test _send_packet_worker() when send fails."""
        mock_tracker = mock.MagicMock()
        tracked_packet = fake.fake_packet(msg_number='123')
        tracked_packet.send_count = 0
        tracked_packet.retry_count = 3
        tracked_packet.last_send_time = None
        mock_tracker.get.return_value = tracked_packet
        mock_tracker_class.return_value = mock_tracker
        mock_send_direct.return_value = False
        result = tx._send_packet_worker('123')
        self.assertTrue(result)
        self.assertEqual(
            tracked_packet.send_count, 0
        )  # Should not increment on failure

    @mock.patch('aprsd.threads.tx.tracker.PacketTrack')
    def test_send_ack_worker_packet_removed(self, mock_tracker_class):
        """Test _send_ack_worker() when packet is removed."""
        mock_tracker = mock.MagicMock()
        # None from the tracker means the ack is no longer pending.
        mock_tracker.get.return_value = None
        mock_tracker_class.return_value = mock_tracker
        result = tx._send_ack_worker('123', 3)
        self.assertFalse(result)

    @mock.patch('aprsd.threads.tx.tracker.PacketTrack')
    def test_send_ack_worker_max_retries(self, mock_tracker_class):
        """Test _send_ack_worker() when max retries reached."""
        mock_tracker = mock.MagicMock()
        tracked_packet = fake.fake_ack_packet()
        # send_count equals the max_retries argument passed below.
        tracked_packet.send_count = 3
        mock_tracker.get.return_value = tracked_packet
        mock_tracker_class.return_value = mock_tracker
        with mock.patch('aprsd.threads.tx.LOG') as mock_log:
            result = tx._send_ack_worker('123', 3)
            self.assertFalse(result)
            mock_log.debug.assert_called()

    @mock.patch('aprsd.threads.tx.tracker.PacketTrack')
    @mock.patch('aprsd.threads.tx._send_direct')
    def test_send_ack_worker_send_now(self, mock_send_direct, mock_tracker_class):
        """Test _send_ack_worker() when it's time to send."""
        mock_tracker = mock.MagicMock()
        tracked_packet = fake.fake_ack_packet()
        tracked_packet.send_count = 0
        # Never sent before, so the worker should transmit immediately.
        tracked_packet.last_send_time = None
        mock_tracker.get.return_value = tracked_packet
        mock_tracker_class.return_value = mock_tracker
        mock_send_direct.return_value = True
        result = tx._send_ack_worker('123', 3)
        self.assertTrue(result)
        mock_send_direct.assert_called()
        self.assertEqual(tracked_packet.send_count, 1)

    @mock.patch('aprsd.threads.tx.tracker.PacketTrack')
    @mock.patch('aprsd.threads.tx._send_direct')
    def test_send_ack_worker_waiting(self, mock_send_direct, mock_tracker_class):
        """Test _send_ack_worker() when waiting for next send."""
        mock_tracker = mock.MagicMock()
        tracked_packet = fake.fake_ack_packet()
        tracked_packet.send_count = 0
        tracked_packet.last_send_time = int(time.time()) - 10  # Too soon
        mock_tracker.get.return_value = tracked_packet
        mock_tracker_class.return_value = mock_tracker
        mock_send_direct.return_value = True
        result = tx._send_ack_worker('123', 3)
        self.assertTrue(result)
        # Still inside the retry backoff window, so nothing goes out yet.
        mock_send_direct.assert_not_called()
class TestPacketSendSchedulerThread(unittest.TestCase):
    """Unit tests for PacketSendSchedulerThread class."""

    def setUp(self):
        """Set up test fixtures."""
        # Reset the PacketTrack singleton; each test builds its own scheduler.
        tracker.PacketTrack._instance = None
        self.scheduler = tx.PacketSendSchedulerThread(max_workers=2)

    def tearDown(self):
        """Clean up after tests."""
        self.scheduler.stop()
        if self.scheduler.is_alive():
            self.scheduler.join(timeout=1)
        # wait=False: do not block teardown on any in-flight worker tasks.
        self.scheduler.executor.shutdown(wait=False)
        tracker.PacketTrack._instance = None

    def test_init(self):
        """Test initialization."""
        self.assertEqual(self.scheduler.name, 'PacketSendSchedulerThread')
        self.assertEqual(self.scheduler.max_workers, 2)
        self.assertIsNotNone(self.scheduler.executor)

    @mock.patch('aprsd.threads.tx.tracker.PacketTrack')
    def test_loop_submits_tasks(self, mock_tracker_class):
        """Test loop() submits tasks to threadpool."""
        mock_tracker = mock.MagicMock()
        packet1 = fake.fake_packet(msg_number='123')
        packet1.send_count = 0
        packet1.retry_count = 3
        packet2 = fake.fake_packet(msg_number='456')
        packet2.send_count = 0
        packet2.retry_count = 3
        mock_tracker.keys.return_value = ['123', '456']
        mock_tracker.get.side_effect = lambda x: packet1 if x == '123' else packet2
        mock_tracker_class.return_value = mock_tracker
        # Mock the executor's submit method
        with mock.patch.object(self.scheduler.executor, 'submit') as mock_submit:
            result = self.scheduler.loop()
            self.assertTrue(result)
            # Should submit tasks for both packets
            self.assertEqual(mock_submit.call_count, 2)

    @mock.patch('aprsd.threads.tx.tracker.PacketTrack')
    def test_loop_skips_acked_packets(self, mock_tracker_class):
        """Test loop() skips packets that are acked."""
        mock_tracker = mock.MagicMock()
        mock_tracker.keys.return_value = ['123']
        mock_tracker.get.return_value = None  # Packet acked
        mock_tracker_class.return_value = mock_tracker
        # Mock the executor's submit method
        with mock.patch.object(self.scheduler.executor, 'submit') as mock_submit:
            result = self.scheduler.loop()
            self.assertTrue(result)
            # Should not submit task for acked packet
            mock_submit.assert_not_called()

    @mock.patch('aprsd.threads.tx.tracker.PacketTrack')
    def test_loop_skips_ack_packets(self, mock_tracker_class):
        """Test loop() skips AckPackets."""
        # AckPackets are handled by the ack scheduler, not this one.
        mock_tracker = mock.MagicMock()
        ack_packet = fake.fake_ack_packet()
        mock_tracker.keys.return_value = ['123']
        mock_tracker.get.return_value = ack_packet
        mock_tracker_class.return_value = mock_tracker
        # Mock the executor's submit method
        with mock.patch.object(self.scheduler.executor, 'submit') as mock_submit:
            result = self.scheduler.loop()
            self.assertTrue(result)
            # Should not submit task for ack packet
            mock_submit.assert_not_called()

    @mock.patch('aprsd.threads.tx.tracker.PacketTrack')
    def test_loop_skips_max_retries(self, mock_tracker_class):
        """Test loop() skips packets at max retries."""
        mock_tracker = mock.MagicMock()
        packet = fake.fake_packet(msg_number='123')
        # send_count == retry_count: nothing left to schedule.
        packet.send_count = 3
        packet.retry_count = 3
        mock_tracker.keys.return_value = ['123']
        mock_tracker.get.return_value = packet
        mock_tracker_class.return_value = mock_tracker
        # Mock the executor's submit method
        with mock.patch.object(self.scheduler.executor, 'submit') as mock_submit:
            result = self.scheduler.loop()
            self.assertTrue(result)
            # Should not submit task for packet at max retries
            mock_submit.assert_not_called()

    def test_cleanup(self):
        """Test _cleanup() shuts down executor."""
        with mock.patch.object(self.scheduler.executor, 'shutdown') as mock_shutdown:
            with mock.patch('aprsd.threads.tx.LOG') as mock_log:
                self.scheduler._cleanup()
                # Cleanup waits for outstanding tasks before shutting down.
                mock_shutdown.assert_called_once_with(wait=True)
                mock_log.debug.assert_called()
class TestAckSendSchedulerThread(unittest.TestCase):
    """Unit tests for AckSendSchedulerThread class."""

    def setUp(self):
        """Set up test fixtures."""
        from oslo_config import cfg

        CONF = cfg.CONF
        # The ack scheduler reads its retry limit from this config option.
        CONF.default_ack_send_count = 3
        tracker.PacketTrack._instance = None
        self.scheduler = tx.AckSendSchedulerThread(max_workers=2)

    def tearDown(self):
        """Clean up after tests."""
        self.scheduler.stop()
        if self.scheduler.is_alive():
            self.scheduler.join(timeout=1)
        # wait=False: do not block teardown on any in-flight worker tasks.
        self.scheduler.executor.shutdown(wait=False)
        tracker.PacketTrack._instance = None

    def test_init(self):
        """Test initialization."""
        self.assertEqual(self.scheduler.name, 'AckSendSchedulerThread')
        self.assertEqual(self.scheduler.max_workers, 2)
        # max_retries should mirror default_ack_send_count set in setUp().
        self.assertEqual(self.scheduler.max_retries, 3)
        self.assertIsNotNone(self.scheduler.executor)

    @mock.patch('aprsd.threads.tx.tracker.PacketTrack')
    def test_loop_submits_tasks(self, mock_tracker_class):
        """Test loop() submits tasks to threadpool."""
        mock_tracker = mock.MagicMock()
        ack_packet1 = fake.fake_ack_packet()
        ack_packet1.send_count = 0
        ack_packet2 = fake.fake_ack_packet()
        ack_packet2.send_count = 0
        mock_tracker.keys.return_value = ['123', '456']
        mock_tracker.get.side_effect = (
            lambda x: ack_packet1 if x == '123' else ack_packet2
        )
        mock_tracker_class.return_value = mock_tracker
        # Mock the executor's submit method
        with mock.patch.object(self.scheduler.executor, 'submit') as mock_submit:
            result = self.scheduler.loop()
            self.assertTrue(result)
            # Should submit tasks for both ack packets
            self.assertEqual(mock_submit.call_count, 2)

    @mock.patch('aprsd.threads.tx.tracker.PacketTrack')
    def test_loop_skips_non_ack_packets(self, mock_tracker_class):
        """Test loop() skips non-AckPackets."""
        # Regular packets belong to PacketSendSchedulerThread, not this one.
        mock_tracker = mock.MagicMock()
        regular_packet = fake.fake_packet()
        mock_tracker.keys.return_value = ['123']
        mock_tracker.get.return_value = regular_packet
        mock_tracker_class.return_value = mock_tracker
        # Mock the executor's submit method
        with mock.patch.object(self.scheduler.executor, 'submit') as mock_submit:
            result = self.scheduler.loop()
            self.assertTrue(result)
            # Should not submit task for non-ack packet
            mock_submit.assert_not_called()

    @mock.patch('aprsd.threads.tx.tracker.PacketTrack')
    def test_loop_skips_max_retries(self, mock_tracker_class):
        """Test loop() skips acks at max retries."""
        mock_tracker = mock.MagicMock()
        ack_packet = fake.fake_ack_packet()
        # send_count == max_retries (3): nothing left to schedule.
        ack_packet.send_count = 3
        mock_tracker.keys.return_value = ['123']
        mock_tracker.get.return_value = ack_packet
        mock_tracker_class.return_value = mock_tracker
        # Mock the executor's submit method
        with mock.patch.object(self.scheduler.executor, 'submit') as mock_submit:
            result = self.scheduler.loop()
            self.assertTrue(result)
            # Should not submit task for ack at max retries
            mock_submit.assert_not_called()

    def test_cleanup(self):
        """Test _cleanup() shuts down executor."""
        with mock.patch.object(self.scheduler.executor, 'shutdown') as mock_shutdown:
            with mock.patch('aprsd.threads.tx.LOG') as mock_log:
                self.scheduler._cleanup()
                # Cleanup waits for outstanding tasks before shutting down.
                mock_shutdown.assert_called_once_with(wait=True)
                mock_log.debug.assert_called()
class TestSendPacketThread(unittest.TestCase):
"""Unit tests for the SendPacketThread class."""

View File

@ -2,6 +2,7 @@ import os
import pickle
import shutil
import tempfile
import threading
import unittest
from unittest import mock
@ -17,6 +18,7 @@ class TestObjectStore(objectstore.ObjectStoreMixin):
def __init__(self):
    """Initialize the test store with a re-entrant lock and empty data dict."""
    super().__init__()
    # RLock (re-entrant) so the same thread can safely nest acquisitions
    # if the mixin's locked methods call each other — TODO confirm against
    # ObjectStoreMixin's usage.
    self.lock = threading.RLock()
    self.data = {}

View File

@ -0,0 +1,172 @@
import unittest
from aprsd.utils.ring_buffer import RingBuffer
class TestRingBufferAdditional(unittest.TestCase):
    """Additional unit tests for the RingBuffer class to cover edge cases."""

    def test_empty_buffer(self):
        """Test behavior with empty buffer."""
        rb = RingBuffer(5)
        self.assertEqual(len(rb), 0)
        self.assertEqual(rb.get(), [])

    def test_buffer_with_zero_size(self):
        """Test buffer with zero size."""
        rb = RingBuffer(0)
        # Should not crash, but behavior might be different
        # In this implementation, it will behave like a normal list
        rb.append(1)
        self.assertEqual(len(rb), 1)
        self.assertEqual(rb.get(), [1])

    def test_buffer_with_negative_size(self):
        """Test buffer with negative size."""
        # This might not be a valid use case, but let's test it
        rb = RingBuffer(-1)
        rb.append(1)
        self.assertEqual(len(rb), 1)
        self.assertEqual(rb.get(), [1])

    def test_append_none_value(self):
        """Test appending None values."""
        # None must be stored as a real element, not treated as "missing".
        rb = RingBuffer(3)
        rb.append(None)
        rb.append(1)
        rb.append(2)
        result = rb.get()
        self.assertEqual(len(result), 3)
        self.assertIsNone(result[0])
        self.assertEqual(result[1], 1)
        self.assertEqual(result[2], 2)

    def test_append_multiple_types(self):
        """Test appending multiple different types of values."""
        rb = RingBuffer(4)
        rb.append('string')
        rb.append(42)
        rb.append([1, 2, 3])
        rb.append({'key': 'value'})
        result = rb.get()
        self.assertEqual(len(result), 4)
        self.assertEqual(result[0], 'string')
        self.assertEqual(result[1], 42)
        self.assertEqual(result[2], [1, 2, 3])
        self.assertEqual(result[3], {'key': 'value'})

    def test_multiple_appends_then_get(self):
        """Test multiple appends followed by get operations."""
        rb = RingBuffer(5)
        # Append multiple items
        for i in range(10):
            rb.append(i)
        # Get should return the last 5 items
        result = rb.get()
        self.assertEqual(len(result), 5)
        self.assertEqual(result, [5, 6, 7, 8, 9])

    def test_get_returns_copy(self):
        """Test that get() returns a copy, not a reference."""
        rb = RingBuffer(3)
        rb.append(1)
        rb.append(2)
        rb.append(3)
        result = rb.get()
        # Modify the returned list
        result.append(4)
        # Original buffer should not be affected
        original = rb.get()
        self.assertEqual(len(original), 3)
        self.assertNotIn(4, original)

    def test_buffer_size_one(self):
        """Test buffer with size 1."""
        # Each append should evict the single previous element.
        rb = RingBuffer(1)
        rb.append(1)
        self.assertEqual(len(rb), 1)
        self.assertEqual(rb.get(), [1])
        rb.append(2)
        self.assertEqual(len(rb), 1)
        result = rb.get()
        self.assertEqual(len(result), 1)
        self.assertEqual(result[0], 2)

    def test_buffer_size_two(self):
        """Test buffer with size 2."""
        rb = RingBuffer(2)
        rb.append(1)
        rb.append(2)
        self.assertEqual(len(rb), 2)
        self.assertEqual(rb.get(), [1, 2])
        rb.append(3)
        self.assertEqual(len(rb), 2)
        result = rb.get()
        self.assertEqual(len(result), 2)
        self.assertEqual(result[0], 2)
        self.assertEqual(result[1], 3)

    def test_large_buffer_size(self):
        """Test with a large buffer size."""
        rb = RingBuffer(1000)
        for i in range(1000):
            rb.append(i)
        result = rb.get()
        self.assertEqual(len(result), 1000)
        self.assertEqual(result[0], 0)
        self.assertEqual(result[-1], 999)

    def test_buffer_with_many_wraparounds(self):
        """Test buffer with many wraparounds."""
        rb = RingBuffer(3)
        # Fill and wrap multiple times
        for i in range(100):
            rb.append(i)
        result = rb.get()
        self.assertEqual(len(result), 3)
        # Should contain the last 3 elements
        self.assertEqual(result[0], 97)
        self.assertEqual(result[1], 98)
        self.assertEqual(result[2], 99)

    def test_multiple_get_calls(self):
        """Test multiple get() calls return consistent results."""
        # get() must be a read-only operation: repeated calls agree.
        rb = RingBuffer(3)
        rb.append(1)
        rb.append(2)
        rb.append(3)
        result1 = rb.get()
        result2 = rb.get()
        result3 = rb.get()
        self.assertEqual(result1, result2)
        self.assertEqual(result2, result3)
        self.assertEqual(result1, [1, 2, 3])

    def test_get_order_consistency(self):
        """Test that get() maintains order consistency."""
        rb = RingBuffer(5)
        # Add elements
        elements = [1, 2, 3, 4, 5, 6, 7]
        for elem in elements:
            rb.append(elem)
        result = rb.get()
        # Should contain the last 5 elements in correct order
        self.assertEqual(len(result), 5)
        self.assertEqual(result, [3, 4, 5, 6, 7])
# Allow running this test module directly (python test_ring_buffer.py).
if __name__ == '__main__':
    unittest.main()

83
tox.ini
View File

@ -1,11 +1,9 @@
[tox]
minversion = 2.9.0
minversion = 4.30.0
skipdist = True
skip_missing_interpreters = true
envlist = pep8,py{310,311}
#requires = tox-pipenv
# pip==22.0.4
# pip-tools==5.4.0
envlist = lint,py{311,312,313,314}
requires = tox-uv
# Activate isolated build environment. tox will use a virtual environment
# to build a source distribution from the source tree. For build tools and
@ -18,14 +16,12 @@ setenv =
_PYTEST_SETUP_SKIP_APRSD_DEP=1
PYTHONDONTWRITEBYTECODE=1
PYTHONUNBUFFERED=1
usedevelop = True
install_command = pip install {opts} {packages}
extras = tests
package = editable
deps =
pytest-cov
pytest
pytest-cov
commands =
pytest -s -v --cov-report term-missing --cov=aprsd {posargs}
pytest -v --cov-report term-missing --cov=aprsd tests {posargs}
coverage: coverage report -m
coverage: coverage xml
@ -45,53 +41,38 @@ commands =
#sphinx-build -a -W . _build
sphinx-build -M html source build
[testenv:pep8]
deps =
flake8
commands =
flake8 {posargs} aprsd tests
[testenv:fast8]
basepython = python3
# Use same environment directory as pep8 env to save space and install time
envdir = {toxworkdir}/pep8
commands =
{toxinidir}/tools/fast8.sh
passenv = FAST8_NUM_COMMITS
[testenv:lint]
skip_install = true
deps =
ruff
ruff
commands =
ruff check aprsd tests
ruff check aprsd tests {posargs}
ruff format --check aprsd tests
[flake8]
max-line-length = 99
show-source = True
ignore = E713,E501,W503,N818
extend-ignore = E203,W503
extend-exclude = venv
exclude = .venv,.git,.tox,dist,doc,.ropeproject
[testenv:fast8]
basepython = python3
# Use same environment directory as lint env to save space and install time
envdir = {toxworkdir}/lint
commands =
{toxinidir}/tools/fast8.sh
passenv = FAST8_NUM_COMMITS
# This is the configuration for the tox-gh-actions plugin for GitHub Actions
# https://github.com/ymyzk/tox-gh-actions
# This section is not needed if not using GitHub Actions for CI.
[gh-actions]
python =
3.9: py39, pep8, type-check, docs
3.10: py39, pep8, type-check, docs
3.11: py311, pep8, type-check, docs
3.10: py310, lint, type-check, docs
3.11: py311, lint, type-check, docs
[testenv:fmt]
# This will reformat your code to comply with pep8
# and standard formatting
# This will reformat your code using ruff
skip_install = true
deps =
ruff
commands =
ruff format aprsd tests
ruff check --fix aprsd tests
[testenv:type-check]
skip_install = true
@ -108,3 +89,27 @@ skip_install = true
basepython = python3
deps = pre-commit
commands = pre-commit run --all-files --show-diff-on-failure
[testenv:fix]
description = run code formatter and linter (auto-fix)
skip_install = true
deps =
pre-commit-uv>=4.1.1
commands =
pre-commit run --all-files --show-diff-on-failure
[testenv:type]
runner = uv-venv-lock-runner
description = run type checker via mypy
commands =
mypy {posargs:aprsd}
[testenv:dev]
runner = uv-venv-lock-runner
description = dev environment
extras =
dev
tests
type
commands =
uv pip tree

6
uv.lock generated
View File

@ -38,6 +38,7 @@ dependencies = [
{ name = "rfc3986" },
{ name = "rich" },
{ name = "rush" },
{ name = "setuptools" },
{ name = "stevedore" },
{ name = "thesmuggler" },
{ name = "timeago" },
@ -63,6 +64,7 @@ dev = [
{ name = "identify" },
{ name = "nodeenv" },
{ name = "packaging" },
{ name = "pip" },
{ name = "pip-tools" },
{ name = "platformdirs" },
{ name = "pluggy" },
@ -70,6 +72,7 @@ dev = [
{ name = "pyproject-api" },
{ name = "pyproject-hooks" },
{ name = "pyyaml" },
{ name = "setuptools" },
{ name = "tomli" },
{ name = "tox" },
{ name = "typing-extensions" },
@ -112,6 +115,7 @@ requires-dist = [
{ name = "packaging", specifier = "==25.0" },
{ name = "packaging", marker = "extra == 'dev'", specifier = "==25.0" },
{ name = "pbr", specifier = "==7.0.3" },
{ name = "pip", marker = "extra == 'dev'", specifier = "==25.3" },
{ name = "pip-tools", marker = "extra == 'dev'", specifier = "==7.5.2" },
{ name = "platformdirs", marker = "extra == 'dev'", specifier = "==4.5.1" },
{ name = "pluggy", specifier = "==1.6.0" },
@ -129,6 +133,8 @@ requires-dist = [
{ name = "rfc3986", specifier = "==2.0.0" },
{ name = "rich", specifier = "==14.2.0" },
{ name = "rush", specifier = "==2021.4.0" },
{ name = "setuptools", specifier = "==80.9.0" },
{ name = "setuptools", marker = "extra == 'dev'", specifier = "==80.9.0" },
{ name = "stevedore", specifier = "==5.6.0" },
{ name = "thesmuggler", specifier = "==1.0.1" },
{ name = "timeago", specifier = "==1.0.16" },