Mirror of https://github.com/craigerl/aprsd.git, synced 2025-04-19 09:49:01 -04:00
Changed to ruff

This patch switches the linter to ruff, which is much faster. The gray formatter and mypy have been removed as well.
Parent 30d1eb57dd, commit 72d068c0b8
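The key piece of the new pre-commit configuration is the ruff hook run with args: ["check", "--select", "I", "--fix"]. Rule group "I" is ruff's isort-compatible import sorting, and --fix applies the reordering automatically; the separate ruff-format hook takes over the code formatting that gray used to do. That is why nearly every Python hunk below is mechanical: import blocks are regrouped and long call sites are re-wrapped to the configured line length.

As a rough illustration (not part of the commit itself), this is the shape the sorted import block takes in one of the touched client modules, assuming ruff's default isort-compatible settings:

    # Standard-library imports come first.
    import datetime
    import logging
    import time

    # Third-party imports come next; plain "import x" lines sort ahead of
    # "from x import y" lines within the group, as the hunks below show.
    import timeago
    from aprslib.exceptions import LoginError
    from loguru import logger
    from oslo_config import cfg

    # First-party (aprsd) imports form the last group.
    from aprsd import client, exception
    from aprsd.client import base
    from aprsd.client.drivers import aprsis
    from aprsd.packets import core

The grouping is the only point of the snippet; running it requires the aprsd dependencies to be installed.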
@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.5.0
+    rev: v5.0.0
     hooks:
       - id: trailing-whitespace
       - id: end-of-file-fixer
@@ -10,13 +10,19 @@ repos:
       - id: check-case-conflict
       - id: check-docstring-first
       - id: check-builtin-literals
+      - id: check-illegal-windows-names

   - repo: https://github.com/asottile/setup-cfg-fmt
     rev: v2.5.0
     hooks:
       - id: setup-cfg-fmt

-  - repo: https://github.com/dizballanze/gray
-    rev: v0.14.0
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    rev: v0.8.4
     hooks:
-      - id: gray
+      - id: ruff
###### Relevant part below ######
+      - id: ruff
+        args: ["check", "--select", "I", "--fix"]
###### Relevant part above ######
+      - id: ruff-format

@@ -1,7 +1,7 @@
-from functools import update_wrapper
 import logging
-from pathlib import Path
 import typing as t
+from functools import update_wrapper
+from pathlib import Path

 import click
 from oslo_config import cfg
@@ -11,7 +11,6 @@ from aprsd import conf  # noqa: F401
 from aprsd.log import log
 from aprsd.utils import trace

-
 CONF = cfg.CONF
 home = str(Path.home())
 DEFAULT_CONFIG_DIR = f"{home}/.config/aprsd/"
@ -58,6 +57,7 @@ class AliasedGroup(click.Group):
|
||||
calling into :meth:`add_command`.
|
||||
Copied from `click` and extended for `aliases`.
|
||||
"""
|
||||
|
||||
def decorator(f):
|
||||
aliases = kwargs.pop("aliases", [])
|
||||
cmd = click.decorators.command(*args, **kwargs)(f)
|
||||
@ -65,6 +65,7 @@ class AliasedGroup(click.Group):
|
||||
for alias in aliases:
|
||||
self.add_command(cmd, name=alias)
|
||||
return cmd
|
||||
|
||||
return decorator
|
||||
|
||||
def group(self, *args, **kwargs):
|
||||
@ -74,6 +75,7 @@ class AliasedGroup(click.Group):
|
||||
calling into :meth:`add_command`.
|
||||
Copied from `click` and extended for `aliases`.
|
||||
"""
|
||||
|
||||
def decorator(f):
|
||||
aliases = kwargs.pop("aliases", [])
|
||||
cmd = click.decorators.group(*args, **kwargs)(f)
|
||||
@ -81,6 +83,7 @@ class AliasedGroup(click.Group):
|
||||
for alias in aliases:
|
||||
self.add_command(cmd, name=alias)
|
||||
return cmd
|
||||
|
||||
return decorator
|
||||
|
||||
|
||||
@ -89,6 +92,7 @@ def add_options(options):
|
||||
for option in reversed(options):
|
||||
func = option(func)
|
||||
return func
|
||||
|
||||
return _add_options
|
||||
|
||||
|
||||
@ -103,7 +107,9 @@ def process_standard_options(f: F) -> F:
|
||||
default_config_files = None
|
||||
try:
|
||||
CONF(
|
||||
[], project="aprsd", version=aprsd.__version__,
|
||||
[],
|
||||
project="aprsd",
|
||||
version=aprsd.__version__,
|
||||
default_config_files=default_config_files,
|
||||
)
|
||||
except cfg.ConfigFilesNotFoundError:
|
||||
@ -119,7 +125,7 @@ def process_standard_options(f: F) -> F:
|
||||
trace.setup_tracing(["method", "api"])
|
||||
|
||||
if not config_file_found:
|
||||
LOG = logging.getLogger("APRSD") # noqa: N806
|
||||
LOG = logging.getLogger("APRSD") # noqa: N806
|
||||
LOG.error("No config file found!! run 'aprsd sample-config'")
|
||||
|
||||
del kwargs["loglevel"]
|
||||
@ -132,6 +138,7 @@ def process_standard_options(f: F) -> F:
|
||||
|
||||
def process_standard_options_no_config(f: F) -> F:
|
||||
"""Use this as a decorator when config isn't needed."""
|
||||
|
||||
def new_func(*args, **kwargs):
|
||||
ctx = args[0]
|
||||
ctx.ensure_object(dict)
|
||||
|
@ -2,24 +2,22 @@ import datetime
|
||||
import logging
|
||||
import time
|
||||
|
||||
import timeago
|
||||
from aprslib.exceptions import LoginError
|
||||
from loguru import logger
|
||||
from oslo_config import cfg
|
||||
import timeago
|
||||
|
||||
from aprsd import client, exception
|
||||
from aprsd.client import base
|
||||
from aprsd.client.drivers import aprsis
|
||||
from aprsd.packets import core
|
||||
|
||||
|
||||
CONF = cfg.CONF
|
||||
LOG = logging.getLogger("APRSD")
|
||||
LOGU = logger
|
||||
|
||||
|
||||
class APRSISClient(base.APRSClient):
|
||||
|
||||
_client = None
|
||||
_checks = False
|
||||
|
||||
@ -106,6 +104,7 @@ class APRSISClient(base.APRSClient):
|
||||
LOG.warning(f"APRS_CLIENT {self._client} alive? NO!!!")
|
||||
return False
|
||||
return self._client.is_alive() and not self._is_stale_connection()
|
||||
|
||||
def close(self):
|
||||
if self._client:
|
||||
self._client.stop()
|
||||
@ -134,8 +133,12 @@ class APRSISClient(base.APRSClient):
|
||||
if retry_count >= retries:
|
||||
break
|
||||
try:
|
||||
LOG.info(f"Creating aprslib client({host}:{port}) and logging in {user}.")
|
||||
aprs_client = aprsis.Aprsdis(user, passwd=password, host=host, port=port)
|
||||
LOG.info(
|
||||
f"Creating aprslib client({host}:{port}) and logging in {user}."
|
||||
)
|
||||
aprs_client = aprsis.Aprsdis(
|
||||
user, passwd=password, host=host, port=port
|
||||
)
|
||||
# Force the log to be the same
|
||||
aprs_client.logger = LOG
|
||||
aprs_client.connect()
|
||||
@ -166,8 +169,10 @@ class APRSISClient(base.APRSClient):
|
||||
if self._client:
|
||||
try:
|
||||
self._client.consumer(
|
||||
callback, blocking=blocking,
|
||||
immortal=immortal, raw=raw,
|
||||
callback,
|
||||
blocking=blocking,
|
||||
immortal=immortal,
|
||||
raw=raw,
|
||||
)
|
||||
except Exception as e:
|
||||
LOG.error(e)
|
||||
|
@ -2,12 +2,11 @@ import abc
|
||||
import logging
|
||||
import threading
|
||||
|
||||
from oslo_config import cfg
|
||||
import wrapt
|
||||
from oslo_config import cfg
|
||||
|
||||
from aprsd.packets import core
|
||||
from aprsd.threads import keepalive_collector
|
||||
|
||||
from aprsd.utils import keepalive_collector
|
||||
|
||||
CONF = cfg.CONF
|
||||
LOG = logging.getLogger("APRSD")
|
||||
|
@ -4,17 +4,20 @@ import select
|
||||
import threading
|
||||
|
||||
import aprslib
|
||||
import wrapt
|
||||
from aprslib import is_py3
|
||||
from aprslib.exceptions import (
|
||||
ConnectionDrop, ConnectionError, GenericError, LoginError, ParseError,
|
||||
ConnectionDrop,
|
||||
ConnectionError,
|
||||
GenericError,
|
||||
LoginError,
|
||||
ParseError,
|
||||
UnknownFormat,
|
||||
)
|
||||
import wrapt
|
||||
|
||||
import aprsd
|
||||
from aprsd.packets import core
|
||||
|
||||
|
||||
LOG = logging.getLogger("APRSD")
|
||||
|
||||
|
||||
|
@ -3,20 +3,19 @@ import threading
|
||||
import time
|
||||
|
||||
import aprslib
|
||||
from oslo_config import cfg
|
||||
import wrapt
|
||||
from oslo_config import cfg
|
||||
|
||||
from aprsd import conf # noqa
|
||||
from aprsd.packets import core
|
||||
from aprsd.utils import trace
|
||||
|
||||
|
||||
CONF = cfg.CONF
|
||||
LOG = logging.getLogger("APRSD")
|
||||
|
||||
|
||||
class APRSDFakeClient(metaclass=trace.TraceWrapperMetaclass):
|
||||
'''Fake client for testing.'''
|
||||
"""Fake client for testing."""
|
||||
|
||||
# flag to tell us to stop
|
||||
thread_stop = False
|
||||
|
@ -4,13 +4,11 @@ from typing import Callable, Protocol, runtime_checkable
|
||||
from aprsd import exception
|
||||
from aprsd.packets import core
|
||||
|
||||
|
||||
LOG = logging.getLogger("APRSD")
|
||||
|
||||
|
||||
@runtime_checkable
|
||||
class Client(Protocol):
|
||||
|
||||
def __init__(self):
|
||||
pass
|
||||
|
||||
|
@ -7,13 +7,11 @@ from aprsd.client import base
|
||||
from aprsd.client.drivers import fake as fake_driver
|
||||
from aprsd.utils import trace
|
||||
|
||||
|
||||
CONF = cfg.CONF
|
||||
LOG = logging.getLogger("APRSD")
|
||||
|
||||
|
||||
class APRSDFakeClient(base.APRSClient, metaclass=trace.TraceWrapperMetaclass):
|
||||
|
||||
def stats(self, serializable=False) -> dict:
|
||||
return {
|
||||
"transport": "Fake",
|
||||
|
@ -2,23 +2,21 @@ import datetime
|
||||
import logging
|
||||
|
||||
import aprslib
|
||||
import timeago
|
||||
from loguru import logger
|
||||
from oslo_config import cfg
|
||||
import timeago
|
||||
|
||||
from aprsd import client, exception
|
||||
from aprsd.client import base
|
||||
from aprsd.client.drivers import kiss
|
||||
from aprsd.packets import core
|
||||
|
||||
|
||||
CONF = cfg.CONF
|
||||
LOG = logging.getLogger("APRSD")
|
||||
LOGU = logger
|
||||
|
||||
|
||||
class KISSClient(base.APRSClient):
|
||||
|
||||
_client = None
|
||||
keepalive = datetime.datetime.now()
|
||||
|
||||
|
@ -1,18 +1,16 @@
|
||||
import threading
|
||||
|
||||
from oslo_config import cfg
|
||||
import wrapt
|
||||
from oslo_config import cfg
|
||||
|
||||
from aprsd import client
|
||||
from aprsd.utils import singleton
|
||||
|
||||
|
||||
CONF = cfg.CONF
|
||||
|
||||
|
||||
@singleton
|
||||
class APRSClientStats:
|
||||
|
||||
lock = threading.Lock()
|
||||
|
||||
@wrapt.synchronized(lock)
|
||||
|
@ -3,12 +3,13 @@ import click.shell_completion
|
||||
|
||||
from aprsd.main import cli
|
||||
|
||||
|
||||
CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"])
|
||||
|
||||
|
||||
@cli.command()
|
||||
@click.argument("shell", type=click.Choice(list(click.shell_completion._available_shells)))
|
||||
@click.argument(
|
||||
"shell", type=click.Choice(list(click.shell_completion._available_shells))
|
||||
)
|
||||
def completion(shell):
|
||||
"""Show the shell completion code"""
|
||||
from click.utils import _detect_program_name
|
||||
@ -17,6 +18,8 @@ def completion(shell):
|
||||
prog_name = _detect_program_name()
|
||||
complete_var = f"_{prog_name}_COMPLETE".replace("-", "_").upper()
|
||||
print(cls(cli, {}, prog_name, complete_var).source())
|
||||
print("# Add the following line to your shell configuration file to have aprsd command line completion")
|
||||
print(
|
||||
"# Add the following line to your shell configuration file to have aprsd command line completion"
|
||||
)
|
||||
print("# but remove the leading '#' character.")
|
||||
print(f"# eval \"$(aprsd completion {shell})\"")
|
||||
print(f'# eval "$(aprsd completion {shell})"')
|
||||
|
@ -9,12 +9,12 @@ import click
|
||||
from oslo_config import cfg
|
||||
|
||||
from aprsd import cli_helper, conf, packets, plugin
|
||||
|
||||
# local imports here
|
||||
from aprsd.client import base
|
||||
from aprsd.main import cli
|
||||
from aprsd.utils import trace
|
||||
|
||||
|
||||
CONF = cfg.CONF
|
||||
LOG = logging.getLogger("APRSD")
|
||||
CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"])
|
||||
@ -112,7 +112,8 @@ def test_plugin(
|
||||
# Register the plugin they wanted tested.
|
||||
LOG.info(
|
||||
"Testing plugin {} Version {}".format(
|
||||
obj.__class__, obj.version,
|
||||
obj.__class__,
|
||||
obj.version,
|
||||
),
|
||||
)
|
||||
pm.register_msg(obj)
|
||||
|
@ -2,8 +2,8 @@
|
||||
import logging
|
||||
|
||||
import click
|
||||
from oslo_config import cfg
|
||||
import requests
|
||||
from oslo_config import cfg
|
||||
from rich.console import Console
|
||||
from rich.table import Table
|
||||
|
||||
@ -13,7 +13,6 @@ from aprsd import cli_helper
|
||||
from aprsd.main import cli
|
||||
from aprsd.threads.stats import StatsStore
|
||||
|
||||
|
||||
# setup the global logger
|
||||
# log.basicConfig(level=log.DEBUG) # level=10
|
||||
LOG = logging.getLogger("APRSD")
|
||||
@ -23,12 +22,14 @@ CONF = cfg.CONF
|
||||
@cli.command()
|
||||
@cli_helper.add_options(cli_helper.common_options)
|
||||
@click.option(
|
||||
"--host", type=str,
|
||||
"--host",
|
||||
type=str,
|
||||
default=None,
|
||||
help="IP address of the remote aprsd admin web ui fetch stats from.",
|
||||
)
|
||||
@click.option(
|
||||
"--port", type=int,
|
||||
"--port",
|
||||
type=int,
|
||||
default=None,
|
||||
help="Port of the remote aprsd web admin interface to fetch stats from.",
|
||||
)
|
||||
@ -169,8 +170,8 @@ def fetch_stats(ctx, host, port):
|
||||
"--show-section",
|
||||
default=["All"],
|
||||
help="Show specific sections of the stats. "
|
||||
" Choices: All, APRSDStats, APRSDThreadList, APRSClientStats,"
|
||||
" PacketList, SeenList, WatchList",
|
||||
" Choices: All, APRSDStats, APRSDThreadList, APRSClientStats,"
|
||||
" PacketList, SeenList, WatchList",
|
||||
multiple=True,
|
||||
type=click.Choice(
|
||||
[
|
||||
|
@ -13,13 +13,15 @@ from oslo_config import cfg
|
||||
from rich.console import Console
|
||||
|
||||
import aprsd
|
||||
from aprsd import cli_helper
|
||||
from aprsd import conf # noqa
|
||||
from aprsd import (
|
||||
cli_helper,
|
||||
conf, # noqa
|
||||
)
|
||||
|
||||
# local imports here
|
||||
from aprsd.main import cli
|
||||
from aprsd.threads import stats as stats_threads
|
||||
|
||||
|
||||
# setup the global logger
|
||||
# log.basicConfig(level=log.DEBUG) # level=10
|
||||
CONF = cfg.CONF
|
||||
|
@ -9,9 +9,9 @@ import sys
|
||||
from traceback import print_tb
|
||||
from urllib.parse import urljoin
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
import click
|
||||
import requests
|
||||
from bs4 import BeautifulSoup
|
||||
from rich.console import Console
|
||||
from rich.table import Table
|
||||
from rich.text import Text
|
||||
@ -22,7 +22,6 @@ from aprsd import plugin as aprsd_plugin
|
||||
from aprsd.main import cli
|
||||
from aprsd.plugins import fortune, notify, ping, time, version, weather
|
||||
|
||||
|
||||
LOG = logging.getLogger("APRSD")
|
||||
PYPI_URL = "https://pypi.org/search/"
|
||||
|
||||
@ -79,7 +78,8 @@ def get_module_info(package_name, module_name, module_path):
|
||||
obj_list.append(
|
||||
{
|
||||
"package": package_name,
|
||||
"name": mem_name, "obj": obj,
|
||||
"name": mem_name,
|
||||
"obj": obj,
|
||||
"version": obj.version,
|
||||
"path": f"{'.'.join([module_name, obj.__name__])}",
|
||||
},
|
||||
@ -99,7 +99,9 @@ def _get_installed_aprsd_items():
|
||||
module = importlib.import_module(name)
|
||||
pkgs = walk_package(module)
|
||||
for pkg in pkgs:
|
||||
pkg_info = get_module_info(module.__name__, pkg.name, module.__path__[0])
|
||||
pkg_info = get_module_info(
|
||||
module.__name__, pkg.name, module.__path__[0]
|
||||
)
|
||||
if "plugin" in name:
|
||||
plugins[name] = pkg_info
|
||||
elif "extension" in name:
|
||||
@ -193,10 +195,18 @@ def show_pypi_plugins(installed_plugins, console):
|
||||
table.add_column("Installed?", style="red", justify="center")
|
||||
for snippet in snippets:
|
||||
link = urljoin(PYPI_URL, snippet.get("href"))
|
||||
package = re.sub(r"\s+", " ", snippet.select_one('span[class*="name"]').text.strip())
|
||||
version = re.sub(r"\s+", " ", snippet.select_one('span[class*="version"]').text.strip())
|
||||
created = re.sub(r"\s+", " ", snippet.select_one('span[class*="created"]').text.strip())
|
||||
description = re.sub(r"\s+", " ", snippet.select_one('p[class*="description"]').text.strip())
|
||||
package = re.sub(
|
||||
r"\s+", " ", snippet.select_one('span[class*="name"]').text.strip()
|
||||
)
|
||||
version = re.sub(
|
||||
r"\s+", " ", snippet.select_one('span[class*="version"]').text.strip()
|
||||
)
|
||||
created = re.sub(
|
||||
r"\s+", " ", snippet.select_one('span[class*="created"]').text.strip()
|
||||
)
|
||||
description = re.sub(
|
||||
r"\s+", " ", snippet.select_one('p[class*="description"]').text.strip()
|
||||
)
|
||||
emoji = ":open_file_folder:"
|
||||
|
||||
if "aprsd-" not in package or "-plugin" not in package:
|
||||
@ -210,7 +220,10 @@ def show_pypi_plugins(installed_plugins, console):
|
||||
|
||||
table.add_row(
|
||||
f"[link={link}]{emoji}[/link] {package}",
|
||||
description, version, created, installed,
|
||||
description,
|
||||
version,
|
||||
created,
|
||||
installed,
|
||||
)
|
||||
|
||||
console.print("\n")
|
||||
@ -234,10 +247,18 @@ def show_pypi_extensions(installed_extensions, console):
|
||||
table.add_column("Installed?", style="red", justify="center")
|
||||
for snippet in snippets:
|
||||
link = urljoin(PYPI_URL, snippet.get("href"))
|
||||
package = re.sub(r"\s+", " ", snippet.select_one('span[class*="name"]').text.strip())
|
||||
version = re.sub(r"\s+", " ", snippet.select_one('span[class*="version"]').text.strip())
|
||||
created = re.sub(r"\s+", " ", snippet.select_one('span[class*="created"]').text.strip())
|
||||
description = re.sub(r"\s+", " ", snippet.select_one('p[class*="description"]').text.strip())
|
||||
package = re.sub(
|
||||
r"\s+", " ", snippet.select_one('span[class*="name"]').text.strip()
|
||||
)
|
||||
version = re.sub(
|
||||
r"\s+", " ", snippet.select_one('span[class*="version"]').text.strip()
|
||||
)
|
||||
created = re.sub(
|
||||
r"\s+", " ", snippet.select_one('span[class*="created"]').text.strip()
|
||||
)
|
||||
description = re.sub(
|
||||
r"\s+", " ", snippet.select_one('p[class*="description"]').text.strip()
|
||||
)
|
||||
emoji = ":open_file_folder:"
|
||||
|
||||
if "aprsd-" not in package or "-extension" not in package:
|
||||
@ -251,7 +272,10 @@ def show_pypi_extensions(installed_extensions, console):
|
||||
|
||||
table.add_row(
|
||||
f"[link={link}]{emoji}[/link] {package}",
|
||||
description, version, created, installed,
|
||||
description,
|
||||
version,
|
||||
created,
|
||||
installed,
|
||||
)
|
||||
|
||||
console.print("\n")
|
||||
|
@ -27,7 +27,6 @@ from aprsd.threads import keepalive, rx
|
||||
from aprsd.threads import stats as stats_thread
|
||||
from aprsd.threads.aprsd import APRSDThread
|
||||
|
||||
|
||||
# setup the global logger
|
||||
# log.basicConfig(level=log.DEBUG) # level=10
|
||||
LOG = logging.getLogger("APRSD")
|
||||
@ -51,8 +50,12 @@ def signal_handler(sig, frame):
|
||||
|
||||
class APRSDListenThread(rx.APRSDRXThread):
|
||||
def __init__(
|
||||
self, packet_queue, packet_filter=None, plugin_manager=None,
|
||||
enabled_plugins=[], log_packets=False,
|
||||
self,
|
||||
packet_queue,
|
||||
packet_filter=None,
|
||||
plugin_manager=None,
|
||||
enabled_plugins=[],
|
||||
log_packets=False,
|
||||
):
|
||||
super().__init__(packet_queue)
|
||||
self.packet_filter = packet_filter
|
||||
@ -126,7 +129,7 @@ class ListenStatsThread(APRSDThread):
|
||||
thread_hex = f"fg {utils.hex_from_name(k)}"
|
||||
LOGU.opt(colors=True).info(
|
||||
f"<{thread_hex}>{k:<15}</{thread_hex}> "
|
||||
f"<blue>RX: {v["rx"]}</blue> <red>TX: {v["tx"]}</red>",
|
||||
f"<blue>RX: {v['rx']}</blue> <red>TX: {v['tx']}</red>",
|
||||
)
|
||||
|
||||
time.sleep(1)
|
||||
@ -265,7 +268,7 @@ def listen(
|
||||
LOG.debug(f"Filter by '{filter}'")
|
||||
aprs_client.set_filter(filter)
|
||||
|
||||
keepalive = keepalive.KeepAliveThread()
|
||||
keepalive_thread = keepalive.KeepAliveThread()
|
||||
|
||||
if not CONF.enable_seen_list:
|
||||
# just deregister the class from the packet collector
|
||||
@ -309,9 +312,9 @@ def listen(
|
||||
listen_stats = ListenStatsThread()
|
||||
listen_stats.start()
|
||||
|
||||
keepalive.start()
|
||||
keepalive_thread.start()
|
||||
LOG.debug("keepalive Join")
|
||||
keepalive.join()
|
||||
keepalive_thread.join()
|
||||
LOG.debug("listen_thread Join")
|
||||
listen_thread.join()
|
||||
stats.join()
|
||||
|
@ -6,20 +6,17 @@ import click
|
||||
from oslo_config import cfg
|
||||
|
||||
import aprsd
|
||||
from aprsd import cli_helper
|
||||
from aprsd import cli_helper, plugin, threads, utils
|
||||
from aprsd import main as aprsd_main
|
||||
from aprsd import plugin, threads, utils
|
||||
from aprsd.client import client_factory
|
||||
from aprsd.main import cli
|
||||
from aprsd.packets import collector as packet_collector
|
||||
from aprsd.packets import seen_list
|
||||
from aprsd.threads import aprsd as aprsd_threads
|
||||
from aprsd.threads import keepalive, registry, rx
|
||||
from aprsd.threads import keepalive, registry, rx, tx
|
||||
from aprsd.threads import stats as stats_thread
|
||||
from aprsd.threads import tx
|
||||
from aprsd.utils import singleton
|
||||
|
||||
|
||||
CONF = cfg.CONF
|
||||
LOG = logging.getLogger("APRSD")
|
||||
|
||||
@ -32,6 +29,7 @@ class ServerThreads:
|
||||
the server command.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self.threads: list[aprsd_threads.APRSDThread] = []
|
||||
|
||||
|
@ -4,7 +4,6 @@ The options for log setup
|
||||
|
||||
from oslo_config import cfg
|
||||
|
||||
|
||||
DEFAULT_LOGIN = "NOCALL"
|
||||
|
||||
aprs_group = cfg.OptGroup(
|
||||
@ -31,7 +30,7 @@ aprs_opts = [
|
||||
"enabled",
|
||||
default=True,
|
||||
help="Set enabled to False if there is no internet connectivity."
|
||||
"This is useful for a direwolf KISS aprs connection only.",
|
||||
"This is useful for a direwolf KISS aprs connection only.",
|
||||
),
|
||||
cfg.StrOpt(
|
||||
"login",
|
||||
@ -42,8 +41,8 @@ aprs_opts = [
|
||||
"password",
|
||||
secret=True,
|
||||
help="APRS Password "
|
||||
"Get the passcode for your callsign here: "
|
||||
"https://apps.magicbug.co.uk/passcode",
|
||||
"Get the passcode for your callsign here: "
|
||||
"https://apps.magicbug.co.uk/passcode",
|
||||
),
|
||||
cfg.HostAddressOpt(
|
||||
"host",
|
||||
|
@ -2,7 +2,6 @@ from pathlib import Path
|
||||
|
||||
from oslo_config import cfg
|
||||
|
||||
|
||||
home = str(Path.home())
|
||||
DEFAULT_CONFIG_DIR = f"{home}/.config/aprsd/"
|
||||
APRSD_DEFAULT_MAGIC_WORD = "CHANGEME!!!"
|
||||
@ -47,15 +46,15 @@ aprsd_opts = [
|
||||
"ack_rate_limit_period",
|
||||
default=1,
|
||||
help="The wait period in seconds per Ack packet being sent."
|
||||
"1 means 1 ack packet per second allowed."
|
||||
"2 means 1 pack packet every 2 seconds allowed",
|
||||
"1 means 1 ack packet per second allowed."
|
||||
"2 means 1 pack packet every 2 seconds allowed",
|
||||
),
|
||||
cfg.IntOpt(
|
||||
"msg_rate_limit_period",
|
||||
default=2,
|
||||
help="Wait period in seconds per non AckPacket being sent."
|
||||
"2 means 1 packet every 2 seconds allowed."
|
||||
"5 means 1 pack packet every 5 seconds allowed",
|
||||
"2 means 1 packet every 2 seconds allowed."
|
||||
"5 means 1 pack packet every 5 seconds allowed",
|
||||
),
|
||||
cfg.IntOpt(
|
||||
"packet_dupe_timeout",
|
||||
@ -66,7 +65,7 @@ aprsd_opts = [
|
||||
"enable_beacon",
|
||||
default=False,
|
||||
help="Enable sending of a GPS Beacon packet to locate this service. "
|
||||
"Requires latitude and longitude to be set.",
|
||||
"Requires latitude and longitude to be set.",
|
||||
),
|
||||
cfg.IntOpt(
|
||||
"beacon_interval",
|
||||
@ -93,8 +92,8 @@ aprsd_opts = [
|
||||
choices=["compact", "multiline", "both"],
|
||||
default="compact",
|
||||
help="When logging packets 'compact' will use a single line formatted for each packet."
|
||||
"'multiline' will use multiple lines for each packet and is the traditional format."
|
||||
"both will log both compact and multiline.",
|
||||
"'multiline' will use multiple lines for each packet and is the traditional format."
|
||||
"both will log both compact and multiline.",
|
||||
),
|
||||
cfg.IntOpt(
|
||||
"default_packet_send_count",
|
||||
@ -120,7 +119,7 @@ aprsd_opts = [
|
||||
"enable_seen_list",
|
||||
default=True,
|
||||
help="Enable the Callsign seen list tracking feature. This allows aprsd to keep track of "
|
||||
"callsigns that have been seen and when they were last seen.",
|
||||
"callsigns that have been seen and when they were last seen.",
|
||||
),
|
||||
cfg.BoolOpt(
|
||||
"enable_packet_logging",
|
||||
@ -136,7 +135,7 @@ aprsd_opts = [
|
||||
"enable_sending_ack_packets",
|
||||
default=True,
|
||||
help="Set this to False, to disable sending of ack packets. This will entirely stop"
|
||||
"APRSD from sending ack packets.",
|
||||
"APRSD from sending ack packets.",
|
||||
),
|
||||
]
|
||||
|
||||
@ -145,8 +144,8 @@ watch_list_opts = [
|
||||
"enabled",
|
||||
default=False,
|
||||
help="Enable the watch list feature. Still have to enable "
|
||||
"the correct plugin. Built-in plugin to use is "
|
||||
"aprsd.plugins.notify.NotifyPlugin",
|
||||
"the correct plugin. Built-in plugin to use is "
|
||||
"aprsd.plugins.notify.NotifyPlugin",
|
||||
),
|
||||
cfg.ListOpt(
|
||||
"callsigns",
|
||||
@ -165,7 +164,7 @@ watch_list_opts = [
|
||||
"alert_time_seconds",
|
||||
default=3600,
|
||||
help="Time to wait before alert is sent on new message for "
|
||||
"users in callsigns.",
|
||||
"users in callsigns.",
|
||||
),
|
||||
]
|
||||
|
||||
@ -183,8 +182,8 @@ enabled_plugins_opts = [
|
||||
"aprsd.plugins.notify.NotifySeenPlugin",
|
||||
],
|
||||
help="Comma separated list of enabled plugins for APRSD."
|
||||
"To enable installed external plugins add them here."
|
||||
"The full python path to the class name must be used",
|
||||
"To enable installed external plugins add them here."
|
||||
"The full python path to the class name must be used",
|
||||
),
|
||||
]
|
||||
|
||||
@ -193,16 +192,16 @@ registry_opts = [
|
||||
"enabled",
|
||||
default=False,
|
||||
help="Enable sending aprs registry information. This will let the "
|
||||
"APRS registry know about your service and it's uptime. "
|
||||
"No personal information is sent, just the callsign, uptime and description. "
|
||||
"The service callsign is the callsign set in [DEFAULT] section.",
|
||||
"APRS registry know about your service and it's uptime. "
|
||||
"No personal information is sent, just the callsign, uptime and description. "
|
||||
"The service callsign is the callsign set in [DEFAULT] section.",
|
||||
),
|
||||
cfg.StrOpt(
|
||||
"description",
|
||||
default=None,
|
||||
help="Description of the service to send to the APRS registry. "
|
||||
"This is what will show up in the APRS registry."
|
||||
"If not set, the description will be the same as the callsign.",
|
||||
"This is what will show up in the APRS registry."
|
||||
"If not set, the description will be the same as the callsign.",
|
||||
),
|
||||
cfg.StrOpt(
|
||||
"registry_url",
|
||||
|
@ -1,11 +1,11 @@
|
||||
"""
|
||||
The options for log setup
|
||||
"""
|
||||
|
||||
import logging
|
||||
|
||||
from oslo_config import cfg
|
||||
|
||||
|
||||
LOG_LEVELS = {
|
||||
"CRITICAL": logging.CRITICAL,
|
||||
"ERROR": logging.ERROR,
|
||||
@ -59,7 +59,5 @@ def register_opts(config):
|
||||
|
||||
def list_opts():
|
||||
return {
|
||||
logging_group.name: (
|
||||
logging_opts
|
||||
),
|
||||
logging_group.name: (logging_opts),
|
||||
}
|
||||
|
@ -31,7 +31,6 @@ import importlib
|
||||
import os
|
||||
import pkgutil
|
||||
|
||||
|
||||
LIST_OPTS_FUNC_NAME = "list_opts"
|
||||
|
||||
|
||||
@ -64,9 +63,11 @@ def _import_modules(module_names):
|
||||
for modname in module_names:
|
||||
mod = importlib.import_module("aprsd.conf." + modname)
|
||||
if not hasattr(mod, LIST_OPTS_FUNC_NAME):
|
||||
msg = "The module 'aprsd.conf.%s' should have a '%s' "\
|
||||
"function which returns the config options." % \
|
||||
(modname, LIST_OPTS_FUNC_NAME)
|
||||
msg = (
|
||||
"The module 'aprsd.conf.%s' should have a '%s' "
|
||||
"function which returns the config options."
|
||||
% (modname, LIST_OPTS_FUNC_NAME)
|
||||
)
|
||||
raise Exception(msg)
|
||||
else:
|
||||
imported_modules.append(mod)
|
||||
|
@ -1,6 +1,5 @@
|
||||
from oslo_config import cfg
|
||||
|
||||
|
||||
aprsfi_group = cfg.OptGroup(
|
||||
name="aprs_fi",
|
||||
title="APRS.FI website settings",
|
||||
@ -21,8 +20,7 @@ owm_wx_group = cfg.OptGroup(
|
||||
aprsfi_opts = [
|
||||
cfg.StrOpt(
|
||||
"apiKey",
|
||||
help="Get the apiKey from your aprs.fi account here:"
|
||||
"http://aprs.fi/account",
|
||||
help="Get the apiKey from your aprs.fi account here:" "http://aprs.fi/account",
|
||||
),
|
||||
]
|
||||
|
||||
@ -30,11 +28,11 @@ owm_wx_opts = [
|
||||
cfg.StrOpt(
|
||||
"apiKey",
|
||||
help="OWMWeatherPlugin api key to OpenWeatherMap's API."
|
||||
"This plugin uses the openweathermap API to fetch"
|
||||
"location and weather information."
|
||||
"To use this plugin you need to get an openweathermap"
|
||||
"account and apikey."
|
||||
"https://home.openweathermap.org/api_keys",
|
||||
"This plugin uses the openweathermap API to fetch"
|
||||
"location and weather information."
|
||||
"To use this plugin you need to get an openweathermap"
|
||||
"account and apikey."
|
||||
"https://home.openweathermap.org/api_keys",
|
||||
),
|
||||
]
|
||||
|
||||
@ -42,16 +40,16 @@ avwx_opts = [
|
||||
cfg.StrOpt(
|
||||
"apiKey",
|
||||
help="avwx-api is an opensource project that has"
|
||||
"a hosted service here: https://avwx.rest/"
|
||||
"You can launch your own avwx-api in a container"
|
||||
"by cloning the githug repo here:"
|
||||
"https://github.com/avwx-rest/AVWX-API",
|
||||
"a hosted service here: https://avwx.rest/"
|
||||
"You can launch your own avwx-api in a container"
|
||||
"by cloning the githug repo here:"
|
||||
"https://github.com/avwx-rest/AVWX-API",
|
||||
),
|
||||
cfg.StrOpt(
|
||||
"base_url",
|
||||
default="https://avwx.rest",
|
||||
help="The base url for the avwx API. If you are hosting your own"
|
||||
"Here is where you change the url to point to yours.",
|
||||
"Here is where you change the url to point to yours.",
|
||||
),
|
||||
]
|
||||
|
||||
|
@ -1,11 +1,13 @@
|
||||
class MissingConfigOptionException(Exception):
|
||||
"""Missing a config option."""
|
||||
|
||||
def __init__(self, config_option):
|
||||
self.message = f"Option '{config_option}' was not in config file"
|
||||
|
||||
|
||||
class ConfigOptionBogusDefaultException(Exception):
|
||||
"""Missing a config option."""
|
||||
|
||||
def __init__(self, config_option, default_fail):
|
||||
self.message = (
|
||||
f"Config file option '{config_option}' needs to be "
|
||||
|
@ -7,7 +7,6 @@ from oslo_config import cfg
|
||||
|
||||
from aprsd.conf import log as conf_log
|
||||
|
||||
|
||||
CONF = cfg.CONF
|
||||
# LOG = logging.getLogger("APRSD")
|
||||
LOG = logger
|
||||
@ -18,6 +17,7 @@ class QueueLatest(queue.Queue):
|
||||
|
||||
This prevents the queue from blowing up in size.
|
||||
"""
|
||||
|
||||
def put(self, *args, **kwargs):
|
||||
try:
|
||||
super().put(*args, **kwargs)
|
||||
@ -43,7 +43,9 @@ class InterceptHandler(logging.Handler):
|
||||
frame = frame.f_back
|
||||
depth += 1
|
||||
|
||||
logger.opt(depth=depth, exception=record.exc_info).log(level, record.getMessage())
|
||||
logger.opt(depth=depth, exception=record.exc_info).log(
|
||||
level, record.getMessage()
|
||||
)
|
||||
|
||||
|
||||
# Setup the log faciility
|
||||
|
@ -22,11 +22,11 @@
|
||||
# python included libs
|
||||
import datetime
|
||||
import importlib.metadata as imp
|
||||
from importlib.metadata import version as metadata_version
|
||||
import logging
|
||||
import signal
|
||||
import sys
|
||||
import time
|
||||
from importlib.metadata import version as metadata_version
|
||||
|
||||
import click
|
||||
from oslo_config import cfg, generator
|
||||
@ -36,7 +36,6 @@ import aprsd
|
||||
from aprsd import cli_helper, packets, threads, utils
|
||||
from aprsd.stats import collector
|
||||
|
||||
|
||||
# setup the global logger
|
||||
# log.basicConfig(level=log.DEBUG) # level=10
|
||||
CONF = cfg.CONF
|
||||
@ -54,8 +53,14 @@ def cli(ctx):
|
||||
|
||||
def load_commands():
|
||||
from .cmds import ( # noqa
|
||||
completion, dev, fetch_stats, healthcheck, list_plugins, listen,
|
||||
send_message, server,
|
||||
completion,
|
||||
dev,
|
||||
fetch_stats,
|
||||
healthcheck,
|
||||
list_plugins,
|
||||
listen,
|
||||
send_message,
|
||||
server,
|
||||
)
|
||||
|
||||
|
||||
@ -115,6 +120,7 @@ def sample_config(ctx):
|
||||
|
||||
def _get_selected_entry_points():
|
||||
import sys
|
||||
|
||||
if sys.version_info < (3, 10):
|
||||
all = imp.entry_points()
|
||||
selected = []
|
||||
|
@ -1,15 +1,25 @@
|
||||
from aprsd.packets import collector
|
||||
from aprsd.packets.core import ( # noqa: F401
|
||||
AckPacket, BeaconPacket, BulletinPacket, GPSPacket, MessagePacket,
|
||||
MicEPacket, ObjectPacket, Packet, RejectPacket, StatusPacket,
|
||||
ThirdPartyPacket, UnknownPacket, WeatherPacket, factory,
|
||||
AckPacket,
|
||||
BeaconPacket,
|
||||
BulletinPacket,
|
||||
GPSPacket,
|
||||
MessagePacket,
|
||||
MicEPacket,
|
||||
ObjectPacket,
|
||||
Packet,
|
||||
RejectPacket,
|
||||
StatusPacket,
|
||||
ThirdPartyPacket,
|
||||
UnknownPacket,
|
||||
WeatherPacket,
|
||||
factory,
|
||||
)
|
||||
from aprsd.packets.packet_list import PacketList # noqa: F401
|
||||
from aprsd.packets.seen_list import SeenList # noqa: F401
|
||||
from aprsd.packets.tracker import PacketTrack # noqa: F401
|
||||
from aprsd.packets.watch_list import WatchList # noqa: F401
|
||||
|
||||
|
||||
# Register all the packet tracking objects.
|
||||
collector.PacketCollector().register(PacketList)
|
||||
collector.PacketCollector().register(SeenList)
|
||||
|
@ -1,20 +1,23 @@
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import datetime
|
||||
import logging
|
||||
import re
|
||||
import time
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import datetime
|
||||
|
||||
# Due to a failure in python 3.8
|
||||
from typing import Any, List, Optional, Type, TypeVar, Union
|
||||
|
||||
from aprslib import util as aprslib_util
|
||||
from dataclasses_json import (
|
||||
CatchAll, DataClassJsonMixin, Undefined, dataclass_json,
|
||||
CatchAll,
|
||||
DataClassJsonMixin,
|
||||
Undefined,
|
||||
dataclass_json,
|
||||
)
|
||||
from loguru import logger
|
||||
|
||||
from aprsd.utils import counter
|
||||
|
||||
|
||||
# For mypy to be happy
|
||||
A = TypeVar("A", bound="DataClassJsonMixin")
|
||||
Json = Union[dict, list, str, int, float, bool, None]
|
||||
@ -51,7 +54,7 @@ def _init_send_time():
|
||||
return NO_DATE
|
||||
|
||||
|
||||
def _init_msgNo(): # noqa: N802
|
||||
def _init_msgNo(): # noqa: N802
|
||||
"""For some reason __post__init doesn't get called.
|
||||
|
||||
So in order to initialize the msgNo field in the packet
|
||||
@ -84,14 +87,16 @@ class Packet:
|
||||
to_call: Optional[str] = field(default=None)
|
||||
addresse: Optional[str] = field(default=None)
|
||||
format: Optional[str] = field(default=None)
|
||||
msgNo: Optional[str] = field(default=None) # noqa: N815
|
||||
ackMsgNo: Optional[str] = field(default=None) # noqa: N815
|
||||
msgNo: Optional[str] = field(default=None) # noqa: N815
|
||||
ackMsgNo: Optional[str] = field(default=None) # noqa: N815
|
||||
packet_type: Optional[str] = field(default=None)
|
||||
timestamp: float = field(default_factory=_init_timestamp, compare=False, hash=False)
|
||||
# Holds the raw text string to be sent over the wire
|
||||
# or holds the raw string from input packet
|
||||
raw: Optional[str] = field(default=None, compare=False, hash=False)
|
||||
raw_dict: dict = field(repr=False, default_factory=lambda: {}, compare=False, hash=False)
|
||||
raw_dict: dict = field(
|
||||
repr=False, default_factory=lambda: {}, compare=False, hash=False
|
||||
)
|
||||
# Built by calling prepare(). raw needs this built first.
|
||||
payload: Optional[str] = field(default=None)
|
||||
|
||||
@ -140,12 +145,12 @@ class Packet:
|
||||
def _build_payload(self) -> None:
|
||||
"""The payload is the non headers portion of the packet."""
|
||||
if not self.to_call:
|
||||
raise ValueError("to_call isn't set. Must set to_call before calling prepare()")
|
||||
raise ValueError(
|
||||
"to_call isn't set. Must set to_call before calling prepare()"
|
||||
)
|
||||
|
||||
# The base packet class has no real payload
|
||||
self.payload = (
|
||||
f":{self.to_call.ljust(9)}"
|
||||
)
|
||||
self.payload = f":{self.to_call.ljust(9)}"
|
||||
|
||||
def _build_raw(self) -> None:
|
||||
"""Build the self.raw which is what is sent over the air."""
|
||||
@ -166,8 +171,10 @@ class Packet:
|
||||
message = msg[:67]
|
||||
# We all miss George Carlin
|
||||
return re.sub(
|
||||
"fuck|shit|cunt|piss|cock|bitch", "****",
|
||||
message, flags=re.IGNORECASE,
|
||||
"fuck|shit|cunt|piss|cock|bitch",
|
||||
"****",
|
||||
message,
|
||||
flags=re.IGNORECASE,
|
||||
)
|
||||
|
||||
def __str__(self) -> str:
|
||||
@ -214,10 +221,7 @@ class BulletinPacket(Packet):
|
||||
return f"BLN{self.bid} {self.message_text}"
|
||||
|
||||
def _build_payload(self) -> None:
|
||||
self.payload = (
|
||||
f":BLN{self.bid:<9}"
|
||||
f":{self.message_text}"
|
||||
)
|
||||
self.payload = f":BLN{self.bid:<9}" f":{self.message_text}"
|
||||
|
||||
|
||||
@dataclass_json
|
||||
@ -335,10 +339,7 @@ class GPSPacket(Packet):
|
||||
self.payload = "".join(payload)
|
||||
|
||||
def _build_raw(self):
|
||||
self.raw = (
|
||||
f"{self.from_call}>{self.to_call},WIDE2-1:"
|
||||
f"{self.payload}"
|
||||
)
|
||||
self.raw = f"{self.from_call}>{self.to_call},WIDE2-1:" f"{self.payload}"
|
||||
|
||||
@property
|
||||
def human_info(self) -> str:
|
||||
@ -370,10 +371,7 @@ class BeaconPacket(GPSPacket):
|
||||
lat = aprslib_util.latitude_to_ddm(self.latitude)
|
||||
lon = aprslib_util.longitude_to_ddm(self.longitude)
|
||||
|
||||
self.payload = (
|
||||
f"@{time_zulu}z{lat}{self.symbol_table}"
|
||||
f"{lon}"
|
||||
)
|
||||
self.payload = f"@{time_zulu}z{lat}{self.symbol_table}" f"{lon}"
|
||||
|
||||
if self.comment:
|
||||
comment = self._filter_for_send(self.comment)
|
||||
@ -382,10 +380,7 @@ class BeaconPacket(GPSPacket):
|
||||
self.payload = f"{self.payload}{self.symbol}APRSD Beacon"
|
||||
|
||||
def _build_raw(self):
|
||||
self.raw = (
|
||||
f"{self.from_call}>APZ100:"
|
||||
f"{self.payload}"
|
||||
)
|
||||
self.raw = f"{self.from_call}>APZ100:" f"{self.payload}"
|
||||
|
||||
@property
|
||||
def key(self) -> str:
|
||||
@ -474,10 +469,7 @@ class ObjectPacket(GPSPacket):
|
||||
lat = aprslib_util.latitude_to_ddm(self.latitude)
|
||||
long = aprslib_util.longitude_to_ddm(self.longitude)
|
||||
|
||||
self.payload = (
|
||||
f"*{time_zulu}z{lat}{self.symbol_table}"
|
||||
f"{long}{self.symbol}"
|
||||
)
|
||||
self.payload = f"*{time_zulu}z{lat}{self.symbol_table}" f"{long}{self.symbol}"
|
||||
|
||||
if self.comment:
|
||||
comment = self._filter_for_send(self.comment)
|
||||
@ -494,10 +486,7 @@ class ObjectPacket(GPSPacket):
|
||||
The frequency, uplink_tone, offset is part of the comment
|
||||
"""
|
||||
|
||||
self.raw = (
|
||||
f"{self.from_call}>APZ100:;{self.to_call:9s}"
|
||||
f"{self.payload}"
|
||||
)
|
||||
self.raw = f"{self.from_call}>APZ100:;{self.to_call:9s}" f"{self.payload}"
|
||||
|
||||
@property
|
||||
def human_info(self) -> str:
|
||||
@ -547,11 +536,13 @@ class WeatherPacket(GPSPacket, DataClassJsonMixin):
|
||||
if "speed" in raw:
|
||||
del raw["speed"]
|
||||
# Let's adjust the rain numbers as well, since it's wrong
|
||||
raw["rain_1h"] = round((raw.get("rain_1h", 0) / .254) * .01, 3)
|
||||
raw["rain_1h"] = round((raw.get("rain_1h", 0) / 0.254) * 0.01, 3)
|
||||
raw["weather"]["rain_1h"] = raw["rain_1h"]
|
||||
raw["rain_24h"] = round((raw.get("rain_24h", 0) / .254) * .01, 3)
|
||||
raw["rain_24h"] = round((raw.get("rain_24h", 0) / 0.254) * 0.01, 3)
|
||||
raw["weather"]["rain_24h"] = raw["rain_24h"]
|
||||
raw["rain_since_midnight"] = round((raw.get("rain_since_midnight", 0) / .254) * .01, 3)
|
||||
raw["rain_since_midnight"] = round(
|
||||
(raw.get("rain_since_midnight", 0) / 0.254) * 0.01, 3
|
||||
)
|
||||
raw["weather"]["rain_since_midnight"] = raw["rain_since_midnight"]
|
||||
|
||||
if "wind_direction" not in raw:
|
||||
@ -593,26 +584,26 @@ class WeatherPacket(GPSPacket, DataClassJsonMixin):
|
||||
def _build_payload(self):
|
||||
"""Build an uncompressed weather packet
|
||||
|
||||
Format =
|
||||
Format =
|
||||
|
||||
_CSE/SPDgXXXtXXXrXXXpXXXPXXXhXXbXXXXX%type NEW FORMAT APRS793 June 97
|
||||
NOT BACKWARD COMPATIBLE
|
||||
_CSE/SPDgXXXtXXXrXXXpXXXPXXXhXXbXXXXX%type NEW FORMAT APRS793 June 97
|
||||
NOT BACKWARD COMPATIBLE
|
||||
|
||||
|
||||
Where: CSE/SPD is wind direction and sustained 1 minute speed
|
||||
t is in degrees F
|
||||
Where: CSE/SPD is wind direction and sustained 1 minute speed
|
||||
t is in degrees F
|
||||
|
||||
r is Rain per last 60 minutes
|
||||
1.04 inches of rain will show as r104
|
||||
p is precipitation per last 24 hours (sliding 24 hour window)
|
||||
P is precip per last 24 hours since midnight
|
||||
b is Baro in tenths of a mb
|
||||
h is humidity in percent. 00=100
|
||||
g is Gust (peak winds in last 5 minutes)
|
||||
# is the raw rain counter for remote WX stations
|
||||
See notes on remotes below
|
||||
% shows software type d=Dos, m=Mac, w=Win, etc
|
||||
type shows type of WX instrument
|
||||
r is Rain per last 60 minutes
|
||||
1.04 inches of rain will show as r104
|
||||
p is precipitation per last 24 hours (sliding 24 hour window)
|
||||
P is precip per last 24 hours since midnight
|
||||
b is Baro in tenths of a mb
|
||||
h is humidity in percent. 00=100
|
||||
g is Gust (peak winds in last 5 minutes)
|
||||
# is the raw rain counter for remote WX stations
|
||||
See notes on remotes below
|
||||
% shows software type d=Dos, m=Mac, w=Win, etc
|
||||
type shows type of WX instrument
|
||||
|
||||
"""
|
||||
time_zulu = self._build_time_zulu()
|
||||
@ -622,7 +613,8 @@ class WeatherPacket(GPSPacket, DataClassJsonMixin):
|
||||
f"{self.longitude}{self.symbol}",
|
||||
f"{self.wind_direction:03d}",
|
||||
# Speed = sustained 1 minute wind speed in mph
|
||||
f"{self.symbol_table}", f"{self.wind_speed:03.0f}",
|
||||
f"{self.symbol_table}",
|
||||
f"{self.wind_speed:03.0f}",
|
||||
# wind gust (peak wind speed in mph in the last 5 minutes)
|
||||
f"g{self.wind_gust:03.0f}",
|
||||
# Temperature in degrees F
|
||||
@ -644,11 +636,7 @@ class WeatherPacket(GPSPacket, DataClassJsonMixin):
|
||||
self.payload = "".join(contents)
|
||||
|
||||
def _build_raw(self):
|
||||
|
||||
self.raw = (
|
||||
f"{self.from_call}>{self.to_call},WIDE1-1,WIDE2-1:"
|
||||
f"{self.payload}"
|
||||
)
|
||||
self.raw = f"{self.from_call}>{self.to_call},WIDE1-1,WIDE2-1:" f"{self.payload}"
|
||||
|
||||
|
||||
@dataclass(unsafe_hash=True)
|
||||
@ -692,14 +680,17 @@ class UnknownPacket:
|
||||
|
||||
All of the unknown attributes are stored in the unknown_fields
|
||||
"""
|
||||
|
||||
unknown_fields: CatchAll
|
||||
_type: str = "UnknownPacket"
|
||||
from_call: Optional[str] = field(default=None)
|
||||
to_call: Optional[str] = field(default=None)
|
||||
msgNo: str = field(default_factory=_init_msgNo) # noqa: N815
|
||||
msgNo: str = field(default_factory=_init_msgNo) # noqa: N815
|
||||
format: Optional[str] = field(default=None)
|
||||
raw: Optional[str] = field(default=None)
|
||||
raw_dict: dict = field(repr=False, default_factory=lambda: {}, compare=False, hash=False)
|
||||
raw_dict: dict = field(
|
||||
repr=False, default_factory=lambda: {}, compare=False, hash=False
|
||||
)
|
||||
path: List[str] = field(default_factory=list, compare=False, hash=False)
|
||||
packet_type: Optional[str] = field(default=None)
|
||||
via: Optional[str] = field(default=None, compare=False, hash=False)
|
||||
|
@ -8,7 +8,6 @@ from oslo_config import cfg
|
||||
from aprsd import utils
|
||||
from aprsd.packets.core import AckPacket, GPSPacket, RejectPacket
|
||||
|
||||
|
||||
LOG = logging.getLogger()
|
||||
LOGU = logger
|
||||
CONF = cfg.CONF
|
||||
@ -22,7 +21,9 @@ DISTANCE_COLOR = "fg #FF5733"
|
||||
DEGREES_COLOR = "fg #FFA900"
|
||||
|
||||
|
||||
def log_multiline(packet, tx: Optional[bool] = False, header: Optional[bool] = True) -> None:
|
||||
def log_multiline(
|
||||
packet, tx: Optional[bool] = False, header: Optional[bool] = True
|
||||
) -> None:
|
||||
"""LOG a packet to the logfile."""
|
||||
if not CONF.enable_packet_logging:
|
||||
return
|
||||
@ -121,8 +122,7 @@ def log(packet, tx: Optional[bool] = False, header: Optional[bool] = True) -> No
|
||||
via_color = "green"
|
||||
arrow = f"<{via_color}>-></{via_color}>"
|
||||
logit.append(
|
||||
f"<cyan>{name}</cyan>"
|
||||
f":{packet.msgNo}",
|
||||
f"<cyan>{name}</cyan>" f":{packet.msgNo}",
|
||||
)
|
||||
|
||||
tmp = None
|
||||
|
@ -1,18 +1,18 @@
|
||||
from collections import OrderedDict
|
||||
import logging
|
||||
from collections import OrderedDict
|
||||
|
||||
from oslo_config import cfg
|
||||
|
||||
from aprsd.packets import core
|
||||
from aprsd.utils import objectstore
|
||||
|
||||
|
||||
CONF = cfg.CONF
|
||||
LOG = logging.getLogger("APRSD")
|
||||
|
||||
|
||||
class PacketList(objectstore.ObjectStoreMixin):
|
||||
"""Class to keep track of the packets we tx/rx."""
|
||||
|
||||
_instance = None
|
||||
_total_rx: int = 0
|
||||
_total_tx: int = 0
|
||||
@ -38,7 +38,8 @@ class PacketList(objectstore.ObjectStoreMixin):
|
||||
self._add(packet)
|
||||
ptype = packet.__class__.__name__
|
||||
type_stats = self.data["types"].setdefault(
|
||||
ptype, {"tx": 0, "rx": 0},
|
||||
ptype,
|
||||
{"tx": 0, "rx": 0},
|
||||
)
|
||||
type_stats["rx"] += 1
|
||||
|
||||
@ -49,7 +50,8 @@ class PacketList(objectstore.ObjectStoreMixin):
|
||||
self._add(packet)
|
||||
ptype = packet.__class__.__name__
|
||||
type_stats = self.data["types"].setdefault(
|
||||
ptype, {"tx": 0, "rx": 0},
|
||||
ptype,
|
||||
{"tx": 0, "rx": 0},
|
||||
)
|
||||
type_stats["tx"] += 1
|
||||
|
||||
@ -86,10 +88,11 @@ class PacketList(objectstore.ObjectStoreMixin):
|
||||
with self.lock:
|
||||
# Get last N packets directly using list slicing
|
||||
packets_list = list(self.data.get("packets", {}).values())
|
||||
pkts = packets_list[-CONF.packet_list_stats_maxlen:][::-1]
|
||||
pkts = packets_list[-CONF.packet_list_stats_maxlen :][::-1]
|
||||
|
||||
stats = {
|
||||
"total_tracked": self._total_rx + self._total_tx, # Fixed typo: was rx + rx
|
||||
"total_tracked": self._total_rx
|
||||
+ self._total_tx, # Fixed typo: was rx + rx
|
||||
"rx": self._total_rx,
|
||||
"tx": self._total_tx,
|
||||
"types": self.data.get("types", {}), # Changed default from [] to {}
|
||||
|
@ -8,14 +8,13 @@ import re
|
||||
import textwrap
|
||||
import threading
|
||||
|
||||
from oslo_config import cfg
|
||||
import pluggy
|
||||
from oslo_config import cfg
|
||||
|
||||
import aprsd
|
||||
from aprsd import client, packets, threads
|
||||
from aprsd.packets import watch_list
|
||||
|
||||
|
||||
# setup the global logger
|
||||
CONF = cfg.CONF
|
||||
LOG = logging.getLogger("APRSD")
|
||||
@ -166,7 +165,8 @@ class APRSDWatchListPluginBase(APRSDPluginBase, metaclass=abc.ABCMeta):
|
||||
except Exception as ex:
|
||||
LOG.error(
|
||||
"Plugin {} failed to process packet {}".format(
|
||||
self.__class__, ex,
|
||||
self.__class__,
|
||||
ex,
|
||||
),
|
||||
)
|
||||
if result:
|
||||
@ -214,7 +214,9 @@ class APRSDRegexCommandPluginBase(APRSDPluginBase, metaclass=abc.ABCMeta):
|
||||
return result
|
||||
|
||||
if not isinstance(packet, packets.MessagePacket):
|
||||
LOG.warning(f"{self.__class__.__name__} Got a {packet.__class__.__name__} ignoring")
|
||||
LOG.warning(
|
||||
f"{self.__class__.__name__} Got a {packet.__class__.__name__} ignoring"
|
||||
)
|
||||
return packets.NULL_MESSAGE
|
||||
|
||||
result = None
|
||||
@ -236,7 +238,8 @@ class APRSDRegexCommandPluginBase(APRSDPluginBase, metaclass=abc.ABCMeta):
|
||||
except Exception as ex:
|
||||
LOG.error(
|
||||
"Plugin {} failed to process packet {}".format(
|
||||
self.__class__, ex,
|
||||
self.__class__,
|
||||
ex,
|
||||
),
|
||||
)
|
||||
LOG.exception(ex)
|
||||
@ -286,7 +289,8 @@ class HelpPlugin(APRSDRegexCommandPluginBase):
|
||||
reply = None
|
||||
for p in pm.get_plugins():
|
||||
if (
|
||||
p.enabled and isinstance(p, APRSDRegexCommandPluginBase)
|
||||
p.enabled
|
||||
and isinstance(p, APRSDRegexCommandPluginBase)
|
||||
and p.command_name.lower() == command_name
|
||||
):
|
||||
reply = p.help()
|
||||
@ -345,6 +349,7 @@ class PluginManager:
|
||||
|
||||
def stats(self, serializable=False) -> dict:
|
||||
"""Collect and return stats for all plugins."""
|
||||
|
||||
def full_name_with_qualname(obj):
|
||||
return "{}.{}".format(
|
||||
obj.__class__.__module__,
|
||||
@ -354,7 +359,6 @@ class PluginManager:
|
||||
plugin_stats = {}
|
||||
plugins = self.get_plugins()
|
||||
if plugins:
|
||||
|
||||
for p in plugins:
|
||||
plugin_stats[full_name_with_qualname(p)] = {
|
||||
"enabled": p.enabled,
|
||||
@ -439,7 +443,9 @@ class PluginManager:
|
||||
)
|
||||
self._watchlist_pm.register(plugin_obj)
|
||||
else:
|
||||
LOG.warning(f"Plugin {plugin_obj.__class__.__name__} is disabled")
|
||||
LOG.warning(
|
||||
f"Plugin {plugin_obj.__class__.__name__} is disabled"
|
||||
)
|
||||
elif isinstance(plugin_obj, APRSDRegexCommandPluginBase):
|
||||
if plugin_obj.enabled:
|
||||
LOG.info(
|
||||
@ -451,7 +457,9 @@ class PluginManager:
|
||||
)
|
||||
self._pluggy_pm.register(plugin_obj)
|
||||
else:
|
||||
LOG.warning(f"Plugin {plugin_obj.__class__.__name__} is disabled")
|
||||
LOG.warning(
|
||||
f"Plugin {plugin_obj.__class__.__name__} is disabled"
|
||||
)
|
||||
elif isinstance(plugin_obj, APRSDPluginBase):
|
||||
if plugin_obj.enabled:
|
||||
LOG.info(
|
||||
@ -462,7 +470,9 @@ class PluginManager:
|
||||
)
|
||||
self._pluggy_pm.register(plugin_obj)
|
||||
else:
|
||||
LOG.warning(f"Plugin {plugin_obj.__class__.__name__} is disabled")
|
||||
LOG.warning(
|
||||
f"Plugin {plugin_obj.__class__.__name__} is disabled"
|
||||
)
|
||||
except Exception as ex:
|
||||
LOG.error(f"Couldn't load plugin '{plugin_name}'")
|
||||
LOG.exception(ex)
|
||||
@ -473,7 +483,8 @@ class PluginManager:
|
||||
self.setup_plugins(load_help_plugin=CONF.load_help_plugin)
|
||||
|
||||
def setup_plugins(
|
||||
self, load_help_plugin=True,
|
||||
self,
|
||||
load_help_plugin=True,
|
||||
plugin_list=[],
|
||||
):
|
||||
"""Create the plugin manager and register plugins."""
|
||||
|
@ -4,7 +4,6 @@ from oslo_config import cfg
|
||||
|
||||
from aprsd import packets, plugin
|
||||
|
||||
|
||||
CONF = cfg.CONF
|
||||
LOG = logging.getLogger("APRSD")
|
||||
|
||||
@ -43,9 +42,7 @@ class NotifySeenPlugin(plugin.APRSDWatchListPluginBase):
|
||||
pkt = packets.MessagePacket(
|
||||
from_call=CONF.callsign,
|
||||
to_call=notify_callsign,
|
||||
message_text=(
|
||||
f"{fromcall} was just seen by type:'{packet_type}'"
|
||||
),
|
||||
message_text=(f"{fromcall} was just seen by type:'{packet_type}'"),
|
||||
allow_delay=False,
|
||||
)
|
||||
pkt.allow_delay = False
|
||||
|
@ -2,13 +2,12 @@ import json
|
||||
import logging
|
||||
import re
|
||||
|
||||
from oslo_config import cfg
|
||||
import requests
|
||||
from oslo_config import cfg
|
||||
|
||||
from aprsd import plugin, plugin_utils
|
||||
from aprsd.utils import trace
|
||||
|
||||
|
||||
CONF = cfg.CONF
|
||||
LOG = logging.getLogger("APRSD")
|
||||
|
||||
@ -205,8 +204,9 @@ class OWMWeatherPlugin(plugin.APRSDRegexCommandPluginBase):
|
||||
|
||||
def help(self):
|
||||
_help = [
|
||||
"openweathermap: Send {} to get weather "
|
||||
"from your location".format(self.command_regex),
|
||||
"openweathermap: Send {} to get weather " "from your location".format(
|
||||
self.command_regex
|
||||
),
|
||||
"openweathermap: Send {} <callsign> to get "
|
||||
"weather from <callsign>".format(self.command_regex),
|
||||
]
|
||||
@ -327,10 +327,12 @@ class AVWXWeatherPlugin(plugin.APRSDRegexCommandPluginBase):
|
||||
|
||||
def help(self):
|
||||
_help = [
|
||||
"avwxweather: Send {} to get weather "
|
||||
"from your location".format(self.command_regex),
|
||||
"avwxweather: Send {} <callsign> to get "
|
||||
"weather from <callsign>".format(self.command_regex),
|
||||
"avwxweather: Send {} to get weather " "from your location".format(
|
||||
self.command_regex
|
||||
),
|
||||
"avwxweather: Send {} <callsign> to get " "weather from <callsign>".format(
|
||||
self.command_regex
|
||||
),
|
||||
]
|
||||
return _help
|
||||
|
||||
|
@ -3,13 +3,13 @@ from typing import Callable, Protocol, runtime_checkable
|
||||
|
||||
from aprsd.utils import singleton
|
||||
|
||||
|
||||
LOG = logging.getLogger("APRSD")
|
||||
|
||||
|
||||
@runtime_checkable
|
||||
class StatsProducer(Protocol):
|
||||
"""The StatsProducer protocol is used to define the interface for collecting stats."""
|
||||
|
||||
def stats(self, serializable=False) -> dict:
|
||||
"""provide stats in a dictionary format."""
|
||||
...
|
||||
@ -18,6 +18,7 @@ class StatsProducer(Protocol):
|
||||
@singleton
|
||||
class Collector:
|
||||
"""The Collector class is used to collect stats from multiple StatsProducer instances."""
|
||||
|
||||
def __init__(self):
|
||||
self.producers: list[Callable] = []
|
||||
|
||||
@ -26,7 +27,9 @@ class Collector:
|
||||
for name in self.producers:
|
||||
cls = name()
|
||||
try:
|
||||
stats[cls.__class__.__name__] = cls.stats(serializable=serializable).copy()
|
||||
stats[cls.__class__.__name__] = cls.stats(
|
||||
serializable=serializable
|
||||
).copy()
|
||||
except Exception as e:
|
||||
LOG.error(f"Error in producer {name} (stats): {e}")
|
||||
return stats
|
||||
|
@ -4,8 +4,9 @@ import queue
|
||||
# aprsd.threads
|
||||
from .aprsd import APRSDThread, APRSDThreadList # noqa: F401
|
||||
from .rx import ( # noqa: F401
|
||||
APRSDDupeRXThread, APRSDProcessPacketThread, APRSDRXThread,
|
||||
APRSDDupeRXThread,
|
||||
APRSDProcessPacketThread,
|
||||
APRSDRXThread,
|
||||
)
|
||||
|
||||
|
||||
packet_queue = queue.Queue(maxsize=20)
|
||||
|
@ -7,7 +7,6 @@ from typing import List
|
||||
|
||||
import wrapt
|
||||
|
||||
|
||||
LOG = logging.getLogger("APRSD")
|
||||
|
||||
|
||||
@ -25,7 +24,7 @@ class APRSDThread(threading.Thread, metaclass=abc.ABCMeta):
|
||||
self._last_loop = datetime.datetime.now()
|
||||
|
||||
def _should_quit(self):
|
||||
""" see if we have a quit message from the global queue."""
|
||||
"""see if we have a quit message from the global queue."""
|
||||
if self.thread_stop:
|
||||
return True
|
||||
|
||||
@ -51,7 +50,9 @@ class APRSDThread(threading.Thread, metaclass=abc.ABCMeta):
|
||||
"""Add code to subclass to do any cleanup"""
|
||||
|
||||
def __str__(self):
|
||||
out = f"Thread <{self.__class__.__name__}({self.name}) Alive? {self.is_alive()}>"
|
||||
out = (
|
||||
f"Thread <{self.__class__.__name__}({self.name}) Alive? {self.is_alive()}>"
|
||||
)
|
||||
return out
|
||||
|
||||
def loop_age(self):
|
||||
@ -124,7 +125,7 @@ class APRSDThreadList:
|
||||
for th in self.threads_list:
|
||||
LOG.info(f"Stopping Thread {th.name}")
|
||||
if hasattr(th, "packet"):
|
||||
LOG.info(F"{th.name} packet {th.packet}")
|
||||
LOG.info(f"{th.name} packet {th.packet}")
|
||||
th.stop()
|
||||
|
||||
@wrapt.synchronized
|
||||
@ -133,7 +134,7 @@ class APRSDThreadList:
|
||||
for th in self.threads_list:
|
||||
LOG.info(f"Pausing Thread {th.name}")
|
||||
if hasattr(th, "packet"):
|
||||
LOG.info(F"{th.name} packet {th.packet}")
|
||||
LOG.info(f"{th.name} packet {th.packet}")
|
||||
th.pause()
|
||||
|
||||
@wrapt.synchronized
|
||||
@ -142,7 +143,7 @@ class APRSDThreadList:
|
||||
for th in self.threads_list:
|
||||
LOG.info(f"Resuming Thread {th.name}")
|
||||
if hasattr(th, "packet"):
|
||||
LOG.info(F"{th.name} packet {th.packet}")
|
||||
LOG.info(f"{th.name} packet {th.packet}")
|
||||
th.unpause()
|
||||
|
||||
@wrapt.synchronized(lock)
|
||||
@ -153,7 +154,11 @@ class APRSDThreadList:
|
||||
alive = thread.is_alive()
|
||||
age = thread.loop_age()
|
||||
key = thread.__class__.__name__
|
||||
info[key] = {"alive": True if alive else False, "age": age, "name": thread.name}
|
||||
info[key] = {
|
||||
"alive": True if alive else False,
|
||||
"age": age,
|
||||
"name": thread.name,
|
||||
}
|
||||
return info
|
||||
|
||||
@wrapt.synchronized(lock)
|
||||
|
@ -9,8 +9,8 @@ from oslo_config import cfg
|
||||
from aprsd import packets, utils
|
||||
from aprsd.log import log as aprsd_log
|
||||
from aprsd.stats import collector
|
||||
from aprsd.threads import APRSDThread, APRSDThreadList, keepalive_collector
|
||||
|
||||
from aprsd.threads import APRSDThread, APRSDThreadList
|
||||
from aprsd.utils import keepalive_collector
|
||||
|
||||
CONF = cfg.CONF
|
||||
LOG = logging.getLogger("APRSD")
|
||||
@ -34,9 +34,14 @@ class KeepAliveThread(APRSDThread):
|
||||
thread_list = APRSDThreadList()
|
||||
now = datetime.datetime.now()
|
||||
|
||||
if "APRSClientStats" in stats_json and stats_json["APRSClientStats"].get("transport") == "aprsis":
|
||||
if (
|
||||
"APRSClientStats" in stats_json
|
||||
and stats_json["APRSClientStats"].get("transport") == "aprsis"
|
||||
):
|
||||
if stats_json["APRSClientStats"].get("server_keepalive"):
|
||||
last_msg_time = utils.strfdelta(now - stats_json["APRSClientStats"]["server_keepalive"])
|
||||
last_msg_time = utils.strfdelta(
|
||||
now - stats_json["APRSClientStats"]["server_keepalive"]
|
||||
)
|
||||
else:
|
||||
last_msg_time = "N/A"
|
||||
else:
|
||||
|
@ -1,19 +1,19 @@
import logging
import time

from oslo_config import cfg
import requests
from oslo_config import cfg

import aprsd
from aprsd import threads as aprsd_threads

CONF = cfg.CONF
LOG = logging.getLogger("APRSD")


class APRSRegistryThread(aprsd_threads.APRSDThread):
"""This sends service information to the configured APRS Registry."""

_loop_cnt: int = 1

def __init__(self):
@ -41,7 +41,7 @@ class APRSRegistryThread(aprsd_threads.APRSDThread):
"description": CONF.aprs_registry.description,
"service_website": CONF.aprs_registry.service_website,
"software": f"APRSD version {aprsd.__version__} "
"https://github.com/craigerl/aprsd",
"https://github.com/craigerl/aprsd",
}
try:
requests.post(
@ -13,7 +13,6 @@ from aprsd.packets import log as packet_log
from aprsd.threads import APRSDThread, tx
from aprsd.utils import trace

CONF = cfg.CONF
LOG = logging.getLogger("APRSD")

@ -53,7 +52,9 @@ class APRSDRXThread(APRSDThread):
# kwargs. :(
# https://github.com/rossengeorgiev/aprs-python/pull/56
self._client.consumer(
self._process_packet, raw=False, blocking=False,
self._process_packet,
raw=False,
blocking=False,
)
except (
aprslib.exceptions.ConnectionDrop,
@ -138,7 +139,9 @@ class APRSDDupeRXThread(APRSDRXThread):
elif packet.timestamp - found.timestamp < CONF.packet_dupe_timeout:
# If the packet came in within N seconds of the
# Last time seeing the packet, then we drop it as a dupe.
LOG.warning(f"Packet {packet.from_call}:{packet.msgNo} already tracked, dropping.")
LOG.warning(
f"Packet {packet.from_call}:{packet.msgNo} already tracked, dropping."
)
else:
LOG.warning(
f"Packet {packet.from_call}:{packet.msgNo} already tracked "
@ -149,7 +152,7 @@ class APRSDDupeRXThread(APRSDRXThread):


class APRSDPluginRXThread(APRSDDupeRXThread):
""""Process received packets.
""" "Process received packets.

For backwards compatibility, we keep the APRSDPluginRXThread.
"""
@ -249,7 +252,8 @@ class APRSDProcessPacketThread(APRSDThread):
self.process_other_packet(packet, for_us=False)
else:
self.process_other_packet(
packet, for_us=(to_call.lower() == our_call),
packet,
for_us=(to_call.lower() == our_call),
)
LOG.debug(f"Packet processing complete for pkt '{packet.key}'")
return False
@ -349,7 +353,6 @@ class APRSDPluginProcessPacketThread(APRSDProcessPacketThread):
# If the message was for us and we didn't have a
# response, then we send a usage statement.
if to_call == CONF.callsign and not replied:

# Tailor the messages accordingly
if CONF.load_help_plugin:
LOG.warning("Sending help!")
@ -2,20 +2,20 @@ import logging
import threading
import time

from oslo_config import cfg
import wrapt
from oslo_config import cfg

from aprsd.stats import collector
from aprsd.threads import APRSDThread
from aprsd.utils import objectstore

CONF = cfg.CONF
LOG = logging.getLogger("APRSD")


class StatsStore(objectstore.ObjectStoreMixin):
"""Container to save the stats from the collector."""

lock = threading.Lock()
data = {}
@ -2,20 +2,18 @@ import logging
import threading
import time

import wrapt
from oslo_config import cfg
from rush import quota, throttle
from rush.contrib import decorator
from rush.limiters import periodic
from rush.stores import dictionary
import wrapt

from aprsd import conf # noqa
from aprsd import threads as aprsd_threads
from aprsd.client import client_factory
from aprsd.packets import collector, core
from aprsd.packets import collector, core, tracker
from aprsd.packets import log as packet_log
from aprsd.packets import tracker

CONF = cfg.CONF
LOG = logging.getLogger("APRSD")
@ -238,6 +236,7 @@ class BeaconSendThread(aprsd_threads.APRSDThread):

Settings are in the [DEFAULT] section of the config file.
"""

_loop_cnt: int = 1

def __init__(self):
@ -13,11 +13,11 @@ import update_checker
import aprsd

from .fuzzyclock import fuzzy # noqa: F401

# Make these available by anyone importing
# aprsd.utils
from .ring_buffer import RingBuffer # noqa: F401

if sys.version_info.major == 3 and sys.version_info.minor >= 3:
from collections.abc import MutableMapping
else:
@ -26,11 +26,13 @@ else:

def singleton(cls):
"""Make a class a Singleton class (only one instance)"""

@functools.wraps(cls)
def wrapper_singleton(*args, **kwargs):
if wrapper_singleton.instance is None:
wrapper_singleton.instance = cls(*args, **kwargs)
return wrapper_singleton.instance

wrapper_singleton.instance = None
return wrapper_singleton
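
For context on the hunk above: the singleton decorator caches one instance on the wrapper function, so every later call returns that same object. A minimal usage sketch (the Registry class is hypothetical):

    @singleton
    class Registry:
        def __init__(self):
            self.items = []


    a = Registry()
    b = Registry()
    assert a is b  # both names refer to the single cached instance
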
@ -170,7 +172,10 @@ def load_entry_points(group):
try:
ep.load()
except Exception as e:
print(f"Extension {ep.name} of group {group} failed to load with {e}", file=sys.stderr)
print(
f"Extension {ep.name} of group {group} failed to load with {e}",
file=sys.stderr,
)
print(traceback.format_exc(), file=sys.stderr)

@ -200,8 +205,7 @@ def calculate_initial_compass_bearing(point_a, point_b):

x = math.sin(diff_long) * math.cos(lat2)
y = math.cos(lat1) * math.sin(lat2) - (
math.sin(lat1)
* math.cos(lat2) * math.cos(diff_long)
math.sin(lat1) * math.cos(lat2) * math.cos(diff_long)
)

initial_bearing = math.atan2(x, y)
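
The bearing hunk above only reflows the standard initial great-circle bearing terms. Assuming the coordinates are already in radians and the result is normalized in the usual way, a small worked sketch (the sample coordinates are illustrative):

    import math

    # Initial bearing from roughly (30N, 100W) toward (40N, 90W).
    lat1, lon1 = math.radians(30.0), math.radians(-100.0)
    lat2, lon2 = math.radians(40.0), math.radians(-90.0)
    diff_long = lon2 - lon1

    x = math.sin(diff_long) * math.cos(lat2)
    y = math.cos(lat1) * math.sin(lat2) - (
        math.sin(lat1) * math.cos(lat2) * math.cos(diff_long)
    )
    initial_bearing = math.atan2(x, y)
    # atan2 yields (-pi, pi]; fold it into a 0-360 degree compass bearing.
    compass_bearing = (math.degrees(initial_bearing) + 360) % 360
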
@ -218,15 +222,43 @@
def degrees_to_cardinal(bearing, full_string=False):
if full_string:
directions = [
"North", "North-Northeast", "Northeast", "East-Northeast", "East", "East-Southeast",
"Southeast", "South-Southeast", "South", "South-Southwest", "Southwest", "West-Southwest",
"West", "West-Northwest", "Northwest", "North-Northwest", "North",
"North",
"North-Northeast",
"Northeast",
"East-Northeast",
"East",
"East-Southeast",
"Southeast",
"South-Southeast",
"South",
"South-Southwest",
"Southwest",
"West-Southwest",
"West",
"West-Northwest",
"Northwest",
"North-Northwest",
"North",
]
else:
directions = [
"N", "NNE", "NE", "ENE", "E", "ESE",
"SE", "SSE", "S", "SSW", "SW", "WSW",
"W", "WNW", "NW", "NNW", "N",
"N",
"NNE",
"NE",
"ENE",
"E",
"ESE",
"SE",
"SSE",
"S",
"SSW",
"SW",
"WSW",
"W",
"WNW",
"NW",
"NNW",
"N",
]

cardinal = directions[round(bearing / 22.5)]
@ -10,8 +10,13 @@ class EnhancedJSONEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, datetime.datetime):
args = (
"year", "month", "day", "hour", "minute",
"second", "microsecond",
"year",
"month",
"day",
"hour",
"minute",
"second",
"microsecond",
)
return {
"__type__": "datetime.datetime",
@ -63,10 +68,10 @@ class SimpleJSONEncoder(json.JSONEncoder):


class EnhancedJSONDecoder(json.JSONDecoder):

def __init__(self, *args, **kwargs):
super().__init__(
*args, object_hook=self.object_hook,
*args,
object_hook=self.object_hook,
**kwargs,
)
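
The encoder above tags a datetime with "__type__" plus the attribute names in args, and the decoder installs an object_hook to rebuild it. The exact payload keys are not fully visible in this hunk, so the round-trip below is a hedged sketch with stand-in class names, not APRSD's actual implementation:

    import datetime
    import json

    ARGS = ("year", "month", "day", "hour", "minute", "second", "microsecond")


    class DateTimeEncoder(json.JSONEncoder):
        # Hypothetical stand-in for EnhancedJSONEncoder.
        def default(self, obj):
            if isinstance(obj, datetime.datetime):
                return {
                    "__type__": "datetime.datetime",
                    "args": [getattr(obj, name) for name in ARGS],
                }
            return super().default(obj)


    class DateTimeDecoder(json.JSONDecoder):
        # Mirrors the object_hook wiring shown above.
        def __init__(self, *args, **kwargs):
            super().__init__(*args, object_hook=self.object_hook, **kwargs)

        def object_hook(self, obj):
            if obj.get("__type__") == "datetime.datetime":
                return datetime.datetime(*obj["args"])
            return obj


    now = datetime.datetime.now()
    restored = json.loads(json.dumps({"ts": now}, cls=DateTimeEncoder), cls=DateTimeDecoder)
    assert restored["ts"] == now
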
@ -3,13 +3,13 @@ from typing import Callable, Protocol, runtime_checkable

from aprsd.utils import singleton

LOG = logging.getLogger("APRSD")


@runtime_checkable
class KeepAliveProducer(Protocol):
"""The KeepAliveProducer protocol is used to define the interface for running Keepalive checks."""

def keepalive_check(self) -> dict:
"""Check for keepalive."""
...
@ -22,6 +22,7 @@ class KeepAliveProducer(Protocol):
@singleton
class KeepAliveCollector:
"""The Collector class is used to collect stats from multiple StatsProducer instances."""

def __init__(self):
self.producers: list[Callable] = []
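
Because KeepAliveProducer above is a runtime-checkable Protocol, any object exposing keepalive_check() -> dict satisfies it; a minimal sketch (the producer class is hypothetical, and how producers are registered with the collector is not shown in this hunk):

    class MyProducer:
        def keepalive_check(self) -> dict:
            # Whatever per-component health details the keepalive report shows.
            return {"last_loop": "ok"}


    assert isinstance(MyProducer(), KeepAliveProducer)
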
@ -5,7 +5,6 @@ import logging
import time
import types

VALID_TRACE_FLAGS = {"method", "api"}
TRACE_API = False
TRACE_METHOD = False
@ -27,7 +26,6 @@ def trace(*dec_args, **dec_kwargs):
"""

def _decorator(f):

func_name = f.__qualname__
func_file = "/".join(f.__code__.co_filename.split("/")[-4:])
@ -18,7 +18,7 @@ description = "APRSd is a APRS-IS server that can be used to connect to APRS-IS
# 'Programming Language' classifiers in this file, 'pip install' will check this
# and refuse to install the project if the version does not match. See
# https://packaging.python.org/guides/distributing-packages-using-setuptools/#python-requires
requires-python = ">=3.8"
requires-python = ">=3.9"

dynamic = ["version", "dependencies", "optional-dependencies"]
@ -1,12 +1,4 @@
build
check-manifest
flake8
gray
isort
mypy
pep8-naming
pytest
pytest-cov
pip
pip-tools
pre-commit
@ -1,65 +1,40 @@
#
# This file is autogenerated by pip-compile with Python 3.12
# This file is autogenerated by pip-compile with Python 3.10
# by the following command:
#
# pip-compile --annotation-style=line requirements-dev.in
#
add-trailing-comma==3.1.0 # via gray
alabaster==1.0.0 # via sphinx
autoflake==1.5.3 # via gray
babel==2.16.0 # via sphinx
black==24.10.0 # via gray
build==1.2.2.post1 # via -r requirements-dev.in, check-manifest, pip-tools
build==1.2.2.post1 # via -r requirements-dev.in, pip-tools
cachetools==5.5.0 # via tox
certifi==2024.8.30 # via requests
certifi==2024.12.14 # via requests
cfgv==3.4.0 # via pre-commit
chardet==5.2.0 # via tox
charset-normalizer==3.4.0 # via requests
check-manifest==0.50 # via -r requirements-dev.in
click==8.1.7 # via black, fixit, moreorless, pip-tools
click==8.1.7 # via pip-tools
colorama==0.4.6 # via tox
commonmark==0.9.1 # via rich
configargparse==1.7 # via gray
coverage[toml]==7.6.9 # via pytest-cov
distlib==0.3.9 # via virtualenv
docutils==0.21.2 # via m2r, sphinx
filelock==3.16.1 # via tox, virtualenv
fixit==2.1.0 # via gray
flake8==7.1.1 # via -r requirements-dev.in, pep8-naming
gray==0.15.0 # via -r requirements-dev.in
identify==2.6.3 # via pre-commit
idna==3.10 # via requests
imagesize==1.4.1 # via sphinx
iniconfig==2.0.0 # via pytest
isort==5.13.2 # via -r requirements-dev.in, gray
jinja2==3.1.4 # via sphinx
libcst==1.5.1 # via fixit
m2r==0.3.1 # via -r requirements-dev.in
markupsafe==3.0.2 # via jinja2
mccabe==0.7.0 # via flake8
mistune==0.8.4 # via m2r
moreorless==0.4.0 # via fixit
mypy==1.13.0 # via -r requirements-dev.in
mypy-extensions==1.0.0 # via black, mypy
nodeenv==1.9.1 # via pre-commit
packaging==24.2 # via black, build, fixit, pyproject-api, pytest, sphinx, tox
pathspec==0.12.1 # via black, trailrunner
pep8-naming==0.14.1 # via -r requirements-dev.in
packaging==24.2 # via build, pyproject-api, sphinx, tox
pip-tools==7.4.1 # via -r requirements-dev.in
platformdirs==4.3.6 # via black, tox, virtualenv
pluggy==1.5.0 # via pytest, tox
platformdirs==4.3.6 # via tox, virtualenv
pluggy==1.5.0 # via tox
pre-commit==4.0.1 # via -r requirements-dev.in
pycodestyle==2.12.1 # via flake8
pyflakes==3.2.0 # via autoflake, flake8
pygments==2.18.0 # via rich, sphinx
pygments==2.18.0 # via sphinx
pyproject-api==1.8.0 # via tox
pyproject-hooks==1.2.0 # via build, pip-tools
pytest==8.3.4 # via -r requirements-dev.in, pytest-cov
pytest-cov==6.0.0 # via -r requirements-dev.in
pyupgrade==3.19.0 # via gray
pyyaml==6.0.2 # via libcst, pre-commit
pyyaml==6.0.2 # via pre-commit
requests==2.32.3 # via sphinx
rich==12.6.0 # via gray
snowballstemmer==2.2.0 # via sphinx
sphinx==8.1.3 # via -r requirements-dev.in
sphinxcontrib-applehelp==2.0.0 # via sphinx
@ -68,13 +43,9 @@ sphinxcontrib-htmlhelp==2.1.0 # via sphinx
sphinxcontrib-jsmath==1.0.1 # via sphinx
sphinxcontrib-qthelp==2.0.0 # via sphinx
sphinxcontrib-serializinghtml==2.0.0 # via sphinx
tokenize-rt==6.1.0 # via add-trailing-comma, pyupgrade
toml==0.10.2 # via autoflake
tomli==2.2.1 # via build, pip-tools, pyproject-api, sphinx, tox
tox==4.23.2 # via -r requirements-dev.in
trailrunner==1.4.0 # via fixit
typing-extensions==4.12.2 # via mypy
unify==0.5 # via gray
untokenize==0.1.1 # via unify
typing-extensions==4.12.2 # via tox
urllib3==2.2.3 # via requests
virtualenv==20.28.0 # via pre-commit, tox
wheel==0.45.1 # via -r requirements-dev.in, pip-tools
@ -1,15 +1,15 @@
#
# This file is autogenerated by pip-compile with Python 3.12
# This file is autogenerated by pip-compile with Python 3.10
# by the following command:
#
# pip-compile --annotation-style=line requirements.in
#
aprslib==0.7.2 # via -r requirements.in
attrs==24.2.0 # via ax253, kiss3, rush
attrs==24.3.0 # via ax253, kiss3, rush
ax253==0.1.5.post1 # via kiss3
beautifulsoup4==4.12.3 # via -r requirements.in
bitarray==3.0.0 # via ax253, kiss3
certifi==2024.8.30 # via requests
certifi==2024.12.14 # via requests
charset-normalizer==3.4.0 # via requests
click==8.1.7 # via -r requirements.in
commonmark==0.9.1 # via rich
@ -20,7 +20,7 @@ idna==3.10 # via requests
importlib-metadata==8.5.0 # via ax253, kiss3
kiss3==8.0.0 # via -r requirements.in
loguru==0.7.3 # via -r requirements.in
marshmallow==3.23.1 # via dataclasses-json
marshmallow==3.23.2 # via dataclasses-json
mypy-extensions==1.0.0 # via typing-inspect
netaddr==1.3.0 # via oslo-config
oslo-config==9.7.0 # via -r requirements.in
@ -54,6 +54,7 @@ class TestAPRSISClient(unittest.TestCase):
with mock.patch.object(APRSISClient, "is_configured", return_value=True):
stats = self.client.stats()
from rich.console import Console

c = Console()
c.print(stats)
self.assertEqual(
@ -109,7 +109,8 @@ class TestAPRSClient(unittest.TestCase):
"""Test handling of client creation failure."""
# Make setup_connection raise an exception
with mock.patch.object(
self.client, "setup_connection",
self.client,
"setup_connection",
side_effect=Exception("Connection failed"),
):
with self.assertRaises(Exception):
@ -11,13 +11,11 @@ from aprsd.main import cli

from .. import fake

CONF = cfg.CONF
F = t.TypeVar("F", bound=t.Callable[..., t.Any])


class TestSendMessageCommand(unittest.TestCase):

def config_and_init(self, login=None, password=None):
CONF.callsign = fake.FAKE_TO_CALLSIGN
CONF.trace_enabled = False
@ -41,7 +39,8 @@ class TestSendMessageCommand(unittest.TestCase):
runner = CliRunner()

result = runner.invoke(
cli, ["send-message"],
cli,
["send-message"],
catch_exceptions=False,
)
assert result.exit_code == 2
@ -58,7 +57,8 @@ class TestSendMessageCommand(unittest.TestCase):
runner = CliRunner()

result = runner.invoke(
cli, ["send-message", "WB4BOR"],
cli,
["send-message", "WB4BOR"],
catch_exceptions=False,
)
assert result.exit_code == 2
@ -1,7 +1,6 @@
from aprsd import plugin, threads
from aprsd.packets import core

FAKE_MESSAGE_TEXT = "fake MeSSage"
FAKE_FROM_CALLSIGN = "KFAKE"
FAKE_TO_CALLSIGN = "KMINE"
@ -88,6 +87,5 @@ class FakeRegexCommandPlugin(plugin.APRSDRegexCommandPluginBase):

class FakeWatchListPlugin(plugin.APRSDWatchListPluginBase):

def process(self, packet):
return FAKE_MESSAGE_TEXT
@ -2,13 +2,15 @@ from unittest import mock

from oslo_config import cfg

from aprsd import client, packets
from aprsd import conf # noqa: F401
from aprsd import (
client,
conf, # noqa: F401
packets,
)
from aprsd.plugins import notify as notify_plugin

from .. import fake, test_plugin

CONF = cfg.CONF
DEFAULT_WATCHLIST_CALLSIGNS = fake.FAKE_FROM_CALLSIGN

@ -49,7 +51,6 @@ class TestWatchListPlugin(test_plugin.TestPlugin):

class TestAPRSDWatchListPluginBase(TestWatchListPlugin):

def test_watchlist_not_enabled(self):
self.config_and_init(watchlist_enabled=False)
plugin = fake.FakeWatchListPlugin()
@ -79,7 +80,6 @@ class TestAPRSDWatchListPluginBase(TestWatchListPlugin):

class TestNotifySeenPlugin(TestWatchListPlugin):

def test_disabled(self):
self.config_and_init(watchlist_enabled=False)
plugin = notify_plugin.NotifySeenPlugin()
@ -128,7 +128,9 @@ class TestNotifySeenPlugin(TestWatchListPlugin):

@mock.patch("aprsd.client.factory.ClientFactory", autospec=True)
@mock.patch("aprsd.packets.WatchList.is_old")
def test_callsign_in_watchlist_old_same_alert_callsign(self, mock_is_old, mock_factory):
def test_callsign_in_watchlist_old_same_alert_callsign(
self, mock_is_old, mock_factory
):
client.client_factory = mock_factory
mock_is_old.return_value = True
self.config_and_init(
@ -1,19 +1,17 @@
from unittest import mock

from oslo_config import cfg
import pytz
from oslo_config import cfg

from aprsd.plugins import time as time_plugin
from aprsd.utils import fuzzy

from .. import fake, test_plugin

CONF = cfg.CONF


class TestTimePlugins(test_plugin.TestPlugin):

@mock.patch("aprsd.plugins.time.TimePlugin._get_local_tz")
@mock.patch("aprsd.plugins.time.TimePlugin._get_utcnow")
def test_time(self, mock_utcnow, mock_localtz):
@ -7,12 +7,10 @@ from aprsd.plugins import version as version_plugin

from .. import fake, test_plugin

CONF = cfg.CONF


class TestVersionPlugin(test_plugin.TestPlugin):

@mock.patch("aprsd.stats.app.APRSDStats.uptime")
def test_version(self, mock_stats):
mock_stats.return_value = "00:00:00"
@ -7,12 +7,10 @@ from aprsd.plugins import weather as weather_plugin

from .. import fake, test_plugin

CONF = cfg.CONF


class TestUSWeatherPlugin(test_plugin.TestPlugin):

def test_not_enabled_missing_aprs_fi_key(self):
# When the aprs.fi api key isn't set, then
# the LocationPlugin will be disabled.
@ -108,7 +106,6 @@ class TestUSWeatherPlugin(test_plugin.TestPlugin):

class TestUSMetarPlugin(test_plugin.TestPlugin):

def test_not_enabled_missing_aprs_fi_key(self):
# When the aprs.fi api key isn't set, then
# the LocationPlugin will be disabled.
@ -161,9 +158,9 @@ class TestUSMetarPlugin(test_plugin.TestPlugin):

@mock.patch("aprsd.plugin_utils.get_weather_gov_metar")
def test_airport_works(self, mock_metar):

class Response:
text = '{"properties": {"rawMessage": "BOGUSMETAR"}}'

mock_metar.return_value = Response()

CONF.aprs_fi.apiKey = "abc123"
@ -11,7 +11,6 @@ from . import fake

class TestPacketBase(unittest.TestCase):

def _fake_dict(
self,
from_call=fake.FAKE_FROM_CALLSIGN,
@ -79,7 +78,6 @@ class TestPacketBase(unittest.TestCase):

@mock.patch("aprsd.packets.core.GPSPacket._build_time_zulu")
def test_packet_format_rain_1h(self, mock_time_zulu):

mock_time_zulu.return_value = "221450"

wx = packets.WeatherPacket(
@ -106,7 +104,9 @@ class TestPacketBase(unittest.TestCase):

def test_beacon_factory(self):
"""Test to ensure a beacon packet is created."""
packet_raw = "WB4BOR-12>APZ100,WIDE2-1:@161647z3724.15N107847.58W$ APRSD WebChat"
packet_raw = (
"WB4BOR-12>APZ100,WIDE2-1:@161647z3724.15N107847.58W$ APRSD WebChat"
)
packet_dict = aprslib.parse(packet_raw)
packet = packets.factory(packet_dict)
self.assertIsInstance(packet, packets.BeaconPacket)
@ -162,7 +162,9 @@ class TestPacketBase(unittest.TestCase):

# Packet with telemetry and DAO
# http://www.aprs.org/datum.txt
packet_raw = 'KD9YIL>T0PX9W,WIDE1-1,WIDE2-1,qAO,NU9R-10:`sB,l#P>/\'"6+}|#*%U\'a|!whl!|3'
packet_raw = (
"KD9YIL>T0PX9W,WIDE1-1,WIDE2-1,qAO,NU9R-10:`sB,l#P>/'\"6+}|#*%U'a|!whl!|3"
)
packet_dict = aprslib.parse(packet_raw)
packet = packets.factory(packet_dict)
self.assertIsInstance(packet, packets.MicEPacket)
@ -175,7 +177,9 @@ class TestPacketBase(unittest.TestCase):
msgNo=123,
)

expected = f"{fake.FAKE_FROM_CALLSIGN}>APZ100::{fake.FAKE_TO_CALLSIGN:<9}:ack123"
expected = (
f"{fake.FAKE_FROM_CALLSIGN}>APZ100::{fake.FAKE_TO_CALLSIGN:<9}:ack123"
)
self.assertEqual(expected, str(ack))

def test_reject_format(self):
@ -186,7 +190,9 @@ class TestPacketBase(unittest.TestCase):
msgNo=123,
)

expected = f"{fake.FAKE_FROM_CALLSIGN}>APZ100::{fake.FAKE_TO_CALLSIGN:<9}:rej123"
expected = (
f"{fake.FAKE_FROM_CALLSIGN}>APZ100::{fake.FAKE_TO_CALLSIGN:<9}:rej123"
)
self.assertEqual(expected, str(reject))

def test_beacon_format(self):
@ -240,7 +246,9 @@ class TestPacketBase(unittest.TestCase):
bid=0,
)

expected = f"{fake.FAKE_FROM_CALLSIGN}>APZ100::BLN{bid:<9}:{packet.message_text}"
expected = (
f"{fake.FAKE_FROM_CALLSIGN}>APZ100::BLN{bid:<9}:{packet.message_text}"
)
self.assertEqual(expected, str(packet))

# bulletin id = 1
@ -3,20 +3,20 @@ from unittest import mock

from oslo_config import cfg

from aprsd import conf # noqa: F401
from aprsd import packets
from aprsd import (
conf, # noqa: F401
packets,
plugins,
)
from aprsd import plugin as aprsd_plugin
from aprsd import plugins
from aprsd.packets import core

from . import fake

CONF = cfg.CONF


class TestPluginManager(unittest.TestCase):

def setUp(self) -> None:
self.fromcall = fake.FAKE_FROM_CALLSIGN
self.config_and_init()
@ -82,7 +82,6 @@ class TestPluginManager(unittest.TestCase):

class TestPlugin(unittest.TestCase):

def setUp(self) -> None:
self.fromcall = fake.FAKE_FROM_CALLSIGN
self.ack = 1
@ -103,7 +102,6 @@ class TestPlugin(unittest.TestCase):

class TestPluginBase(TestPlugin):

@mock.patch.object(fake.FakeBaseNoThreadsPlugin, "process")
def test_base_plugin_no_threads(self, mock_process):
p = fake.FakeBaseNoThreadsPlugin()
30
tox.ini
30
tox.ini
@ -2,7 +2,7 @@
|
||||
minversion = 2.9.0
|
||||
skipdist = True
|
||||
skip_missing_interpreters = true
|
||||
envlist = pep8,py{310}
|
||||
envlist = pep8,py{310,311}
|
||||
#requires = tox-pipenv
|
||||
# pip==22.0.4
|
||||
# pip-tools==5.4.0
|
||||
@ -21,10 +21,9 @@ setenv =
|
||||
usedevelop = True
|
||||
install_command = pip install {opts} {packages}
|
||||
extras = tests
|
||||
deps = coverage: coverage
|
||||
-r{toxinidir}/requirements.txt
|
||||
-r{toxinidir}/requirements-dev.txt
|
||||
pytestmain: git+https://github.com/pytest-dev/pytest.git@main
|
||||
deps =
|
||||
pytest-cov
|
||||
pytest
|
||||
commands =
|
||||
pytest -v --cov-report term-missing --cov=aprsd {posargs}
|
||||
coverage: coverage report -m
|
||||
@ -43,6 +42,8 @@ commands =
|
||||
sphinx-build -a -W . _build
|
||||
|
||||
[testenv:pep8]
|
||||
deps =
|
||||
flake8
|
||||
commands =
|
||||
flake8 {posargs} aprsd tests
|
||||
|
||||
@ -57,9 +58,9 @@ passenv = FAST8_NUM_COMMITS
|
||||
[testenv:lint]
|
||||
skip_install = true
|
||||
deps =
|
||||
-r{toxinidir}/requirements-dev.txt
|
||||
ruff
|
||||
commands =
|
||||
flake8 aprsd tests
|
||||
ruff check aprsd tests
|
||||
|
||||
[flake8]
|
||||
max-line-length = 99
|
||||
@ -74,25 +75,26 @@ exclude = .venv,.git,.tox,dist,doc,.ropeproject
|
||||
# This section is not needed if not using GitHub Actions for CI.
|
||||
[gh-actions]
|
||||
python =
|
||||
3.6: py36, pep8
|
||||
3.7: py38, pep8
|
||||
3.8: py38, pep8
|
||||
3.9: py39, pep8, type-check, docs
|
||||
3.10: py39, pep8, type-check, docs
|
||||
3.11: py311, pep8, type-check, docs
|
||||
|
||||
[testenv:fmt]
|
||||
# This will reformat your code to comply with pep8
|
||||
# and standard formatting
|
||||
skip_install = true
|
||||
deps =
|
||||
-r{toxinidir}/requirements-dev.txt
|
||||
ruff
|
||||
commands =
|
||||
gray aprsd tests
|
||||
ruff format aprsd tests
|
||||
|
||||
[testenv:type-check]
|
||||
skip_install = true
|
||||
deps = -r{toxinidir}/requirements.txt
|
||||
-r{toxinidir}/requirements-dev.txt
|
||||
deps =
|
||||
mypy
|
||||
types-pytz
|
||||
types-requests
|
||||
types-tzlocal
|
||||
commands =
|
||||
mypy --ignore-missing-imports --install-types aprsd
|
||||
|
||||
|