diff --git a/aprsd/client.py b/aprsd/client.py
index 2561692..1722a83 100644
--- a/aprsd/client.py
+++ b/aprsd/client.py
@@ -1,15 +1,18 @@
import abc
+import datetime
import logging
+import threading
import time
import aprslib
from aprslib.exceptions import LoginError
from oslo_config import cfg
+import wrapt
from aprsd import exception
from aprsd.clients import aprsis, fake, kiss
from aprsd.packets import core, packet_list
-from aprsd.utils import trace
+from aprsd.utils import singleton, trace
CONF = cfg.CONF
@@ -25,6 +28,34 @@ TRANSPORT_FAKE = "fake"
factory = None
+@singleton
+class APRSClientStats:
+
+ lock = threading.Lock()
+
+ @wrapt.synchronized(lock)
+ def stats(self, serializable=False):
+ client = factory.create()
+ stats = {
+ "transport": client.transport(),
+ "filter": client.filter,
+ "connected": client.connected,
+ }
+
+ if client.transport() == TRANSPORT_APRSIS:
+ stats["server_string"] = client.client.server_string
+ keepalive = client.client.aprsd_keepalive
+ if serializable:
+ keepalive = keepalive.isoformat()
+ stats["server_keepalive"] = keepalive
+ elif client.transport() == TRANSPORT_TCPKISS:
+ stats["host"] = CONF.kiss_tcp.host
+ stats["port"] = CONF.kiss_tcp.port
+ elif client.transport() == TRANSPORT_SERIALKISS:
+ stats["device"] = CONF.kiss_serial.device
+ return stats
+
+
class Client:
"""Singleton client class that constructs the aprslib connection."""
@@ -32,8 +63,8 @@ class Client:
_client = None
connected = False
- server_string = None
filter = None
+ lock = threading.Lock()
def __new__(cls, *args, **kwargs):
"""This magic turns this into a singleton."""
@@ -43,6 +74,10 @@ class Client:
cls._instance._create_client()
return cls._instance
+ @abc.abstractmethod
+ def stats(self) -> dict:
+ pass
+
def set_filter(self, filter):
self.filter = filter
if self._client:
@@ -69,9 +104,12 @@ class Client:
packet_list.PacketList().tx(packet)
self.client.send(packet)
+ @wrapt.synchronized(lock)
def reset(self):
"""Call this to force a rebuild/reconnect."""
+ LOG.info("Resetting client connection.")
if self._client:
+ self._client.close()
del self._client
self._create_client()
else:
@@ -102,11 +140,34 @@ class Client:
def consumer(self, callback, blocking=False, immortal=False, raw=False):
pass
+ @abc.abstractmethod
+ def is_alive(self):
+ pass
+
+ @abc.abstractmethod
+ def close(self):
+ pass
+
class APRSISClient(Client):
_client = None
+ def __init__(self):
+ max_timeout = {"hours": 0.0, "minutes": 2, "seconds": 0}
+ self.max_delta = datetime.timedelta(**max_timeout)
+
+ def stats(self) -> dict:
+ stats = {}
+ if self.is_configured():
+ stats = {
+ "server_string": self._client.server_string,
+                "server_keepalive": self._client.aprsd_keepalive,
+ "filter": self.filter,
+ }
+
+ return stats
+
@staticmethod
def is_enabled():
# Defaults to True if the enabled flag is non existent
@@ -138,14 +199,24 @@ class APRSISClient(Client):
return True
return True
+ def _is_stale_connection(self):
+ delta = datetime.datetime.now() - self._client.aprsd_keepalive
+ if delta > self.max_delta:
+ LOG.error(f"Connection is stale, last heard {delta} ago.")
+ return True
+
def is_alive(self):
if self._client:
- LOG.warning(f"APRS_CLIENT {self._client} alive? {self._client.is_alive()}")
- return self._client.is_alive()
+ return self._client.is_alive() and not self._is_stale_connection()
else:
LOG.warning(f"APRS_CLIENT {self._client} alive? NO!!!")
return False
+ def close(self):
+ if self._client:
+ self._client.stop()
+ self._client.close()
+
@staticmethod
def transport():
return TRANSPORT_APRSIS
@@ -159,25 +230,25 @@ class APRSISClient(Client):
password = CONF.aprs_network.password
host = CONF.aprs_network.host
port = CONF.aprs_network.port
- connected = False
+ self.connected = False
backoff = 1
aprs_client = None
- while not connected:
+ while not self.connected:
try:
LOG.info(f"Creating aprslib client({host}:{port}) and logging in {user}.")
aprs_client = aprsis.Aprsdis(user, passwd=password, host=host, port=port)
# Force the log to be the same
aprs_client.logger = LOG
aprs_client.connect()
- connected = True
+ self.connected = True
backoff = 1
except LoginError as e:
LOG.error(f"Failed to login to APRS-IS Server '{e}'")
- connected = False
+ self.connected = False
time.sleep(backoff)
except Exception as e:
LOG.error(f"Unable to connect to APRS-IS server. '{e}' ")
- connected = False
+ self.connected = False
time.sleep(backoff)
# Don't allow the backoff to go to inifinity.
if backoff > 5:
@@ -190,17 +261,24 @@ class APRSISClient(Client):
return aprs_client
def consumer(self, callback, blocking=False, immortal=False, raw=False):
- if self.is_alive():
- self._client.consumer(
- callback, blocking=blocking,
- immortal=immortal, raw=raw,
- )
+ self._client.consumer(
+ callback, blocking=blocking,
+ immortal=immortal, raw=raw,
+ )
class KISSClient(Client):
_client = None
+ def stats(self) -> dict:
+ stats = {}
+ if self.is_configured():
+ return {
+ "transport": self.transport(),
+ }
+ return stats
+
@staticmethod
def is_enabled():
"""Return if tcp or serial KISS is enabled."""
@@ -239,6 +317,10 @@ class KISSClient(Client):
else:
return False
+ def close(self):
+ if self._client:
+ self._client.stop()
+
@staticmethod
def transport():
if CONF.kiss_serial.enabled:
@@ -268,6 +350,7 @@ class KISSClient(Client):
def setup_connection(self):
self._client = kiss.KISS3Client()
+ self.connected = True
return self._client
def consumer(self, callback, blocking=False, immortal=False, raw=False):
@@ -276,6 +359,9 @@ class KISSClient(Client):
class APRSDFakeClient(Client, metaclass=trace.TraceWrapperMetaclass):
+ def stats(self) -> dict:
+ return {}
+
@staticmethod
def is_enabled():
if CONF.fake_client.enabled:
@@ -289,7 +375,11 @@ class APRSDFakeClient(Client, metaclass=trace.TraceWrapperMetaclass):
def is_alive(self):
return True
+ def close(self):
+ pass
+
def setup_connection(self):
+ self.connected = True
return fake.APRSDFakeClient()
@staticmethod
@@ -329,7 +419,6 @@ class ClientFactory:
key = TRANSPORT_FAKE
builder = self._builders.get(key)
- LOG.debug(f"ClientFactory Creating client of type '{key}'")
if not builder:
raise ValueError(key)
return builder()
diff --git a/aprsd/clients/aprsis.py b/aprsd/clients/aprsis.py
index 07052ac..06b2d6b 100644
--- a/aprsd/clients/aprsis.py
+++ b/aprsd/clients/aprsis.py
@@ -1,3 +1,4 @@
+import datetime
import logging
import select
import threading
@@ -11,7 +12,6 @@ from aprslib.exceptions import (
import wrapt
import aprsd
-from aprsd import stats
from aprsd.packets import core
@@ -24,6 +24,9 @@ class Aprsdis(aprslib.IS):
# flag to tell us to stop
thread_stop = False
+ # date for last time we heard from the server
+ aprsd_keepalive = datetime.datetime.now()
+
# timeout in seconds
select_timeout = 1
lock = threading.Lock()
@@ -142,7 +145,6 @@ class Aprsdis(aprslib.IS):
self.logger.info(f"Connected to {server_string}")
self.server_string = server_string
- stats.APRSDStats().set_aprsis_server(server_string)
except LoginError as e:
self.logger.error(str(e))
@@ -176,13 +178,14 @@ class Aprsdis(aprslib.IS):
try:
for line in self._socket_readlines(blocking):
if line[0:1] != b"#":
+ self.aprsd_keepalive = datetime.datetime.now()
if raw:
callback(line)
else:
callback(self._parse(line))
else:
self.logger.debug("Server: %s", line.decode("utf8"))
- stats.APRSDStats().set_aprsis_keepalive()
+ self.aprsd_keepalive = datetime.datetime.now()
except ParseError as exp:
self.logger.log(
11,
diff --git a/aprsd/cmds/fetch_stats.py b/aprsd/cmds/fetch_stats.py
index c0a4d42..386ff41 100644
--- a/aprsd/cmds/fetch_stats.py
+++ b/aprsd/cmds/fetch_stats.py
@@ -1,10 +1,9 @@
-# Fetch active stats from a remote running instance of aprsd server
-# This uses the RPC server to fetch the stats from the remote server.
-
+# Fetch active stats from a remote running instance of aprsd admin web interface.
import logging
import click
from oslo_config import cfg
+import requests
from rich.console import Console
from rich.table import Table
@@ -12,7 +11,6 @@ from rich.table import Table
import aprsd
from aprsd import cli_helper
from aprsd.main import cli
-from aprsd.rpc import client as rpc_client
# setup the global logger
@@ -26,87 +24,80 @@ CONF = cfg.CONF
@click.option(
"--host", type=str,
default=None,
- help="IP address of the remote aprsd server to fetch stats from.",
+    help="IP address of the remote aprsd admin web ui to fetch stats from.",
)
@click.option(
"--port", type=int,
default=None,
- help="Port of the remote aprsd server rpc port to fetch stats from.",
-)
-@click.option(
- "--magic-word", type=str,
- default=None,
- help="Magic word of the remote aprsd server rpc port to fetch stats from.",
+ help="Port of the remote aprsd web admin interface to fetch stats from.",
)
@click.pass_context
@cli_helper.process_standard_options
-def fetch_stats(ctx, host, port, magic_word):
- """Fetch stats from a remote running instance of aprsd server."""
- LOG.info(f"APRSD Fetch-Stats started version: {aprsd.__version__}")
+def fetch_stats(ctx, host, port):
+ """Fetch stats from a APRSD admin web interface."""
+ console = Console()
+ console.print(f"APRSD Fetch-Stats started version: {aprsd.__version__}")
CONF.log_opt_values(LOG, logging.DEBUG)
if not host:
- host = CONF.rpc_settings.ip
+ host = CONF.admin.web_ip
if not port:
- port = CONF.rpc_settings.port
- if not magic_word:
- magic_word = CONF.rpc_settings.magic_word
+ port = CONF.admin.web_port
- msg = f"Fetching stats from {host}:{port} with magic word '{magic_word}'"
- console = Console()
+ msg = f"Fetching stats from {host}:{port}"
console.print(msg)
with console.status(msg):
- client = rpc_client.RPCClient(host, port, magic_word)
- stats = client.get_stats_dict()
- if stats:
- console.print_json(data=stats)
- else:
- LOG.error(f"Failed to fetch stats via RPC aprsd server at {host}:{port}")
+ response = requests.get(f"http://{host}:{port}/stats", timeout=120)
+ if not response:
+ console.print(
+                f"Failed to fetch stats from {host}:{port}",
+ style="bold red",
+ )
return
+
+ stats = response.json()
+ if not stats:
+ console.print(
+ f"Failed to fetch stats from aprsd admin ui at {host}:{port}",
+ style="bold red",
+ )
+ return
+
aprsd_title = (
"APRSD "
- f"[bold cyan]v{stats['aprsd']['version']}[/] "
- f"Callsign [bold green]{stats['aprsd']['callsign']}[/] "
- f"Uptime [bold yellow]{stats['aprsd']['uptime']}[/]"
+ f"[bold cyan]v{stats['APRSDStats']['version']}[/] "
+ f"Callsign [bold green]{stats['APRSDStats']['callsign']}[/] "
+ f"Uptime [bold yellow]{stats['APRSDStats']['uptime']}[/]"
)
- console.rule(f"Stats from {host}:{port} with magic word '{magic_word}'")
+ console.rule(f"Stats from {host}:{port}")
console.print("\n\n")
console.rule(aprsd_title)
# Show the connection to APRS
# It can be a connection to an APRS-IS server or a local TNC via KISS or KISSTCP
-    if "aprs-is" in stats:
+    if "APRSClientStats" in stats:
- title = f"APRS-IS Connection {stats['aprs-is']['server']}"
+ title = f"APRS-IS Connection {stats['APRSClientStats']['server_string']}"
table = Table(title=title)
table.add_column("Key")
table.add_column("Value")
- for key, value in stats["aprs-is"].items():
+ for key, value in stats["APRSClientStats"].items():
table.add_row(key, value)
console.print(table)
threads_table = Table(title="Threads")
threads_table.add_column("Name")
threads_table.add_column("Alive?")
- for name, alive in stats["aprsd"]["threads"].items():
+ for name, alive in stats["APRSDThreadList"].items():
threads_table.add_row(name, str(alive))
console.print(threads_table)
- msgs_table = Table(title="Messages")
- msgs_table.add_column("Key")
- msgs_table.add_column("Value")
- for key, value in stats["messages"].items():
- msgs_table.add_row(key, str(value))
-
- console.print(msgs_table)
-
packet_totals = Table(title="Packet Totals")
packet_totals.add_column("Key")
packet_totals.add_column("Value")
- packet_totals.add_row("Total Received", str(stats["packets"]["total_received"]))
- packet_totals.add_row("Total Sent", str(stats["packets"]["total_sent"]))
- packet_totals.add_row("Total Tracked", str(stats["packets"]["total_tracked"]))
+ packet_totals.add_row("Total Received", str(stats["PacketList"]["rx"]))
+ packet_totals.add_row("Total Sent", str(stats["PacketList"]["tx"]))
console.print(packet_totals)
# Show each of the packet types
@@ -114,47 +105,52 @@ def fetch_stats(ctx, host, port, magic_word):
packets_table.add_column("Packet Type")
packets_table.add_column("TX")
packets_table.add_column("RX")
- for key, value in stats["packets"]["by_type"].items():
+ for key, value in stats["PacketList"]["packets"].items():
packets_table.add_row(key, str(value["tx"]), str(value["rx"]))
console.print(packets_table)
-    if "plugins" in stats:
+    if "PluginManager" in stats:
- count = len(stats["plugins"])
+ count = len(stats["PluginManager"])
plugins_table = Table(title=f"Plugins ({count})")
plugins_table.add_column("Plugin")
plugins_table.add_column("Enabled")
plugins_table.add_column("Version")
plugins_table.add_column("TX")
plugins_table.add_column("RX")
- for key, value in stats["plugins"].items():
+ plugins = stats["PluginManager"]
+ for key, value in plugins.items():
plugins_table.add_row(
key,
- str(stats["plugins"][key]["enabled"]),
- stats["plugins"][key]["version"],
- str(stats["plugins"][key]["tx"]),
- str(stats["plugins"][key]["rx"]),
+ str(plugins[key]["enabled"]),
+ plugins[key]["version"],
+ str(plugins[key]["tx"]),
+ str(plugins[key]["rx"]),
)
console.print(plugins_table)
- if "seen_list" in stats["aprsd"]:
- count = len(stats["aprsd"]["seen_list"])
+ seen_list = stats.get("SeenList")
+
+ if seen_list:
+ count = len(seen_list)
seen_table = Table(title=f"Seen List ({count})")
seen_table.add_column("Callsign")
seen_table.add_column("Message Count")
seen_table.add_column("Last Heard")
- for key, value in stats["aprsd"]["seen_list"].items():
+ for key, value in seen_list.items():
seen_table.add_row(key, str(value["count"]), value["last"])
console.print(seen_table)
- if "watch_list" in stats["aprsd"]:
- count = len(stats["aprsd"]["watch_list"])
+ watch_list = stats.get("WatchList")
+
+ if watch_list:
+ count = len(watch_list)
watch_table = Table(title=f"Watch List ({count})")
watch_table.add_column("Callsign")
watch_table.add_column("Last Heard")
- for key, value in stats["aprsd"]["watch_list"].items():
+ for key, value in watch_list.items():
watch_table.add_row(key, value["last"])
console.print(watch_table)
diff --git a/aprsd/cmds/healthcheck.py b/aprsd/cmds/healthcheck.py
index c8c9a38..6914e8f 100644
--- a/aprsd/cmds/healthcheck.py
+++ b/aprsd/cmds/healthcheck.py
@@ -13,11 +13,11 @@ from oslo_config import cfg
from rich.console import Console
import aprsd
-from aprsd import cli_helper, utils
+from aprsd import cli_helper
from aprsd import conf # noqa
# local imports here
from aprsd.main import cli
-from aprsd.rpc import client as aprsd_rpc_client
+from aprsd.threads import stats as stats_threads
# setup the global logger
@@ -39,46 +39,48 @@ console = Console()
@cli_helper.process_standard_options
def healthcheck(ctx, timeout):
"""Check the health of the running aprsd server."""
- console.log(f"APRSD HealthCheck version: {aprsd.__version__}")
- if not CONF.rpc_settings.enabled:
- LOG.error("Must enable rpc_settings.enabled to use healthcheck")
- sys.exit(-1)
- if not CONF.rpc_settings.ip:
- LOG.error("Must enable rpc_settings.ip to use healthcheck")
- sys.exit(-1)
- if not CONF.rpc_settings.magic_word:
- LOG.error("Must enable rpc_settings.magic_word to use healthcheck")
- sys.exit(-1)
+ ver_str = f"APRSD HealthCheck version: {aprsd.__version__}"
+ console.log(ver_str)
- with console.status(f"APRSD HealthCheck version: {aprsd.__version__}") as status:
+ with console.status(ver_str):
try:
- status.update(f"Contacting APRSD via RPC {CONF.rpc_settings.ip}")
- stats = aprsd_rpc_client.RPCClient().get_stats_dict()
+ stats_obj = stats_threads.StatsStore()
+ stats_obj.load()
+ stats = stats_obj.data
+ # console.print(stats)
except Exception as ex:
- console.log(f"Failed to fetch healthcheck : '{ex}'")
+ console.log(f"Failed to load stats: '{ex}'")
sys.exit(-1)
else:
+ now = datetime.datetime.now()
if not stats:
console.log("No stats from aprsd")
sys.exit(-1)
- email_thread_last_update = stats["email"]["thread_last_update"]
- if email_thread_last_update != "never":
- delta = utils.parse_delta_str(email_thread_last_update)
- d = datetime.timedelta(**delta)
+ email_stats = stats.get("EmailStats")
+ if email_stats:
+ email_thread_last_update = email_stats["last_check_time"]
+
+ if email_thread_last_update != "never":
+ d = now - email_thread_last_update
+ max_timeout = {"hours": 0.0, "minutes": 5, "seconds": 0}
+ max_delta = datetime.timedelta(**max_timeout)
+ if d > max_delta:
+ console.log(f"Email thread is very old! {d}")
+ sys.exit(-1)
+
+ client_stats = stats.get("APRSClientStats")
+ if not client_stats:
+ console.log("No APRSClientStats")
+ sys.exit(-1)
+ else:
+ aprsis_last_update = client_stats["server_keepalive"]
+ d = now - aprsis_last_update
max_timeout = {"hours": 0.0, "minutes": 5, "seconds": 0}
max_delta = datetime.timedelta(**max_timeout)
if d > max_delta:
- console.log(f"Email thread is very old! {d}")
+ LOG.error(f"APRS-IS last update is very old! {d}")
sys.exit(-1)
- aprsis_last_update = stats["aprs-is"]["last_update"]
- delta = utils.parse_delta_str(aprsis_last_update)
- d = datetime.timedelta(**delta)
- max_timeout = {"hours": 0.0, "minutes": 5, "seconds": 0}
- max_delta = datetime.timedelta(**max_timeout)
- if d > max_delta:
- LOG.error(f"APRS-IS last update is very old! {d}")
- sys.exit(-1)
-
+ console.log("OK")
sys.exit(0)
diff --git a/aprsd/cmds/list_plugins.py b/aprsd/cmds/list_plugins.py
index 357d725..5ff3f4d 100644
--- a/aprsd/cmds/list_plugins.py
+++ b/aprsd/cmds/list_plugins.py
@@ -21,7 +21,7 @@ from aprsd import cli_helper
from aprsd import plugin as aprsd_plugin
from aprsd.main import cli
from aprsd.plugins import (
- email, fortune, location, notify, ping, query, time, version, weather,
+ email, fortune, location, notify, ping, time, version, weather,
)
@@ -122,7 +122,7 @@ def get_installed_extensions():
def show_built_in_plugins(console):
- modules = [email, fortune, location, notify, ping, query, time, version, weather]
+ modules = [email, fortune, location, notify, ping, time, version, weather]
plugins = []
for module in modules:
diff --git a/aprsd/cmds/listen.py b/aprsd/cmds/listen.py
index 93564ac..b30eee6 100644
--- a/aprsd/cmds/listen.py
+++ b/aprsd/cmds/listen.py
@@ -15,10 +15,10 @@ from rich.console import Console
# local imports here
import aprsd
-from aprsd import cli_helper, client, packets, plugin, stats, threads
+from aprsd import cli_helper, client, packets, plugin, threads
from aprsd.main import cli
from aprsd.packets import log as packet_log
-from aprsd.rpc import server as rpc_server
+from aprsd.stats import collector
from aprsd.threads import rx
@@ -38,7 +38,7 @@ def signal_handler(sig, frame):
),
)
time.sleep(5)
- LOG.info(stats.APRSDStats())
+ LOG.info(collector.Collector().collect())
class APRSDListenThread(rx.APRSDRXThread):
@@ -169,6 +169,7 @@ def listen(
LOG.info(f"APRSD Listen Started version: {aprsd.__version__}")
CONF.log_opt_values(LOG, logging.DEBUG)
+ collector.Collector()
# Try and load saved MsgTrack list
LOG.debug("Loading saved MsgTrack object.")
@@ -192,10 +193,6 @@ def listen(
keepalive = threads.KeepAliveThread()
# keepalive.start()
- if CONF.rpc_settings.enabled:
- rpc = rpc_server.APRSDRPCThread()
- rpc.start()
-
pm = None
pm = plugin.PluginManager()
if load_plugins:
@@ -206,6 +203,8 @@ def listen(
"Not Loading any plugins use --load-plugins to load what's "
"defined in the config file.",
)
+ stats_thread = threads.APRSDStatsStoreThread()
+ stats_thread.start()
LOG.debug("Create APRSDListenThread")
listen_thread = APRSDListenThread(
@@ -221,6 +220,4 @@ def listen(
keepalive.join()
LOG.debug("listen_thread Join")
listen_thread.join()
-
- if CONF.rpc_settings.enabled:
- rpc.join()
+ stats_thread.join()
diff --git a/aprsd/cmds/send_message.py b/aprsd/cmds/send_message.py
index 1da30a2..84a503e 100644
--- a/aprsd/cmds/send_message.py
+++ b/aprsd/cmds/send_message.py
@@ -76,7 +76,6 @@ def send_message(
aprs_login = CONF.aprs_network.login
if not aprs_password:
- LOG.warning(CONF.aprs_network.password)
if not CONF.aprs_network.password:
click.echo("Must set --aprs-password or APRS_PASSWORD")
ctx.exit(-1)
diff --git a/aprsd/cmds/server.py b/aprsd/cmds/server.py
index 89a6c74..4edaff3 100644
--- a/aprsd/cmds/server.py
+++ b/aprsd/cmds/server.py
@@ -10,8 +10,9 @@ from aprsd import cli_helper, client
from aprsd import main as aprsd_main
from aprsd import packets, plugin, threads, utils
from aprsd.main import cli
-from aprsd.rpc import server as rpc_server
-from aprsd.threads import registry, rx, tx
+from aprsd.threads import keep_alive, log_monitor, registry, rx
+from aprsd.threads import stats as stats_thread
+from aprsd.threads import tx
CONF = cfg.CONF
@@ -47,6 +48,14 @@ def server(ctx, flush):
# Initialize the client factory and create
# The correct client object ready for use
client.ClientFactory.setup()
+ if not client.factory.is_client_enabled():
+ LOG.error("No Clients are enabled in config.")
+ sys.exit(-1)
+
+ # Creates the client object
+ LOG.info("Creating client connection")
+ aprs_client = client.factory.create()
+ LOG.info(aprs_client)
# Create the initial PM singleton and Register plugins
# We register plugins first here so we can register each
@@ -87,16 +96,21 @@ def server(ctx, flush):
packets.PacketTrack().flush()
packets.WatchList().flush()
packets.SeenList().flush()
+ packets.PacketList().flush()
else:
# Try and load saved MsgTrack list
LOG.debug("Loading saved MsgTrack object.")
packets.PacketTrack().load()
packets.WatchList().load()
packets.SeenList().load()
+ packets.PacketList().load()
- keepalive = threads.KeepAliveThread()
+ keepalive = keep_alive.KeepAliveThread()
keepalive.start()
+ stats_store_thread = stats_thread.APRSDStatsStoreThread()
+ stats_store_thread.start()
+
rx_thread = rx.APRSDPluginRXThread(
packet_queue=threads.packet_queue,
)
@@ -106,7 +120,6 @@ def server(ctx, flush):
rx_thread.start()
process_thread.start()
- packets.PacketTrack().restart()
if CONF.enable_beacon:
LOG.info("Beacon Enabled. Starting Beacon thread.")
bcn_thread = tx.BeaconSendThread()
@@ -117,11 +130,9 @@ def server(ctx, flush):
registry_thread = registry.APRSRegistryThread()
registry_thread.start()
- if CONF.rpc_settings.enabled:
- rpc = rpc_server.APRSDRPCThread()
- rpc.start()
- log_monitor = threads.log_monitor.LogMonitorThread()
- log_monitor.start()
+ if CONF.admin.web_enabled:
+ log_monitor_thread = log_monitor.LogMonitorThread()
+ log_monitor_thread.start()
rx_thread.join()
process_thread.join()
diff --git a/aprsd/cmds/webchat.py b/aprsd/cmds/webchat.py
index 0a3f68e..c683e7b 100644
--- a/aprsd/cmds/webchat.py
+++ b/aprsd/cmds/webchat.py
@@ -23,7 +23,7 @@ from aprsd import (
)
from aprsd.main import cli
from aprsd.threads import aprsd as aprsd_threads
-from aprsd.threads import rx, tx
+from aprsd.threads import keep_alive, rx, tx
from aprsd.utils import trace
@@ -63,7 +63,7 @@ def signal_handler(sig, frame):
time.sleep(1.5)
# packets.WatchList().save()
# packets.SeenList().save()
- LOG.info(stats.APRSDStats())
+ LOG.info(stats.stats_collector.collect())
LOG.info("Telling flask to bail.")
signal.signal(signal.SIGTERM, sys.exit(0))
@@ -378,7 +378,7 @@ def _get_transport(stats):
transport = "aprs-is"
aprs_connection = (
"APRS-IS Server: "
- "{}".format(stats["stats"]["aprs-is"]["server"])
+ "{}".format(stats["APRSClientStats"]["server_string"])
)
elif client.KISSClient.is_enabled():
transport = client.KISSClient.transport()
@@ -414,12 +414,13 @@ def location(callsign):
@flask_app.route("/")
def index():
stats = _stats()
+ LOG.error(stats)
# For development
html_template = "index.html"
LOG.debug(f"Template {html_template}")
- transport, aprs_connection = _get_transport(stats)
+ transport, aprs_connection = _get_transport(stats["stats"])
LOG.debug(f"transport {transport} aprs_connection {aprs_connection}")
stats["transport"] = transport
@@ -454,27 +455,28 @@ def send_message_status():
def _stats():
- stats_obj = stats.APRSDStats()
now = datetime.datetime.now()
time_format = "%m-%d-%Y %H:%M:%S"
- stats_dict = stats_obj.stats()
+ stats_dict = stats.stats_collector.collect(serializable=True)
# Webchat doesnt need these
- if "watch_list" in stats_dict["aprsd"]:
- del stats_dict["aprsd"]["watch_list"]
- if "seen_list" in stats_dict["aprsd"]:
- del stats_dict["aprsd"]["seen_list"]
- if "threads" in stats_dict["aprsd"]:
- del stats_dict["aprsd"]["threads"]
- # del stats_dict["email"]
- # del stats_dict["plugins"]
- # del stats_dict["messages"]
+ if "WatchList" in stats_dict:
+ del stats_dict["WatchList"]
+ if "SeenList" in stats_dict:
+ del stats_dict["SeenList"]
+ if "APRSDThreadList" in stats_dict:
+ del stats_dict["APRSDThreadList"]
+ if "PacketList" in stats_dict:
+ del stats_dict["PacketList"]
+ if "EmailStats" in stats_dict:
+ del stats_dict["EmailStats"]
+ if "PluginManager" in stats_dict:
+ del stats_dict["PluginManager"]
result = {
"time": now.strftime(time_format),
"stats": stats_dict,
}
-
return result
@@ -544,7 +546,7 @@ class SendMessageNamespace(Namespace):
LOG.debug(f"Long {long}")
tx.send(
- packets.GPSPacket(
+ packets.BeaconPacket(
from_call=CONF.callsign,
to_call="APDW16",
latitude=lat,
@@ -642,7 +644,7 @@ def webchat(ctx, flush, port):
packets.WatchList()
packets.SeenList()
- keepalive = threads.KeepAliveThread()
+ keepalive = keep_alive.KeepAliveThread()
LOG.info("Start KeepAliveThread")
keepalive.start()
diff --git a/aprsd/conf/common.py b/aprsd/conf/common.py
index 6221d4f..2a119b5 100644
--- a/aprsd/conf/common.py
+++ b/aprsd/conf/common.py
@@ -15,10 +15,6 @@ watch_list_group = cfg.OptGroup(
name="watch_list",
title="Watch List settings",
)
-rpc_group = cfg.OptGroup(
- name="rpc_settings",
- title="RPC Settings for admin <--> web",
-)
webchat_group = cfg.OptGroup(
name="webchat",
title="Settings specific to the webchat command",
@@ -146,7 +142,7 @@ admin_opts = [
default=False,
help="Enable the Admin Web Interface",
),
- cfg.IPOpt(
+ cfg.StrOpt(
"web_ip",
default="0.0.0.0",
help="The ip address to listen on",
@@ -169,28 +165,6 @@ admin_opts = [
),
]
-rpc_opts = [
- cfg.BoolOpt(
- "enabled",
- default=True,
- help="Enable RPC calls",
- ),
- cfg.StrOpt(
- "ip",
- default="localhost",
- help="The ip address to listen on",
- ),
- cfg.PortOpt(
- "port",
- default=18861,
- help="The port to listen on",
- ),
- cfg.StrOpt(
- "magic_word",
- default=APRSD_DEFAULT_MAGIC_WORD,
- help="Magic word to authenticate requests between client/server",
- ),
-]
enabled_plugins_opts = [
cfg.ListOpt(
@@ -213,7 +187,7 @@ enabled_plugins_opts = [
]
webchat_opts = [
- cfg.IPOpt(
+ cfg.StrOpt(
"web_ip",
default="0.0.0.0",
help="The ip address to listen on",
@@ -281,8 +255,6 @@ def register_opts(config):
config.register_opts(admin_opts, group=admin_group)
config.register_group(watch_list_group)
config.register_opts(watch_list_opts, group=watch_list_group)
- config.register_group(rpc_group)
- config.register_opts(rpc_opts, group=rpc_group)
config.register_group(webchat_group)
config.register_opts(webchat_opts, group=webchat_group)
config.register_group(registry_group)
@@ -294,7 +266,6 @@ def list_opts():
"DEFAULT": (aprsd_opts + enabled_plugins_opts),
admin_group.name: admin_opts,
watch_list_group.name: watch_list_opts,
- rpc_group.name: rpc_opts,
webchat_group.name: webchat_opts,
registry_group.name: registry_opts,
}
diff --git a/aprsd/log/log.py b/aprsd/log/log.py
index fcdb620..2c34e12 100644
--- a/aprsd/log/log.py
+++ b/aprsd/log/log.py
@@ -36,7 +36,6 @@ class InterceptHandler(logging.Handler):
# to disable log to stdout, but still log to file
# use the --quiet option on the cmdln
def setup_logging(loglevel=None, quiet=False):
- print(f"setup_logging: loglevel={loglevel}, quiet={quiet}")
if not loglevel:
log_level = CONF.logging.log_level
else:
@@ -58,6 +57,8 @@ def setup_logging(loglevel=None, quiet=False):
webserver_list = [
"werkzeug",
"werkzeug._internal",
+ "socketio",
+ "urllib3.connectionpool",
]
# We don't really want to see the aprslib parsing debug output.
diff --git a/aprsd/main.py b/aprsd/main.py
index 26c228e..96d2249 100644
--- a/aprsd/main.py
+++ b/aprsd/main.py
@@ -35,7 +35,8 @@ from oslo_config import cfg, generator
# local imports here
import aprsd
-from aprsd import cli_helper, packets, stats, threads, utils
+from aprsd import cli_helper, packets, threads, utils
+from aprsd.stats import collector
# setup the global logger
@@ -44,7 +45,6 @@ CONF = cfg.CONF
LOG = logging.getLogger("APRSD")
CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"])
flask_enabled = False
-rpc_serv = None
def custom_startswith(string, incomplete):
@@ -96,7 +96,8 @@ def signal_handler(sig, frame):
packets.PacketTrack().save()
packets.WatchList().save()
packets.SeenList().save()
- LOG.info(stats.APRSDStats())
+ packets.PacketList().save()
+ LOG.info(collector.Collector().collect())
# signal.signal(signal.SIGTERM, sys.exit(0))
# sys.exit(0)
diff --git a/aprsd/packets/core.py b/aprsd/packets/core.py
index 1665e29..344c70a 100644
--- a/aprsd/packets/core.py
+++ b/aprsd/packets/core.py
@@ -109,14 +109,6 @@ class Packet:
path: List[str] = field(default_factory=list, compare=False, hash=False)
via: Optional[str] = field(default=None, compare=False, hash=False)
- @property
- def json(self):
- """get the json formated string.
-
- This is used soley by the rpc server to return json over the wire.
- """
- return self.to_json()
-
def get(self, key: str, default: Optional[str] = None):
"""Emulate a getter on a dict."""
if hasattr(self, key):
@@ -218,6 +210,11 @@ class BulletinPacket(Packet):
bid: Optional[str] = field(default="1")
message_text: Optional[str] = field(default=None)
+ @property
+ def key(self) -> str:
+ """Build a key for finding this packet in a dict."""
+ return f"{self.from_call}:BLN{self.bid}"
+
@property
def human_info(self) -> str:
return f"BLN{self.bid} {self.message_text}"
@@ -385,6 +382,14 @@ class BeaconPacket(GPSPacket):
f"{self.payload}"
)
+ @property
+ def key(self) -> str:
+ """Build a key for finding this packet in a dict."""
+ if self.raw_timestamp:
+ return f"{self.from_call}:{self.raw_timestamp}"
+ else:
+ return f"{self.from_call}:{self.human_info.replace(' ','')}"
+
@property
def human_info(self) -> str:
h_str = []
@@ -407,6 +412,11 @@ class MicEPacket(GPSPacket):
# 0 to 360
course: int = 0
+ @property
+ def key(self) -> str:
+ """Build a key for finding this packet in a dict."""
+ return f"{self.from_call}:{self.human_info.replace(' ', '')}"
+
@property
def human_info(self) -> str:
h_info = super().human_info
@@ -428,6 +438,14 @@ class TelemetryPacket(GPSPacket):
# 0 to 360
course: int = 0
+ @property
+ def key(self) -> str:
+ """Build a key for finding this packet in a dict."""
+ if self.raw_timestamp:
+ return f"{self.from_call}:{self.raw_timestamp}"
+ else:
+ return f"{self.from_call}:{self.human_info.replace(' ','')}"
+
@property
def human_info(self) -> str:
h_info = super().human_info
@@ -548,6 +566,14 @@ class WeatherPacket(GPSPacket, DataClassJsonMixin):
raw = cls._translate(cls, kvs) # type: ignore
return super().from_dict(raw)
+ @property
+ def key(self) -> str:
+ """Build a key for finding this packet in a dict."""
+ if self.raw_timestamp:
+ return f"{self.from_call}:{self.raw_timestamp}"
+ elif self.wx_raw_timestamp:
+ return f"{self.from_call}:{self.wx_raw_timestamp}"
+
@property
def human_info(self) -> str:
h_str = []
@@ -643,6 +669,11 @@ class ThirdPartyPacket(Packet, DataClassJsonMixin):
obj.subpacket = factory(obj.subpacket) # type: ignore
return obj
+ @property
+ def key(self) -> str:
+ """Build a key for finding this packet in a dict."""
+ return f"{self.from_call}:{self.subpacket.key}"
+
@property
def human_info(self) -> str:
sub_info = self.subpacket.human_info
@@ -772,8 +803,7 @@ def factory(raw_packet: dict[Any, Any]) -> type[Packet]:
if "latitude" in raw:
packet_class = GPSPacket
else:
- LOG.warning(f"Unknown packet type {packet_type}")
- LOG.warning(raw)
+ # LOG.warning(raw)
packet_class = UnknownPacket
raw.get("addresse", raw.get("to_call"))
diff --git a/aprsd/packets/packet_list.py b/aprsd/packets/packet_list.py
index 3d75a07..0813a49 100644
--- a/aprsd/packets/packet_list.py
+++ b/aprsd/packets/packet_list.py
@@ -6,26 +6,28 @@ import threading
from oslo_config import cfg
import wrapt
-from aprsd import stats
from aprsd.packets import seen_list
+from aprsd.utils import objectstore
CONF = cfg.CONF
LOG = logging.getLogger("APRSD")
-class PacketList(MutableMapping):
+class PacketList(MutableMapping, objectstore.ObjectStoreMixin):
_instance = None
lock = threading.Lock()
_total_rx: int = 0
_total_tx: int = 0
- types = {}
def __new__(cls, *args, **kwargs):
if cls._instance is None:
cls._instance = super().__new__(cls)
cls._maxlen = 100
- cls.d = OrderedDict()
+ cls.data = {
+ "types": {},
+ "packets": OrderedDict(),
+ }
return cls._instance
@wrapt.synchronized(lock)
@@ -34,11 +36,10 @@ class PacketList(MutableMapping):
self._total_rx += 1
self._add(packet)
ptype = packet.__class__.__name__
- if not ptype in self.types:
- self.types[ptype] = {"tx": 0, "rx": 0}
- self.types[ptype]["rx"] += 1
+ if not ptype in self.data["types"]:
+ self.data["types"][ptype] = {"tx": 0, "rx": 0}
+ self.data["types"][ptype]["rx"] += 1
seen_list.SeenList().update_seen(packet)
- stats.APRSDStats().rx(packet)
@wrapt.synchronized(lock)
def tx(self, packet):
@@ -46,18 +47,17 @@ class PacketList(MutableMapping):
self._total_tx += 1
self._add(packet)
ptype = packet.__class__.__name__
- if not ptype in self.types:
- self.types[ptype] = {"tx": 0, "rx": 0}
- self.types[ptype]["tx"] += 1
+ if not ptype in self.data["types"]:
+ self.data["types"][ptype] = {"tx": 0, "rx": 0}
+ self.data["types"][ptype]["tx"] += 1
seen_list.SeenList().update_seen(packet)
- stats.APRSDStats().tx(packet)
@wrapt.synchronized(lock)
def add(self, packet):
self._add(packet)
def _add(self, packet):
- self[packet.key] = packet
+ self.data["packets"][packet.key] = packet
def copy(self):
return self.d.copy()
@@ -72,23 +72,23 @@ class PacketList(MutableMapping):
def __getitem__(self, key):
# self.d.move_to_end(key)
- return self.d[key]
+ return self.data["packets"][key]
def __setitem__(self, key, value):
- if key in self.d:
- self.d.move_to_end(key)
- elif len(self.d) == self.maxlen:
- self.d.popitem(last=False)
- self.d[key] = value
+ if key in self.data["packets"]:
+ self.data["packets"].move_to_end(key)
+ elif len(self.data["packets"]) == self.maxlen:
+ self.data["packets"].popitem(last=False)
+ self.data["packets"][key] = value
def __delitem__(self, key):
- del self.d[key]
+ del self.data["packets"][key]
def __iter__(self):
- return self.d.__iter__()
+ return self.data["packets"].__iter__()
def __len__(self):
- return len(self.d)
+ return len(self.data["packets"])
@wrapt.synchronized(lock)
def total_rx(self):
@@ -97,3 +97,14 @@ class PacketList(MutableMapping):
@wrapt.synchronized(lock)
def total_tx(self):
return self._total_tx
+
+ def stats(self, serializable=False) -> dict:
+ stats = {
+ "total_tracked": self.total_tx() + self.total_rx(),
+ "rx": self.total_rx(),
+ "tx": self.total_tx(),
+ "types": self.data["types"],
+ "packets": self.data["packets"],
+ }
+
+ return stats
diff --git a/aprsd/packets/seen_list.py b/aprsd/packets/seen_list.py
index f917eb9..9b81831 100644
--- a/aprsd/packets/seen_list.py
+++ b/aprsd/packets/seen_list.py
@@ -26,6 +26,10 @@ class SeenList(objectstore.ObjectStoreMixin):
cls._instance.data = {}
return cls._instance
+ def stats(self, serializable=False):
+ """Return the stats for the SeenList class."""
+ return self.data
+
@wrapt.synchronized(lock)
def update_seen(self, packet):
callsign = None
@@ -39,5 +43,5 @@ class SeenList(objectstore.ObjectStoreMixin):
"last": None,
"count": 0,
}
- self.data[callsign]["last"] = str(datetime.datetime.now())
+ self.data[callsign]["last"] = datetime.datetime.now()
self.data[callsign]["count"] += 1
diff --git a/aprsd/packets/tracker.py b/aprsd/packets/tracker.py
index 1246dc1..1714557 100644
--- a/aprsd/packets/tracker.py
+++ b/aprsd/packets/tracker.py
@@ -4,7 +4,6 @@ import threading
from oslo_config import cfg
import wrapt
-from aprsd.threads import tx
from aprsd.utils import objectstore
@@ -58,6 +57,24 @@ class PacketTrack(objectstore.ObjectStoreMixin):
def values(self):
return self.data.values()
+ @wrapt.synchronized(lock)
+ def stats(self, serializable=False):
+ stats = {
+ "total_tracked": self.total_tracked,
+ }
+ pkts = {}
+ for key in self.data:
+ last_send_time = self.data[key].last_send_time
+ last_send_attempt = self.data[key]._last_send_attempt
+ pkts[key] = {
+ "last_send_time": last_send_time,
+ "last_send_attempt": last_send_attempt,
+ "retry_count": self.data[key].retry_count,
+ "message": self.data[key].raw,
+ }
+ stats["packets"] = pkts
+ return stats
+
@wrapt.synchronized(lock)
def __len__(self):
return len(self.data)
@@ -79,33 +96,3 @@ class PacketTrack(objectstore.ObjectStoreMixin):
del self.data[key]
except KeyError:
pass
-
- def restart(self):
- """Walk the list of messages and restart them if any."""
- for key in self.data.keys():
- pkt = self.data[key]
- if pkt._last_send_attempt < pkt.retry_count:
- tx.send(pkt)
-
- def _resend(self, packet):
- packet._last_send_attempt = 0
- tx.send(packet)
-
- def restart_delayed(self, count=None, most_recent=True):
- """Walk the list of delayed messages and restart them if any."""
- if not count:
- # Send all the delayed messages
- for key in self.data.keys():
- pkt = self.data[key]
- if pkt._last_send_attempt == pkt._retry_count:
- self._resend(pkt)
- else:
- # They want to resend delayed messages
- tmp = sorted(
- self.data.items(),
- reverse=most_recent,
- key=lambda x: x[1].last_send_time,
- )
- pkt_list = tmp[:count]
- for (_key, pkt) in pkt_list:
- self._resend(pkt)
diff --git a/aprsd/packets/watch_list.py b/aprsd/packets/watch_list.py
index dee0631..ad8f222 100644
--- a/aprsd/packets/watch_list.py
+++ b/aprsd/packets/watch_list.py
@@ -28,7 +28,7 @@ class WatchList(objectstore.ObjectStoreMixin):
return cls._instance
def __init__(self, config=None):
- ring_size = CONF.watch_list.packet_keep_count
+ CONF.watch_list.packet_keep_count
if CONF.watch_list.callsigns:
for callsign in CONF.watch_list.callsigns:
@@ -38,12 +38,22 @@ class WatchList(objectstore.ObjectStoreMixin):
# last time a message was seen by aprs-is. For now this
# is all we can do.
self.data[call] = {
- "last": datetime.datetime.now(),
- "packets": utils.RingBuffer(
- ring_size,
- ),
+ "last": None,
+ "packet": None,
}
+ @wrapt.synchronized(lock)
+ def stats(self, serializable=False) -> dict:
+ stats = {}
+ for callsign in self.data:
+ stats[callsign] = {
+ "last": self.data[callsign]["last"],
+ "packet": self.data[callsign]["packet"],
+ "age": self.age(callsign),
+ "old": self.is_old(callsign),
+ }
+ return stats
+
def is_enabled(self):
return CONF.watch_list.enabled
@@ -58,7 +68,7 @@ class WatchList(objectstore.ObjectStoreMixin):
callsign = packet.from_call
if self.callsign_in_watchlist(callsign):
self.data[callsign]["last"] = datetime.datetime.now()
- self.data[callsign]["packets"].append(packet)
+ self.data[callsign]["packet"] = packet
def last_seen(self, callsign):
if self.callsign_in_watchlist(callsign):
@@ -66,7 +76,11 @@ class WatchList(objectstore.ObjectStoreMixin):
def age(self, callsign):
now = datetime.datetime.now()
- return str(now - self.last_seen(callsign))
+ last_seen_time = self.last_seen(callsign)
+ if last_seen_time:
+ return str(now - last_seen_time)
+ else:
+ return None
def max_delta(self, seconds=None):
if not seconds:
@@ -83,14 +97,19 @@ class WatchList(objectstore.ObjectStoreMixin):
We put this here so any notification plugin can use this
same test.
"""
+ if not self.callsign_in_watchlist(callsign):
+ return False
+
age = self.age(callsign)
+ if age:
+ delta = utils.parse_delta_str(age)
+ d = datetime.timedelta(**delta)
- delta = utils.parse_delta_str(age)
- d = datetime.timedelta(**delta)
+ max_delta = self.max_delta(seconds=seconds)
- max_delta = self.max_delta(seconds=seconds)
-
- if d > max_delta:
- return True
+ if d > max_delta:
+ return True
+ else:
+ return False
else:
return False
diff --git a/aprsd/plugin.py b/aprsd/plugin.py
index f73e0c6..b923ee9 100644
--- a/aprsd/plugin.py
+++ b/aprsd/plugin.py
@@ -344,6 +344,28 @@ class PluginManager:
self._watchlist_pm = pluggy.PluginManager("aprsd")
self._watchlist_pm.add_hookspecs(APRSDPluginSpec)
+ def stats(self, serializable=False) -> dict:
+ """Collect and return stats for all plugins."""
+ def full_name_with_qualname(obj):
+ return "{}.{}".format(
+ obj.__class__.__module__,
+ obj.__class__.__qualname__,
+ )
+
+ plugin_stats = {}
+ plugins = self.get_plugins()
+ if plugins:
+
+ for p in plugins:
+ plugin_stats[full_name_with_qualname(p)] = {
+ "enabled": p.enabled,
+ "rx": p.rx_count,
+ "tx": p.tx_count,
+ "version": p.version,
+ }
+
+ return plugin_stats
+
def is_plugin(self, obj):
for c in inspect.getmro(obj):
if issubclass(c, APRSDPluginBase):
@@ -369,7 +391,9 @@ class PluginManager:
try:
module_name, class_name = module_class_string.rsplit(".", 1)
module = importlib.import_module(module_name)
- module = importlib.reload(module)
+ # Commented out because the email thread starts in a different context
+ # and hence gives a different singleton for the EmailStats
+ # module = importlib.reload(module)
except Exception as ex:
if not module_name:
LOG.error(f"Failed to load Plugin {module_class_string}")
diff --git a/aprsd/plugins/email.py b/aprsd/plugins/email.py
index f6f273a..12657a1 100644
--- a/aprsd/plugins/email.py
+++ b/aprsd/plugins/email.py
@@ -11,7 +11,7 @@ import time
import imapclient
from oslo_config import cfg
-from aprsd import packets, plugin, stats, threads
+from aprsd import packets, plugin, threads, utils
from aprsd.threads import tx
from aprsd.utils import trace
@@ -60,6 +60,38 @@ class EmailInfo:
self._delay = val
+@utils.singleton
+class EmailStats:
+ """Singleton object to store stats related to email."""
+ _instance = None
+ tx = 0
+ rx = 0
+ email_thread_last_time = None
+
+ def stats(self, serializable=False):
+ if CONF.email_plugin.enabled:
+ last_check_time = self.email_thread_last_time
+ if serializable and last_check_time:
+ last_check_time = last_check_time.isoformat()
+ stats = {
+ "tx": self.tx,
+ "rx": self.rx,
+ "last_check_time": last_check_time,
+ }
+ else:
+ stats = {}
+ return stats
+
+ def tx_inc(self):
+ self.tx += 1
+
+ def rx_inc(self):
+ self.rx += 1
+
+ def email_thread_update(self):
+ self.email_thread_last_time = datetime.datetime.now()
+
+
class EmailPlugin(plugin.APRSDRegexCommandPluginBase):
"""Email Plugin."""
@@ -190,10 +222,6 @@ class EmailPlugin(plugin.APRSDRegexCommandPluginBase):
def _imap_connect():
imap_port = CONF.email_plugin.imap_port
use_ssl = CONF.email_plugin.imap_use_ssl
- # host = CONFIG["aprsd"]["email"]["imap"]["host"]
- # msg = "{}{}:{}".format("TLS " if use_ssl else "", host, imap_port)
- # LOG.debug("Connect to IMAP host {} with user '{}'".
- # format(msg, CONFIG['imap']['login']))
try:
server = imapclient.IMAPClient(
@@ -440,7 +468,7 @@ def send_email(to_addr, content):
[to_addr],
msg.as_string(),
)
- stats.APRSDStats().email_tx_inc()
+ EmailStats().tx_inc()
except Exception:
LOG.exception("Sendmail Error!!!!")
server.quit()
@@ -545,7 +573,7 @@ class APRSDEmailThread(threads.APRSDThread):
def loop(self):
time.sleep(5)
- stats.APRSDStats().email_thread_update()
+ EmailStats().email_thread_update()
# always sleep for 5 seconds and see if we need to check email
# This allows CTRL-C to stop the execution of this loop sooner
# than check_email_delay time
diff --git a/aprsd/plugins/query.py b/aprsd/plugins/query.py
deleted file mode 100644
index 871b249..0000000
--- a/aprsd/plugins/query.py
+++ /dev/null
@@ -1,81 +0,0 @@
-import datetime
-import logging
-import re
-
-from oslo_config import cfg
-
-from aprsd import packets, plugin
-from aprsd.packets import tracker
-from aprsd.utils import trace
-
-
-CONF = cfg.CONF
-LOG = logging.getLogger("APRSD")
-
-
-class QueryPlugin(plugin.APRSDRegexCommandPluginBase):
- """Query command."""
-
- command_regex = r"^\!.*"
- command_name = "query"
- short_description = "APRSD Owner command to query messages in the MsgTrack"
-
- def setup(self):
- """Do any plugin setup here."""
- if not CONF.query_plugin.callsign:
- LOG.error("Config query_plugin.callsign not set. Disabling plugin")
- self.enabled = False
- self.enabled = True
-
- @trace.trace
- def process(self, packet: packets.MessagePacket):
- LOG.info("Query COMMAND")
-
- fromcall = packet.from_call
- message = packet.get("message_text", None)
-
- pkt_tracker = tracker.PacketTrack()
- now = datetime.datetime.now()
- reply = "Pending messages ({}) {}".format(
- len(pkt_tracker),
- now.strftime("%H:%M:%S"),
- )
-
- searchstring = "^" + CONF.query_plugin.callsign + ".*"
- # only I can do admin commands
- if re.search(searchstring, fromcall):
-
- # resend last N most recent: "!3"
- r = re.search(r"^\!([0-9]).*", message)
- if r is not None:
- if len(pkt_tracker) > 0:
- last_n = r.group(1)
- reply = packets.NULL_MESSAGE
- LOG.debug(reply)
- pkt_tracker.restart_delayed(count=int(last_n))
- else:
- reply = "No pending msgs to resend"
- LOG.debug(reply)
- return reply
-
- # resend all: "!a"
- r = re.search(r"^\![aA].*", message)
- if r is not None:
- if len(pkt_tracker) > 0:
- reply = packets.NULL_MESSAGE
- LOG.debug(reply)
- pkt_tracker.restart_delayed()
- else:
- reply = "No pending msgs"
- LOG.debug(reply)
- return reply
-
- # delete all: "!d"
- r = re.search(r"^\![dD].*", message)
- if r is not None:
- reply = "Deleted ALL pending msgs."
- LOG.debug(reply)
- pkt_tracker.flush()
- return reply
-
- return reply
diff --git a/aprsd/plugins/version.py b/aprsd/plugins/version.py
index 32037a0..7dce7cb 100644
--- a/aprsd/plugins/version.py
+++ b/aprsd/plugins/version.py
@@ -1,7 +1,8 @@
import logging
import aprsd
-from aprsd import plugin, stats
+from aprsd import plugin
+from aprsd.stats import collector
LOG = logging.getLogger("APRSD")
@@ -23,8 +24,8 @@ class VersionPlugin(plugin.APRSDRegexCommandPluginBase):
# fromcall = packet.get("from")
# message = packet.get("message_text", None)
# ack = packet.get("msgNo", "0")
- s = stats.APRSDStats().stats()
+ s = collector.Collector().collect()
return "APRSD ver:{} uptime:{}".format(
aprsd.__version__,
- s["aprsd"]["uptime"],
+ s["APRSDStats"]["uptime"],
)
diff --git a/aprsd/rpc/__init__.py b/aprsd/rpc/__init__.py
deleted file mode 100644
index 87df2d6..0000000
--- a/aprsd/rpc/__init__.py
+++ /dev/null
@@ -1,14 +0,0 @@
-import rpyc
-
-
-class AuthSocketStream(rpyc.SocketStream):
- """Used to authenitcate the RPC stream to remote."""
-
- @classmethod
- def connect(cls, *args, authorizer=None, **kwargs):
- stream_obj = super().connect(*args, **kwargs)
-
- if callable(authorizer):
- authorizer(stream_obj.sock)
-
- return stream_obj
diff --git a/aprsd/rpc/client.py b/aprsd/rpc/client.py
deleted file mode 100644
index 985dc2e..0000000
--- a/aprsd/rpc/client.py
+++ /dev/null
@@ -1,165 +0,0 @@
-import json
-import logging
-
-from oslo_config import cfg
-import rpyc
-
-from aprsd import conf # noqa
-from aprsd import rpc
-
-
-CONF = cfg.CONF
-LOG = logging.getLogger("APRSD")
-
-
-class RPCClient:
- _instance = None
- _rpc_client = None
-
- ip = None
- port = None
- magic_word = None
-
- def __new__(cls, *args, **kwargs):
- if cls._instance is None:
- cls._instance = super().__new__(cls)
- return cls._instance
-
- def __init__(self, ip=None, port=None, magic_word=None):
- if ip:
- self.ip = ip
- else:
- self.ip = CONF.rpc_settings.ip
- if port:
- self.port = int(port)
- else:
- self.port = CONF.rpc_settings.port
- if magic_word:
- self.magic_word = magic_word
- else:
- self.magic_word = CONF.rpc_settings.magic_word
- self._check_settings()
- self.get_rpc_client()
-
- def _check_settings(self):
- if not CONF.rpc_settings.enabled:
- LOG.warning("RPC is not enabled, no way to get stats!!")
-
- if self.magic_word == conf.common.APRSD_DEFAULT_MAGIC_WORD:
- LOG.warning("You are using the default RPC magic word!!!")
- LOG.warning("edit aprsd.conf and change rpc_settings.magic_word")
-
- LOG.debug(f"RPC Client: {self.ip}:{self.port} {self.magic_word}")
-
- def _rpyc_connect(
- self, host, port, service=rpyc.VoidService,
- config={}, ipv6=False,
- keepalive=False, authorizer=None, ):
-
- LOG.info(f"Connecting to RPC host '{host}:{port}'")
- try:
- s = rpc.AuthSocketStream.connect(
- host, port, ipv6=ipv6, keepalive=keepalive,
- authorizer=authorizer,
- )
- return rpyc.utils.factory.connect_stream(s, service, config=config)
- except ConnectionRefusedError:
- LOG.error(f"Failed to connect to RPC host '{host}:{port}'")
- return None
-
- def get_rpc_client(self):
- if not self._rpc_client:
- self._rpc_client = self._rpyc_connect(
- self.ip,
- self.port,
- authorizer=lambda sock: sock.send(self.magic_word.encode()),
- )
- return self._rpc_client
-
- def get_stats_dict(self):
- cl = self.get_rpc_client()
- result = {}
- if not cl:
- return result
-
- try:
- rpc_stats_dict = cl.root.get_stats()
- result = json.loads(rpc_stats_dict)
- except EOFError:
- LOG.error("Lost connection to RPC Host")
- self._rpc_client = None
- return result
-
- def get_stats(self):
- cl = self.get_rpc_client()
- result = {}
- if not cl:
- return result
-
- try:
- result = cl.root.get_stats_obj()
- except EOFError:
- LOG.error("Lost connection to RPC Host")
- self._rpc_client = None
- return result
-
- def get_packet_track(self):
- cl = self.get_rpc_client()
- result = None
- if not cl:
- return result
- try:
- result = cl.root.get_packet_track()
- except EOFError:
- LOG.error("Lost connection to RPC Host")
- self._rpc_client = None
- return result
-
- def get_packet_list(self):
- cl = self.get_rpc_client()
- result = None
- if not cl:
- return result
- try:
- result = cl.root.get_packet_list()
- except EOFError:
- LOG.error("Lost connection to RPC Host")
- self._rpc_client = None
- return result
-
- def get_watch_list(self):
- cl = self.get_rpc_client()
- result = None
- if not cl:
- return result
- try:
- result = cl.root.get_watch_list()
- except EOFError:
- LOG.error("Lost connection to RPC Host")
- self._rpc_client = None
- return result
-
- def get_seen_list(self):
- cl = self.get_rpc_client()
- result = None
- if not cl:
- return result
- try:
- result = cl.root.get_seen_list()
- except EOFError:
- LOG.error("Lost connection to RPC Host")
- self._rpc_client = None
- return result
-
- def get_log_entries(self):
- cl = self.get_rpc_client()
- result = None
- if not cl:
- return result
- try:
- result_str = cl.root.get_log_entries()
- result = json.loads(result_str)
- except EOFError:
- LOG.error("Lost connection to RPC Host")
- self._rpc_client = None
- return result
diff --git a/aprsd/rpc/server.py b/aprsd/rpc/server.py
deleted file mode 100644
index 749300f..0000000
--- a/aprsd/rpc/server.py
+++ /dev/null
@@ -1,99 +0,0 @@
-import json
-import logging
-
-from oslo_config import cfg
-import rpyc
-from rpyc.utils.authenticators import AuthenticationError
-from rpyc.utils.server import ThreadPoolServer
-
-from aprsd import conf # noqa: F401
-from aprsd import packets, stats, threads
-from aprsd.threads import log_monitor
-
-
-CONF = cfg.CONF
-LOG = logging.getLogger("APRSD")
-
-
-def magic_word_authenticator(sock):
- client_ip = sock.getpeername()[0]
- magic = sock.recv(len(CONF.rpc_settings.magic_word)).decode()
- if magic != CONF.rpc_settings.magic_word:
- LOG.error(
- f"wrong magic word passed from {client_ip} "
- "'{magic}' != '{CONF.rpc_settings.magic_word}'",
- )
- raise AuthenticationError(
- f"wrong magic word passed in '{magic}'"
- f" != '{CONF.rpc_settings.magic_word}'",
- )
- return sock, None
-
-
-class APRSDRPCThread(threads.APRSDThread):
- def __init__(self):
- super().__init__(name="RPCThread")
- self.thread = ThreadPoolServer(
- APRSDService,
- port=CONF.rpc_settings.port,
- protocol_config={"allow_public_attrs": True},
- authenticator=magic_word_authenticator,
- )
-
- def stop(self):
- if self.thread:
- self.thread.close()
- self.thread_stop = True
-
- def loop(self):
- # there is no loop as run is blocked
- if self.thread and not self.thread_stop:
- # This is a blocking call
- self.thread.start()
-
-
-@rpyc.service
-class APRSDService(rpyc.Service):
- def on_connect(self, conn):
- # code that runs when a connection is created
- # (to init the service, if needed)
- LOG.info("RPC Client Connected")
- self._conn = conn
-
- def on_disconnect(self, conn):
- # code that runs after the connection has already closed
- # (to finalize the service, if needed)
- LOG.info("RPC Client Disconnected")
- self._conn = None
-
- @rpyc.exposed
- def get_stats(self):
- stat = stats.APRSDStats()
- stats_dict = stat.stats()
- return_str = json.dumps(stats_dict, indent=4, sort_keys=True, default=str)
- return return_str
-
- @rpyc.exposed
- def get_stats_obj(self):
- return stats.APRSDStats()
-
- @rpyc.exposed
- def get_packet_list(self):
- return packets.PacketList()
-
- @rpyc.exposed
- def get_packet_track(self):
- return packets.PacketTrack()
-
- @rpyc.exposed
- def get_watch_list(self):
- return packets.WatchList()
-
- @rpyc.exposed
- def get_seen_list(self):
- return packets.SeenList()
-
- @rpyc.exposed
- def get_log_entries(self):
- entries = log_monitor.LogEntries().get_all_and_purge()
- return json.dumps(entries, default=str)
diff --git a/aprsd/stats.py b/aprsd/stats.py
deleted file mode 100644
index cb20eb8..0000000
--- a/aprsd/stats.py
+++ /dev/null
@@ -1,265 +0,0 @@
-import datetime
-import logging
-import threading
-
-from oslo_config import cfg
-import wrapt
-
-import aprsd
-from aprsd import packets, plugin, utils
-
-
-CONF = cfg.CONF
-LOG = logging.getLogger("APRSD")
-
-
-class APRSDStats:
-
- _instance = None
- lock = threading.Lock()
-
- start_time = None
- _aprsis_server = None
- _aprsis_keepalive = None
-
- _email_thread_last_time = None
- _email_tx = 0
- _email_rx = 0
-
- _mem_current = 0
- _mem_peak = 0
-
- _thread_info = {}
-
- _pkt_cnt = {
- "Packet": {
- "tx": 0,
- "rx": 0,
- },
- "AckPacket": {
- "tx": 0,
- "rx": 0,
- },
- "GPSPacket": {
- "tx": 0,
- "rx": 0,
- },
- "StatusPacket": {
- "tx": 0,
- "rx": 0,
- },
- "MicEPacket": {
- "tx": 0,
- "rx": 0,
- },
- "MessagePacket": {
- "tx": 0,
- "rx": 0,
- },
- "WeatherPacket": {
- "tx": 0,
- "rx": 0,
- },
- "ObjectPacket": {
- "tx": 0,
- "rx": 0,
- },
- }
-
- def __new__(cls, *args, **kwargs):
- if cls._instance is None:
- cls._instance = super().__new__(cls)
- # any init here
- cls._instance.start_time = datetime.datetime.now()
- cls._instance._aprsis_keepalive = datetime.datetime.now()
- return cls._instance
-
- @wrapt.synchronized(lock)
- @property
- def uptime(self):
- return datetime.datetime.now() - self.start_time
-
- @wrapt.synchronized(lock)
- @property
- def memory(self):
- return self._mem_current
-
- @wrapt.synchronized(lock)
- def set_memory(self, memory):
- self._mem_current = memory
-
- @wrapt.synchronized(lock)
- @property
- def memory_peak(self):
- return self._mem_peak
-
- @wrapt.synchronized(lock)
- def set_memory_peak(self, memory):
- self._mem_peak = memory
-
- @wrapt.synchronized(lock)
- def set_thread_info(self, thread_info):
- self._thread_info = thread_info
-
- @wrapt.synchronized(lock)
- @property
- def thread_info(self):
- return self._thread_info
-
- @wrapt.synchronized(lock)
- @property
- def aprsis_server(self):
- return self._aprsis_server
-
- @wrapt.synchronized(lock)
- def set_aprsis_server(self, server):
- self._aprsis_server = server
-
- @wrapt.synchronized(lock)
- @property
- def aprsis_keepalive(self):
- return self._aprsis_keepalive
-
- @wrapt.synchronized(lock)
- def set_aprsis_keepalive(self):
- self._aprsis_keepalive = datetime.datetime.now()
-
- def rx(self, packet):
- pkt_type = packet.__class__.__name__
- if pkt_type not in self._pkt_cnt:
- self._pkt_cnt[pkt_type] = {
- "tx": 0,
- "rx": 0,
- }
- self._pkt_cnt[pkt_type]["rx"] += 1
-
- def tx(self, packet):
- pkt_type = packet.__class__.__name__
- if pkt_type not in self._pkt_cnt:
- self._pkt_cnt[pkt_type] = {
- "tx": 0,
- "rx": 0,
- }
- self._pkt_cnt[pkt_type]["tx"] += 1
-
- @wrapt.synchronized(lock)
- @property
- def msgs_tracked(self):
- return packets.PacketTrack().total_tracked
-
- @wrapt.synchronized(lock)
- @property
- def email_tx(self):
- return self._email_tx
-
- @wrapt.synchronized(lock)
- def email_tx_inc(self):
- self._email_tx += 1
-
- @wrapt.synchronized(lock)
- @property
- def email_rx(self):
- return self._email_rx
-
- @wrapt.synchronized(lock)
- def email_rx_inc(self):
- self._email_rx += 1
-
- @wrapt.synchronized(lock)
- @property
- def email_thread_time(self):
- return self._email_thread_last_time
-
- @wrapt.synchronized(lock)
- def email_thread_update(self):
- self._email_thread_last_time = datetime.datetime.now()
-
- def stats(self):
- now = datetime.datetime.now()
- if self._email_thread_last_time:
- last_update = str(now - self._email_thread_last_time)
- else:
- last_update = "never"
-
- if self._aprsis_keepalive:
- last_aprsis_keepalive = str(now - self._aprsis_keepalive)
- else:
- last_aprsis_keepalive = "never"
-
- pm = plugin.PluginManager()
- plugins = pm.get_plugins()
- plugin_stats = {}
- if plugins:
- def full_name_with_qualname(obj):
- return "{}.{}".format(
- obj.__class__.__module__,
- obj.__class__.__qualname__,
- )
-
- for p in plugins:
- plugin_stats[full_name_with_qualname(p)] = {
- "enabled": p.enabled,
- "rx": p.rx_count,
- "tx": p.tx_count,
- "version": p.version,
- }
-
- wl = packets.WatchList()
- sl = packets.SeenList()
- pl = packets.PacketList()
-
- stats = {
- "aprsd": {
- "version": aprsd.__version__,
- "uptime": utils.strfdelta(self.uptime),
- "callsign": CONF.callsign,
- "memory_current": int(self.memory),
- "memory_current_str": utils.human_size(self.memory),
- "memory_peak": int(self.memory_peak),
- "memory_peak_str": utils.human_size(self.memory_peak),
- "threads": self._thread_info,
- "watch_list": wl.get_all(),
- "seen_list": sl.get_all(),
- },
- "aprs-is": {
- "server": str(self.aprsis_server),
- "callsign": CONF.aprs_network.login,
- "last_update": last_aprsis_keepalive,
- },
- "packets": {
- "total_tracked": int(pl.total_tx() + pl.total_rx()),
- "total_sent": int(pl.total_tx()),
- "total_received": int(pl.total_rx()),
- "by_type": self._pkt_cnt,
- },
- "messages": {
- "sent": self._pkt_cnt["MessagePacket"]["tx"],
- "received": self._pkt_cnt["MessagePacket"]["tx"],
- "ack_sent": self._pkt_cnt["AckPacket"]["tx"],
- },
- "email": {
- "enabled": CONF.email_plugin.enabled,
- "sent": int(self._email_tx),
- "received": int(self._email_rx),
- "thread_last_update": last_update,
- },
- "plugins": plugin_stats,
- }
- return stats
-
- def __str__(self):
- pl = packets.PacketList()
- return (
- "Uptime:{} Msgs TX:{} RX:{} "
- "ACK: TX:{} RX:{} "
- "Email TX:{} RX:{} LastLoop:{} ".format(
- self.uptime,
- pl.total_tx(),
- pl.total_rx(),
- self._pkt_cnt["AckPacket"]["tx"],
- self._pkt_cnt["AckPacket"]["rx"],
- self._email_tx,
- self._email_rx,
- self._email_thread_last_time,
- )
- )
diff --git a/aprsd/stats/__init__.py b/aprsd/stats/__init__.py
new file mode 100644
index 0000000..44602fd
--- /dev/null
+++ b/aprsd/stats/__init__.py
@@ -0,0 +1,20 @@
+from aprsd import client as aprs_client
+from aprsd import plugin
+from aprsd.packets import packet_list, seen_list, tracker, watch_list
+from aprsd.plugins import email
+from aprsd.stats import app, collector
+from aprsd.threads import aprsd
+
+
+# Create the collector and register all the objects
+# that APRSD has that implement the stats protocol
+stats_collector = collector.Collector()
+stats_collector.register_producer(app.APRSDStats())
+stats_collector.register_producer(packet_list.PacketList())
+stats_collector.register_producer(watch_list.WatchList())
+stats_collector.register_producer(tracker.PacketTrack())
+stats_collector.register_producer(plugin.PluginManager())
+stats_collector.register_producer(aprsd.APRSDThreadList())
+stats_collector.register_producer(email.EmailStats())
+stats_collector.register_producer(aprs_client.APRSClientStats())
+stats_collector.register_producer(seen_list.SeenList())
diff --git a/aprsd/stats/app.py b/aprsd/stats/app.py
new file mode 100644
index 0000000..890bf02
--- /dev/null
+++ b/aprsd/stats/app.py
@@ -0,0 +1,47 @@
+import datetime
+import tracemalloc
+
+from oslo_config import cfg
+
+import aprsd
+from aprsd import utils
+
+
+CONF = cfg.CONF
+
+
+class APRSDStats:
+ """The APRSDStats class is used to collect stats from the application."""
+
+ _instance = None
+
+ def __new__(cls, *args, **kwargs):
+ """Have to override the new method to make this a singleton
+
+ instead of using @singleton decorator so the unit tests work.
+ """
+ if not cls._instance:
+ cls._instance = super().__new__(cls)
+ return cls._instance
+
+ def __init__(self):
+ self.start_time = datetime.datetime.now()
+
+ def uptime(self):
+ return datetime.datetime.now() - self.start_time
+
+ def stats(self, serializable=False) -> dict:
+ current, peak = tracemalloc.get_traced_memory()
+ uptime = self.uptime()
+ if serializable:
+ uptime = str(uptime)
+ stats = {
+ "version": aprsd.__version__,
+ "uptime": uptime,
+ "callsign": CONF.callsign,
+ "memory_current": int(current),
+ "memory_current_str": utils.human_size(current),
+ "memory_peak": int(peak),
+ "memory_peak_str": utils.human_size(peak),
+ }
+ return stats
diff --git a/aprsd/stats/collector.py b/aprsd/stats/collector.py
new file mode 100644
index 0000000..c58b242
--- /dev/null
+++ b/aprsd/stats/collector.py
@@ -0,0 +1,30 @@
+from typing import Protocol
+
+from aprsd.utils import singleton
+
+
+class StatsProducer(Protocol):
+ """The StatsProducer protocol is used to define the interface for collecting stats."""
+ def stats(self, serializeable=False) -> dict:
+ """provide stats in a dictionary format."""
+ ...
+
+
+@singleton
+class Collector:
+ """The Collector class is used to collect stats from multiple StatsProducer instances."""
+ def __init__(self):
+ self.producers: dict[str, StatsProducer] = {}
+
+ def collect(self, serializable=False) -> dict:
+ stats = {}
+ for name, producer in self.producers.items():
+ # No need to put in empty stats
+ tmp_stats = producer.stats(serializable=serializable)
+ if tmp_stats:
+ stats[name] = tmp_stats
+ return stats
+
+ def register_producer(self, producer: StatsProducer):
+ name = producer.__class__.__name__
+ self.producers[name] = producer
diff --git a/aprsd/threads/__init__.py b/aprsd/threads/__init__.py
index 9220a65..bd26e01 100644
--- a/aprsd/threads/__init__.py
+++ b/aprsd/threads/__init__.py
@@ -3,8 +3,9 @@ import queue
# Make these available to anyone importing
# aprsd.threads
from .aprsd import APRSDThread, APRSDThreadList # noqa: F401
-from .keep_alive import KeepAliveThread # noqa: F401
-from .rx import APRSDRXThread, APRSDDupeRXThread, APRSDProcessPacketThread # noqa: F401
+from .rx import ( # noqa: F401
+ APRSDDupeRXThread, APRSDProcessPacketThread, APRSDRXThread,
+)
packet_queue = queue.Queue(maxsize=20)
diff --git a/aprsd/threads/aprsd.py b/aprsd/threads/aprsd.py
index d815af6..52220fe 100644
--- a/aprsd/threads/aprsd.py
+++ b/aprsd/threads/aprsd.py
@@ -13,7 +13,7 @@ LOG = logging.getLogger("APRSD")
class APRSDThread(threading.Thread, metaclass=abc.ABCMeta):
"""Base class for all threads in APRSD."""
- loop_interval = 1
+ loop_count = 1
def __init__(self, name):
super().__init__(name=name)
@@ -47,8 +47,8 @@ class APRSDThread(threading.Thread, metaclass=abc.ABCMeta):
def run(self):
LOG.debug("Starting")
while not self._should_quit():
+ self.loop_count += 1
can_loop = self.loop()
- self.loop_interval += 1
self._last_loop = datetime.datetime.now()
if not can_loop:
self.stop()
@@ -71,6 +71,20 @@ class APRSDThreadList:
cls.threads_list = []
return cls._instance
+ def stats(self, serializable=False) -> dict:
+ stats = {}
+ for th in self.threads_list:
+ age = th.loop_age()
+ if serializable:
+ age = str(age)
+ stats[th.__class__.__name__] = {
+ "name": th.name,
+ "alive": th.is_alive(),
+ "age": age,
+ "loop_count": th.loop_count,
+ }
+ return stats
+
@wrapt.synchronized(lock)
def add(self, thread_obj):
self.threads_list.append(thread_obj)
diff --git a/aprsd/threads/keep_alive.py b/aprsd/threads/keep_alive.py
index 940802d..98e2208 100644
--- a/aprsd/threads/keep_alive.py
+++ b/aprsd/threads/keep_alive.py
@@ -5,7 +5,8 @@ import tracemalloc
from oslo_config import cfg
-from aprsd import client, packets, stats, utils
+from aprsd import client, packets, utils
+from aprsd.stats import collector
from aprsd.threads import APRSDThread, APRSDThreadList
@@ -24,61 +25,66 @@ class KeepAliveThread(APRSDThread):
self.max_delta = datetime.timedelta(**max_timeout)
def loop(self):
- if self.cntr % 60 == 0:
- pkt_tracker = packets.PacketTrack()
- stats_obj = stats.APRSDStats()
+ if self.loop_count % 60 == 0:
+ stats_json = collector.Collector().collect()
pl = packets.PacketList()
thread_list = APRSDThreadList()
now = datetime.datetime.now()
- last_email = stats_obj.email_thread_time
- if last_email:
- email_thread_time = utils.strfdelta(now - last_email)
+
+ if "EmailStats" in stats_json:
+ email_stats = stats_json["EmailStats"]
+ if email_stats["last_check_time"]:
+ email_thread_time = utils.strfdelta(now - email_stats["last_check_time"])
+ else:
+ email_thread_time = "N/A"
else:
email_thread_time = "N/A"
- last_msg_time = utils.strfdelta(now - stats_obj.aprsis_keepalive)
+ if "APRSClientStats" in stats_json and stats_json["APRSClientStats"].get("transport") == "aprsis":
+ if stats_json["APRSClientStats"].get("server_keepalive"):
+ last_msg_time = utils.strfdelta(now - stats_json["APRSClientStats"]["server_keepalive"])
+ else:
+ last_msg_time = "N/A"
+ else:
+ last_msg_time = "N/A"
- current, peak = tracemalloc.get_traced_memory()
- stats_obj.set_memory(current)
- stats_obj.set_memory_peak(peak)
-
- login = CONF.callsign
-
- tracked_packets = len(pkt_tracker)
+ tracked_packets = stats_json["PacketTrack"]["total_tracked"]
+ tx_msg = 0
+ rx_msg = 0
+ if "PacketList" in stats_json:
+ msg_packets = stats_json["PacketList"].get("MessagePacket")
+ if msg_packets:
+ tx_msg = msg_packets.get("tx", 0)
+ rx_msg = msg_packets.get("rx", 0)
keepalive = (
"{} - Uptime {} RX:{} TX:{} Tracker:{} Msgs TX:{} RX:{} "
"Last:{} Email: {} - RAM Current:{} Peak:{} Threads:{}"
).format(
- login,
- utils.strfdelta(stats_obj.uptime),
+ stats_json["APRSDStats"]["callsign"],
+ stats_json["APRSDStats"]["uptime"],
pl.total_rx(),
pl.total_tx(),
tracked_packets,
- stats_obj._pkt_cnt["MessagePacket"]["tx"],
- stats_obj._pkt_cnt["MessagePacket"]["rx"],
+ tx_msg,
+ rx_msg,
last_msg_time,
email_thread_time,
- utils.human_size(current),
- utils.human_size(peak),
+ stats_json["APRSDStats"]["memory_current_str"],
+ stats_json["APRSDStats"]["memory_peak_str"],
len(thread_list),
)
LOG.info(keepalive)
- thread_out = []
- thread_info = {}
- for thread in thread_list.threads_list:
- alive = thread.is_alive()
- age = thread.loop_age()
- key = thread.__class__.__name__
- thread_out.append(f"{key}:{alive}:{age}")
- if key not in thread_info:
- thread_info[key] = {}
- thread_info[key]["alive"] = alive
- thread_info[key]["age"] = age
- if not alive:
- LOG.error(f"Thread {thread}")
- LOG.info(",".join(thread_out))
- stats_obj.set_thread_info(thread_info)
+ if "APRSDThreadList" in stats_json:
+ thread_list = stats_json["APRSDThreadList"]
+ for thread_name in thread_list:
+ thread = thread_list[thread_name]
+ alive = thread["alive"]
+ age = thread["age"]
+ key = thread["name"]
+ if not alive:
+ LOG.error(f"Thread {thread}")
+ LOG.info(f"{key: <15} Alive? {str(alive): <5} {str(age): <20}")
# check the APRS connection
cl = client.factory.create()
@@ -90,18 +96,18 @@ class KeepAliveThread(APRSDThread):
if not cl.is_alive() and self.cntr > 0:
LOG.error(f"{cl.__class__.__name__} is not alive!!! Resetting")
client.factory.create().reset()
- else:
- # See if we should reset the aprs-is client
- # Due to losing a keepalive from them
- delta_dict = utils.parse_delta_str(last_msg_time)
- delta = datetime.timedelta(**delta_dict)
-
- if delta > self.max_delta:
- # We haven't gotten a keepalive from aprs-is in a while
- # reset the connection.a
- if not client.KISSClient.is_enabled():
- LOG.warning(f"Resetting connection to APRS-IS {delta}")
- client.factory.create().reset()
+ # else:
+ # # See if we should reset the aprs-is client
+ # # Due to losing a keepalive from them
+ # delta_dict = utils.parse_delta_str(last_msg_time)
+ # delta = datetime.timedelta(**delta_dict)
+ #
+ # if delta > self.max_delta:
+ # # We haven't gotten a keepalive from aprs-is in a while
+ # # reset the connection.
+ # if not client.KISSClient.is_enabled():
+ # LOG.warning(f"Resetting connection to APRS-IS {delta}")
+ # client.factory.create().reset()
# Check version every day
delta = now - self.checker_time
@@ -110,6 +116,6 @@ class KeepAliveThread(APRSDThread):
level, msg = utils._check_version()
if level:
LOG.warning(msg)
- self.cntr += 1
+ self.cntr += 1
time.sleep(1)
return True
diff --git a/aprsd/threads/log_monitor.py b/aprsd/threads/log_monitor.py
index 8b93ab5..098e6ce 100644
--- a/aprsd/threads/log_monitor.py
+++ b/aprsd/threads/log_monitor.py
@@ -1,25 +1,56 @@
+import datetime
import logging
import threading
+from oslo_config import cfg
+import requests
import wrapt
from aprsd import threads
from aprsd.log import log
+CONF = cfg.CONF
LOG = logging.getLogger("APRSD")
+def send_log_entries(force=False):
+ """Send all of the log entries to the web interface."""
+ if CONF.admin.web_enabled:
+ if force or LogEntries().is_purge_ready():
+ entries = LogEntries().get_all_and_purge()
+ LOG.debug(f"Sending log entries {len(entries)}")
+ if entries:
+ try:
+ requests.post(
+ f"http://{CONF.admin.web_ip}:{CONF.admin.web_port}/log_entries",
+ json=entries,
+ auth=(CONF.admin.user, CONF.admin.password),
+ )
+ except Exception as ex:
+ LOG.warning(f"Failed to send log entries {len(entries)}")
+ LOG.warning(ex)
+
+
class LogEntries:
entries = []
lock = threading.Lock()
_instance = None
+ last_purge = datetime.datetime.now()
+ max_delta = datetime.timedelta(
+ hours=0.0, minutes=0, seconds=2,
+ )
def __new__(cls, *args, **kwargs):
if cls._instance is None:
cls._instance = super().__new__(cls)
return cls._instance
+ def stats(self) -> dict:
+ return {
+ "log_entries": self.entries,
+ }
+
@wrapt.synchronized(lock)
def add(self, entry):
self.entries.append(entry)
@@ -28,8 +59,18 @@ class LogEntries:
def get_all_and_purge(self):
entries = self.entries.copy()
self.entries = []
+ self.last_purge = datetime.datetime.now()
return entries
+ def is_purge_ready(self):
+ now = datetime.datetime.now()
+ if (
+ now - self.last_purge > self.max_delta
+ and len(self.entries) > 1
+ ):
+ return True
+ return False
+
@wrapt.synchronized(lock)
def __len__(self):
return len(self.entries)
@@ -40,6 +81,10 @@ class LogMonitorThread(threads.APRSDThread):
def __init__(self):
super().__init__("LogMonitorThread")
+ def stop(self):
+ send_log_entries(force=True)
+ super().stop()
+
def loop(self):
try:
record = log.logging_queue.get(block=True, timeout=2)
@@ -54,6 +99,7 @@ class LogMonitorThread(threads.APRSDThread):
# Just ignore thi
pass
+ send_log_entries()
return True
def json_record(self, record):
diff --git a/aprsd/threads/rx.py b/aprsd/threads/rx.py
index e78a204..b643b65 100644
--- a/aprsd/threads/rx.py
+++ b/aprsd/threads/rx.py
@@ -6,7 +6,7 @@ import time
import aprslib
from oslo_config import cfg
-from aprsd import client, packets, plugin, stats
+from aprsd import client, packets, plugin
from aprsd.packets import log as packet_log
from aprsd.threads import APRSDThread, tx
@@ -27,7 +27,6 @@ class APRSDRXThread(APRSDThread):
self._client.stop()
def loop(self):
- LOG.debug(f"RX_MSG-LOOP {self.loop_interval}")
if not self._client:
self._client = client.factory.create()
time.sleep(1)
@@ -43,31 +42,29 @@ class APRSDRXThread(APRSDThread):
# and the aprslib developer didn't want to allow a PR to add
# kwargs. :(
# https://github.com/rossengeorgiev/aprs-python/pull/56
- LOG.debug(f"Calling client consumer CL {self._client}")
self._client.consumer(
self._process_packet, raw=False, blocking=False,
)
- LOG.debug(f"Consumer done {self._client}")
except (
aprslib.exceptions.ConnectionDrop,
aprslib.exceptions.ConnectionError,
):
LOG.error("Connection dropped, reconnecting")
- time.sleep(5)
# Force the deletion of the client object connected to aprs
# This will cause a reconnect, next time client.get_client()
# is called
self._client.reset()
- except Exception as ex:
- LOG.error("Something bad happened!!!")
- LOG.exception(ex)
- return False
+ time.sleep(5)
+ except Exception:
+ # LOG.exception(ex)
+ LOG.error("Resetting connection and trying again.")
+ self._client.reset()
+ time.sleep(5)
# Continue to loop
return True
def _process_packet(self, *args, **kwargs):
"""Intermediate callback so we can update the keepalive time."""
- stats.APRSDStats().set_aprsis_keepalive()
# Now call the 'real' packet processing for a RX'x packet
self.process_packet(*args, **kwargs)
@@ -155,7 +152,6 @@ class APRSDProcessPacketThread(APRSDThread):
def __init__(self, packet_queue):
self.packet_queue = packet_queue
super().__init__("ProcessPKT")
- self._loop_cnt = 1
def process_ack_packet(self, packet):
"""We got an ack for a message, no need to resend it."""
@@ -178,12 +174,11 @@ class APRSDProcessPacketThread(APRSDThread):
self.process_packet(packet)
except queue.Empty:
pass
- self._loop_cnt += 1
return True
def process_packet(self, packet):
"""Process a packet received from aprs-is server."""
- LOG.debug(f"ProcessPKT-LOOP {self._loop_cnt}")
+ LOG.debug(f"ProcessPKT-LOOP {self.loop_count}")
our_call = CONF.callsign.lower()
from_call = packet.from_call
diff --git a/aprsd/threads/stats.py b/aprsd/threads/stats.py
new file mode 100644
index 0000000..a6517be
--- /dev/null
+++ b/aprsd/threads/stats.py
@@ -0,0 +1,38 @@
+import logging
+import threading
+import time
+
+from oslo_config import cfg
+
+from aprsd.stats import collector
+from aprsd.threads import APRSDThread
+from aprsd.utils import objectstore
+
+
+CONF = cfg.CONF
+LOG = logging.getLogger("APRSD")
+
+
+class StatsStore(objectstore.ObjectStoreMixin):
+ """Container to save the stats from the collector."""
+ lock = threading.Lock()
+
+
+class APRSDStatsStoreThread(APRSDThread):
+ """Save APRSD Stats to disk periodically."""
+
+ # how often in seconds to write the file
+ save_interval = 10
+
+ def __init__(self):
+ super().__init__("StatsStore")
+
+ def loop(self):
+ if self.loop_count % self.save_interval == 0:
+ stats = collector.Collector().collect()
+ ss = StatsStore()
+ ss.data = stats
+ ss.save()
+
+ time.sleep(1)
+ return True
diff --git a/aprsd/threads/tx.py b/aprsd/threads/tx.py
index c84b4bf..3dbab16 100644
--- a/aprsd/threads/tx.py
+++ b/aprsd/threads/tx.py
@@ -77,7 +77,11 @@ def _send_direct(packet, aprs_client=None):
packet.update_timestamp()
packet_log.log(packet, tx=True)
- cl.send(packet)
+ try:
+ cl.send(packet)
+ except Exception as e:
+ LOG.error(f"Failed to send packet: {packet}")
+ LOG.error(e)
class SendPacketThread(aprsd_threads.APRSDThread):
@@ -232,7 +236,15 @@ class BeaconSendThread(aprsd_threads.APRSDThread):
comment="APRSD GPS Beacon",
symbol=CONF.beacon_symbol,
)
- send(pkt, direct=True)
+ try:
+ # Only send it once
+ pkt.retry_count = 1
+ send(pkt, direct=True)
+ except Exception as e:
+ LOG.error(f"Failed to send beacon: {e}")
+ client.factory.create().reset()
+ time.sleep(5)
+
self._loop_cnt += 1
time.sleep(1)
return True
diff --git a/aprsd/utils/__init__.py b/aprsd/utils/__init__.py
index 1924172..eb24fac 100644
--- a/aprsd/utils/__init__.py
+++ b/aprsd/utils/__init__.py
@@ -1,6 +1,7 @@
"""Utilities and helper functions."""
import errno
+import functools
import os
import re
import sys
@@ -22,6 +23,17 @@ else:
from collections.abc import MutableMapping
+def singleton(cls):
+ """Make a class a Singleton class (only one instance)"""
+ @functools.wraps(cls)
+ def wrapper_singleton(*args, **kwargs):
+ if wrapper_singleton.instance is None:
+ wrapper_singleton.instance = cls(*args, **kwargs)
+ return wrapper_singleton.instance
+ wrapper_singleton.instance = None
+ return wrapper_singleton
+
+
def env(*vars, **kwargs):
"""This returns the first environment variable set.
if none are non-empty, defaults to '' or keyword arg default
diff --git a/aprsd/utils/json.py b/aprsd/utils/json.py
index 8876738..648238a 100644
--- a/aprsd/utils/json.py
+++ b/aprsd/utils/json.py
@@ -3,6 +3,8 @@ import decimal
import json
import sys
+from aprsd.packets import core
+
class EnhancedJSONEncoder(json.JSONEncoder):
def default(self, obj):
@@ -42,6 +44,24 @@ class EnhancedJSONEncoder(json.JSONEncoder):
return super().default(obj)
+class SimpleJSONEncoder(json.JSONEncoder):
+ def default(self, obj):
+ if isinstance(obj, datetime.datetime):
+ return obj.isoformat()
+ elif isinstance(obj, datetime.date):
+ return str(obj)
+ elif isinstance(obj, datetime.time):
+ return str(obj)
+ elif isinstance(obj, datetime.timedelta):
+ return str(obj)
+ elif isinstance(obj, decimal.Decimal):
+ return str(obj)
+ elif isinstance(obj, core.Packet):
+ return obj.to_dict()
+ else:
+ return super().default(obj)
+
+
class EnhancedJSONDecoder(json.JSONDecoder):
def __init__(self, *args, **kwargs):
diff --git a/aprsd/utils/objectstore.py b/aprsd/utils/objectstore.py
index 7431b7e..1abcaf5 100644
--- a/aprsd/utils/objectstore.py
+++ b/aprsd/utils/objectstore.py
@@ -71,12 +71,13 @@ class ObjectStoreMixin:
if not CONF.enable_save:
return
if len(self) > 0:
+ save_filename = self._save_filename()
LOG.info(
f"{self.__class__.__name__}::Saving"
- f" {len(self)} entries to disk at"
- f"{CONF.save_location}",
+ f" {len(self)} entries to disk at "
+ f"{save_filename}",
)
- with open(self._save_filename(), "wb+") as fp:
+ with open(save_filename, "wb+") as fp:
pickle.dump(self._dump(), fp)
else:
LOG.debug(
diff --git a/aprsd/web/admin/static/css/prism.css b/aprsd/web/admin/static/css/prism.css
index 8511262..e088b7f 100644
--- a/aprsd/web/admin/static/css/prism.css
+++ b/aprsd/web/admin/static/css/prism.css
@@ -1,189 +1,4 @@
-/* PrismJS 1.24.1
-https://prismjs.com/download.html#themes=prism-tomorrow&languages=markup+css+clike+javascript+log&plugins=show-language+toolbar */
-/**
- * prism.js tomorrow night eighties for JavaScript, CoffeeScript, CSS and HTML
- * Based on https://github.com/chriskempson/tomorrow-theme
- * @author Rose Pritchard
- */
-
-code[class*="language-"],
-pre[class*="language-"] {
- color: #ccc;
- background: none;
- font-family: Consolas, Monaco, 'Andale Mono', 'Ubuntu Mono', monospace;
- font-size: 1em;
- text-align: left;
- white-space: pre;
- word-spacing: normal;
- word-break: normal;
- word-wrap: normal;
- line-height: 1.5;
-
- -moz-tab-size: 4;
- -o-tab-size: 4;
- tab-size: 4;
-
- -webkit-hyphens: none;
- -moz-hyphens: none;
- -ms-hyphens: none;
- hyphens: none;
-
-}
-
-/* Code blocks */
-pre[class*="language-"] {
- padding: 1em;
- margin: .5em 0;
- overflow: auto;
-}
-
-:not(pre) > code[class*="language-"],
-pre[class*="language-"] {
- background: #2d2d2d;
-}
-
-/* Inline code */
-:not(pre) > code[class*="language-"] {
- padding: .1em;
- border-radius: .3em;
- white-space: normal;
-}
-
-.token.comment,
-.token.block-comment,
-.token.prolog,
-.token.doctype,
-.token.cdata {
- color: #999;
-}
-
-.token.punctuation {
- color: #ccc;
-}
-
-.token.tag,
-.token.attr-name,
-.token.namespace,
-.token.deleted {
- color: #e2777a;
-}
-
-.token.function-name {
- color: #6196cc;
-}
-
-.token.boolean,
-.token.number,
-.token.function {
- color: #f08d49;
-}
-
-.token.property,
-.token.class-name,
-.token.constant,
-.token.symbol {
- color: #f8c555;
-}
-
-.token.selector,
-.token.important,
-.token.atrule,
-.token.keyword,
-.token.builtin {
- color: #cc99cd;
-}
-
-.token.string,
-.token.char,
-.token.attr-value,
-.token.regex,
-.token.variable {
- color: #7ec699;
-}
-
-.token.operator,
-.token.entity,
-.token.url {
- color: #67cdcc;
-}
-
-.token.important,
-.token.bold {
- font-weight: bold;
-}
-.token.italic {
- font-style: italic;
-}
-
-.token.entity {
- cursor: help;
-}
-
-.token.inserted {
- color: green;
-}
-
-div.code-toolbar {
- position: relative;
-}
-
-div.code-toolbar > .toolbar {
- position: absolute;
- top: .3em;
- right: .2em;
- transition: opacity 0.3s ease-in-out;
- opacity: 0;
-}
-
-div.code-toolbar:hover > .toolbar {
- opacity: 1;
-}
-
-/* Separate line b/c rules are thrown out if selector is invalid.
- IE11 and old Edge versions don't support :focus-within. */
-div.code-toolbar:focus-within > .toolbar {
- opacity: 1;
-}
-
-div.code-toolbar > .toolbar > .toolbar-item {
- display: inline-block;
-}
-
-div.code-toolbar > .toolbar > .toolbar-item > a {
- cursor: pointer;
-}
-
-div.code-toolbar > .toolbar > .toolbar-item > button {
- background: none;
- border: 0;
- color: inherit;
- font: inherit;
- line-height: normal;
- overflow: visible;
- padding: 0;
- -webkit-user-select: none; /* for button */
- -moz-user-select: none;
- -ms-user-select: none;
-}
-
-div.code-toolbar > .toolbar > .toolbar-item > a,
-div.code-toolbar > .toolbar > .toolbar-item > button,
-div.code-toolbar > .toolbar > .toolbar-item > span {
- color: #bbb;
- font-size: .8em;
- padding: 0 .5em;
- background: #f5f2f0;
- background: rgba(224, 224, 224, 0.2);
- box-shadow: 0 2px 0 0 rgba(0,0,0,0.2);
- border-radius: .5em;
-}
-
-div.code-toolbar > .toolbar > .toolbar-item > a:hover,
-div.code-toolbar > .toolbar > .toolbar-item > a:focus,
-div.code-toolbar > .toolbar > .toolbar-item > button:hover,
-div.code-toolbar > .toolbar > .toolbar-item > button:focus,
-div.code-toolbar > .toolbar > .toolbar-item > span:hover,
-div.code-toolbar > .toolbar > .toolbar-item > span:focus {
- color: inherit;
- text-decoration: none;
-}
+/* PrismJS 1.29.0
+https://prismjs.com/download.html#themes=prism-tomorrow&languages=markup+css+clike+javascript+json+json5+log&plugins=show-language+toolbar */
+code[class*=language-],pre[class*=language-]{color:#ccc;background:0 0;font-family:Consolas,Monaco,'Andale Mono','Ubuntu Mono',monospace;font-size:1em;text-align:left;white-space:pre;word-spacing:normal;word-break:normal;word-wrap:normal;line-height:1.5;-moz-tab-size:4;-o-tab-size:4;tab-size:4;-webkit-hyphens:none;-moz-hyphens:none;-ms-hyphens:none;hyphens:none}pre[class*=language-]{padding:1em;margin:.5em 0;overflow:auto}:not(pre)>code[class*=language-],pre[class*=language-]{background:#2d2d2d}:not(pre)>code[class*=language-]{padding:.1em;border-radius:.3em;white-space:normal}.token.block-comment,.token.cdata,.token.comment,.token.doctype,.token.prolog{color:#999}.token.punctuation{color:#ccc}.token.attr-name,.token.deleted,.token.namespace,.token.tag{color:#e2777a}.token.function-name{color:#6196cc}.token.boolean,.token.function,.token.number{color:#f08d49}.token.class-name,.token.constant,.token.property,.token.symbol{color:#f8c555}.token.atrule,.token.builtin,.token.important,.token.keyword,.token.selector{color:#cc99cd}.token.attr-value,.token.char,.token.regex,.token.string,.token.variable{color:#7ec699}.token.entity,.token.operator,.token.url{color:#67cdcc}.token.bold,.token.important{font-weight:700}.token.italic{font-style:italic}.token.entity{cursor:help}.token.inserted{color:green}
+div.code-toolbar{position:relative}div.code-toolbar>.toolbar{position:absolute;z-index:10;top:.3em;right:.2em;transition:opacity .3s ease-in-out;opacity:0}div.code-toolbar:hover>.toolbar{opacity:1}div.code-toolbar:focus-within>.toolbar{opacity:1}div.code-toolbar>.toolbar>.toolbar-item{display:inline-block}div.code-toolbar>.toolbar>.toolbar-item>a{cursor:pointer}div.code-toolbar>.toolbar>.toolbar-item>button{background:0 0;border:0;color:inherit;font:inherit;line-height:normal;overflow:visible;padding:0;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none}div.code-toolbar>.toolbar>.toolbar-item>a,div.code-toolbar>.toolbar>.toolbar-item>button,div.code-toolbar>.toolbar>.toolbar-item>span{color:#bbb;font-size:.8em;padding:0 .5em;background:#f5f2f0;background:rgba(224,224,224,.2);box-shadow:0 2px 0 0 rgba(0,0,0,.2);border-radius:.5em}div.code-toolbar>.toolbar>.toolbar-item>a:focus,div.code-toolbar>.toolbar>.toolbar-item>a:hover,div.code-toolbar>.toolbar>.toolbar-item>button:focus,div.code-toolbar>.toolbar>.toolbar-item>button:hover,div.code-toolbar>.toolbar>.toolbar-item>span:focus,div.code-toolbar>.toolbar>.toolbar-item>span:hover{color:inherit;text-decoration:none}
diff --git a/aprsd/web/admin/static/js/charts.js b/aprsd/web/admin/static/js/charts.js
index 4ceb6d7..237641a 100644
--- a/aprsd/web/admin/static/js/charts.js
+++ b/aprsd/web/admin/static/js/charts.js
@@ -219,15 +219,17 @@ function updateQuadData(chart, label, first, second, third, fourth) {
}
function update_stats( data ) {
- our_callsign = data["stats"]["aprsd"]["callsign"];
- $("#version").text( data["stats"]["aprsd"]["version"] );
+ our_callsign = data["APRSDStats"]["callsign"];
+ $("#version").text( data["APRSDStats"]["version"] );
$("#aprs_connection").html( data["aprs_connection"] );
- $("#uptime").text( "uptime: " + data["stats"]["aprsd"]["uptime"] );
+ $("#uptime").text( "uptime: " + data["APRSDStats"]["uptime"] );
const html_pretty = Prism.highlight(JSON.stringify(data, null, '\t'), Prism.languages.json, 'json');
$("#jsonstats").html(html_pretty);
short_time = data["time"].split(/\s(.+)/)[1];
- updateDualData(packets_chart, short_time, data["stats"]["packets"]["sent"], data["stats"]["packets"]["received"]);
- updateQuadData(message_chart, short_time, data["stats"]["messages"]["sent"], data["stats"]["messages"]["received"], data["stats"]["messages"]["ack_sent"], data["stats"]["messages"]["ack_recieved"]);
- updateDualData(email_chart, short_time, data["stats"]["email"]["sent"], data["stats"]["email"]["recieved"]);
- updateDualData(memory_chart, short_time, data["stats"]["aprsd"]["memory_peak"], data["stats"]["aprsd"]["memory_current"]);
+ packet_list = data["PacketList"]["packets"];
+ updateDualData(packets_chart, short_time, data["PacketList"]["sent"], data["PacketList"]["received"]);
+ updateQuadData(message_chart, short_time, packet_list["MessagePacket"]["tx"], packet_list["MessagePacket"]["rx"],
+ packet_list["AckPacket"]["tx"], packet_list["AckPacket"]["rx"]);
+ updateDualData(email_chart, short_time, data["EmailStats"]["sent"], data["EmailStats"]["recieved"]);
+ updateDualData(memory_chart, short_time, data["APRSDStats"]["memory_peak"], data["APRSDStats"]["memory_current"]);
}
diff --git a/aprsd/web/admin/static/js/echarts.js b/aprsd/web/admin/static/js/echarts.js
index adeb5f6..8d67a73 100644
--- a/aprsd/web/admin/static/js/echarts.js
+++ b/aprsd/web/admin/static/js/echarts.js
@@ -327,7 +327,6 @@ function updatePacketTypesChart() {
option = {
series: series
}
- console.log(option)
packet_types_chart.setOption(option);
}
@@ -381,22 +380,23 @@ function updateAcksChart() {
}
function update_stats( data ) {
- console.log(data);
- our_callsign = data["stats"]["aprsd"]["callsign"];
- $("#version").text( data["stats"]["aprsd"]["version"] );
- $("#aprs_connection").html( data["aprs_connection"] );
- $("#uptime").text( "uptime: " + data["stats"]["aprsd"]["uptime"] );
+ console.log("update_stats() echarts.js called")
+ stats = data["stats"];
+ our_callsign = stats["APRSDStats"]["callsign"];
+ $("#version").text( stats["APRSDStats"]["version"] );
+ $("#aprs_connection").html( stats["aprs_connection"] );
+ $("#uptime").text( "uptime: " + stats["APRSDStats"]["uptime"] );
const html_pretty = Prism.highlight(JSON.stringify(data, null, '\t'), Prism.languages.json, 'json');
$("#jsonstats").html(html_pretty);
t = Date.parse(data["time"]);
ts = new Date(t);
- updatePacketData(packets_chart, ts, data["stats"]["packets"]["sent"], data["stats"]["packets"]["received"]);
- updatePacketTypesData(ts, data["stats"]["packets"]["types"]);
+ updatePacketData(packets_chart, ts, stats["PacketList"]["tx"], stats["PacketList"]["rx"]);
+ updatePacketTypesData(ts, stats["PacketList"]["types"]);
updatePacketTypesChart();
updateMessagesChart();
updateAcksChart();
- updateMemChart(ts, data["stats"]["aprsd"]["memory_current"], data["stats"]["aprsd"]["memory_peak"]);
+ updateMemChart(ts, stats["APRSDStats"]["memory_current"], stats["APRSDStats"]["memory_peak"]);
//updateQuadData(message_chart, short_time, data["stats"]["messages"]["sent"], data["stats"]["messages"]["received"], data["stats"]["messages"]["ack_sent"], data["stats"]["messages"]["ack_recieved"]);
//updateDualData(email_chart, short_time, data["stats"]["email"]["sent"], data["stats"]["email"]["recieved"]);
//updateDualData(memory_chart, short_time, data["stats"]["aprsd"]["memory_peak"], data["stats"]["aprsd"]["memory_current"]);
diff --git a/aprsd/web/admin/static/js/main.js b/aprsd/web/admin/static/js/main.js
index 4598f59..99c956a 100644
--- a/aprsd/web/admin/static/js/main.js
+++ b/aprsd/web/admin/static/js/main.js
@@ -24,11 +24,12 @@ function ord(str){return str.charCodeAt(0);}
function update_watchlist( data ) {
- // Update the watch list
+ // Update the watch list
+ stats = data["stats"];
var watchdiv = $("#watchDiv");
var html_str = 'HAM Callsign | Age since last seen by APRSD |
'
watchdiv.html('')
- jQuery.each(data["stats"]["aprsd"]["watch_list"], function(i, val) {
+ jQuery.each(stats["WatchList"], function(i, val) {
html_str += '' + i + ' | ' + val["last"] + ' |
'
});
html_str += "
";
@@ -60,12 +61,13 @@ function update_watchlist_from_packet(callsign, val) {
}
function update_seenlist( data ) {
+ stats = data["stats"];
var seendiv = $("#seenDiv");
var html_str = ''
html_str += 'HAM Callsign | Age since last seen by APRSD | '
html_str += 'Number of packets RX |
'
seendiv.html('')
- var seen_list = data["stats"]["aprsd"]["seen_list"]
+ var seen_list = stats["SeenList"]
var len = Object.keys(seen_list).length
$('#seen_count').html(len)
jQuery.each(seen_list, function(i, val) {
@@ -79,6 +81,7 @@ function update_seenlist( data ) {
}
function update_plugins( data ) {
+ stats = data["stats"];
var plugindiv = $("#pluginDiv");
var html_str = ''
html_str += 'Plugin Name | Plugin Enabled? | '
@@ -87,7 +90,7 @@ function update_plugins( data ) {
html_str += '
'
plugindiv.html('')
- var plugins = data["stats"]["plugins"];
+ var plugins = stats["PluginManager"];
var keys = Object.keys(plugins);
keys.sort();
for (var i=0; i 0) {
packetsdiv.html('')
}
- jQuery.each(data, function(i, val) {
- pkt = JSON.parse(val);
+ jQuery.each(data.packets, function(i, val) {
+ pkt = val;
update_watchlist_from_packet(pkt['from_call'], pkt);
if ( packet_list.hasOwnProperty(pkt['timestamp']) == false ) {
diff --git a/aprsd/web/admin/static/js/prism.js b/aprsd/web/admin/static/js/prism.js
index f232b27..2b957cb 100644
--- a/aprsd/web/admin/static/js/prism.js
+++ b/aprsd/web/admin/static/js/prism.js
@@ -1,2247 +1,12 @@
-/* PrismJS 1.24.1
+/* PrismJS 1.29.0
https://prismjs.com/download.html#themes=prism-tomorrow&languages=markup+css+clike+javascript+json+json5+log&plugins=show-language+toolbar */
-///
-
-var _self = (typeof window !== 'undefined')
- ? window // if in browser
- : (
- (typeof WorkerGlobalScope !== 'undefined' && self instanceof WorkerGlobalScope)
- ? self // if in worker
- : {} // if in node js
- );
-
-/**
- * Prism: Lightweight, robust, elegant syntax highlighting
- *
- * @license MIT
- * @author Lea Verou
- * @namespace
- * @public
- */
-var Prism = (function (_self) {
-
- // Private helper vars
- var lang = /\blang(?:uage)?-([\w-]+)\b/i;
- var uniqueId = 0;
-
- // The grammar object for plaintext
- var plainTextGrammar = {};
-
-
- var _ = {
- /**
- * By default, Prism will attempt to highlight all code elements (by calling {@link Prism.highlightAll}) on the
- * current page after the page finished loading. This might be a problem if e.g. you wanted to asynchronously load
- * additional languages or plugins yourself.
- *
- * By setting this value to `true`, Prism will not automatically highlight all code elements on the page.
- *
- * You obviously have to change this value before the automatic highlighting started. To do this, you can add an
- * empty Prism object into the global scope before loading the Prism script like this:
- *
- * ```js
- * window.Prism = window.Prism || {};
- * Prism.manual = true;
- * // add a new