Merge pull request #159 from craigerl/stats-rework

Reworked the stats making the rpc server obsolete.
This commit is contained in:
Walter A. Boring IV 2024-04-08 16:12:16 -04:00 committed by GitHub
commit 1267a53ec8
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
56 changed files with 862 additions and 4053 deletions

View File

@ -1,15 +1,18 @@
import abc import abc
import datetime
import logging import logging
import threading
import time import time
import aprslib import aprslib
from aprslib.exceptions import LoginError from aprslib.exceptions import LoginError
from oslo_config import cfg from oslo_config import cfg
import wrapt
from aprsd import exception from aprsd import exception
from aprsd.clients import aprsis, fake, kiss from aprsd.clients import aprsis, fake, kiss
from aprsd.packets import core, packet_list from aprsd.packets import core, packet_list
from aprsd.utils import trace from aprsd.utils import singleton, trace
CONF = cfg.CONF CONF = cfg.CONF
@ -25,6 +28,34 @@ TRANSPORT_FAKE = "fake"
factory = None factory = None
@singleton
class APRSClientStats:
    """Singleton reporting stats about the active APRS client connection."""

    lock = threading.Lock()

    @wrapt.synchronized(lock)
    def stats(self, serializable=False):
        """Build a dict describing the current client connection.

        :param serializable: when True, datetime values are converted to
            ISO-8601 strings so the result can be JSON-serialized.
        """
        client = factory.create()
        transport = client.transport()
        stats = {
            "transport": transport,
            "filter": client.filter,
            "connected": client.connected,
        }
        if transport == TRANSPORT_APRSIS:
            stats["server_string"] = client.client.server_string
            keepalive = client.client.aprsd_keepalive
            stats["server_keepalive"] = (
                keepalive.isoformat() if serializable else keepalive
            )
        elif transport == TRANSPORT_TCPKISS:
            stats.update({
                "host": CONF.kiss_tcp.host,
                "port": CONF.kiss_tcp.port,
            })
        elif transport == TRANSPORT_SERIALKISS:
            stats["device"] = CONF.kiss_serial.device
        return stats
class Client: class Client:
"""Singleton client class that constructs the aprslib connection.""" """Singleton client class that constructs the aprslib connection."""
@ -32,8 +63,8 @@ class Client:
_client = None _client = None
connected = False connected = False
server_string = None
filter = None filter = None
lock = threading.Lock()
def __new__(cls, *args, **kwargs): def __new__(cls, *args, **kwargs):
"""This magic turns this into a singleton.""" """This magic turns this into a singleton."""
@ -43,6 +74,10 @@ class Client:
cls._instance._create_client() cls._instance._create_client()
return cls._instance return cls._instance
@abc.abstractmethod
def stats(self) -> dict:
pass
def set_filter(self, filter): def set_filter(self, filter):
self.filter = filter self.filter = filter
if self._client: if self._client:
@ -69,9 +104,12 @@ class Client:
packet_list.PacketList().tx(packet) packet_list.PacketList().tx(packet)
self.client.send(packet) self.client.send(packet)
@wrapt.synchronized(lock)
def reset(self): def reset(self):
"""Call this to force a rebuild/reconnect.""" """Call this to force a rebuild/reconnect."""
LOG.info("Resetting client connection.")
if self._client: if self._client:
self._client.close()
del self._client del self._client
self._create_client() self._create_client()
else: else:
@ -102,11 +140,34 @@ class Client:
def consumer(self, callback, blocking=False, immortal=False, raw=False): def consumer(self, callback, blocking=False, immortal=False, raw=False):
pass pass
@abc.abstractmethod
def is_alive(self):
pass
@abc.abstractmethod
def close(self):
pass
class APRSISClient(Client): class APRSISClient(Client):
_client = None _client = None
def __init__(self):
max_timeout = {"hours": 0.0, "minutes": 2, "seconds": 0}
self.max_delta = datetime.timedelta(**max_timeout)
def stats(self) -> dict:
stats = {}
if self.is_configured():
stats = {
"server_string": self._client.server_string,
"sever_keepalive": self._client.aprsd_keepalive,
"filter": self.filter,
}
return stats
@staticmethod @staticmethod
def is_enabled(): def is_enabled():
# Defaults to True if the enabled flag is non existent # Defaults to True if the enabled flag is non existent
@ -138,14 +199,24 @@ class APRSISClient(Client):
return True return True
return True return True
def _is_stale_connection(self):
delta = datetime.datetime.now() - self._client.aprsd_keepalive
if delta > self.max_delta:
LOG.error(f"Connection is stale, last heard {delta} ago.")
return True
def is_alive(self): def is_alive(self):
if self._client: if self._client:
LOG.warning(f"APRS_CLIENT {self._client} alive? {self._client.is_alive()}") return self._client.is_alive() and not self._is_stale_connection()
return self._client.is_alive()
else: else:
LOG.warning(f"APRS_CLIENT {self._client} alive? NO!!!") LOG.warning(f"APRS_CLIENT {self._client} alive? NO!!!")
return False return False
def close(self):
if self._client:
self._client.stop()
self._client.close()
@staticmethod @staticmethod
def transport(): def transport():
return TRANSPORT_APRSIS return TRANSPORT_APRSIS
@ -159,25 +230,25 @@ class APRSISClient(Client):
password = CONF.aprs_network.password password = CONF.aprs_network.password
host = CONF.aprs_network.host host = CONF.aprs_network.host
port = CONF.aprs_network.port port = CONF.aprs_network.port
connected = False self.connected = False
backoff = 1 backoff = 1
aprs_client = None aprs_client = None
while not connected: while not self.connected:
try: try:
LOG.info(f"Creating aprslib client({host}:{port}) and logging in {user}.") LOG.info(f"Creating aprslib client({host}:{port}) and logging in {user}.")
aprs_client = aprsis.Aprsdis(user, passwd=password, host=host, port=port) aprs_client = aprsis.Aprsdis(user, passwd=password, host=host, port=port)
# Force the log to be the same # Force the log to be the same
aprs_client.logger = LOG aprs_client.logger = LOG
aprs_client.connect() aprs_client.connect()
connected = True self.connected = True
backoff = 1 backoff = 1
except LoginError as e: except LoginError as e:
LOG.error(f"Failed to login to APRS-IS Server '{e}'") LOG.error(f"Failed to login to APRS-IS Server '{e}'")
connected = False self.connected = False
time.sleep(backoff) time.sleep(backoff)
except Exception as e: except Exception as e:
LOG.error(f"Unable to connect to APRS-IS server. '{e}' ") LOG.error(f"Unable to connect to APRS-IS server. '{e}' ")
connected = False self.connected = False
time.sleep(backoff) time.sleep(backoff)
# Don't allow the backoff to go to infinity. # Don't allow the backoff to go to infinity.
if backoff > 5: if backoff > 5:
@ -190,17 +261,24 @@ class APRSISClient(Client):
return aprs_client return aprs_client
def consumer(self, callback, blocking=False, immortal=False, raw=False): def consumer(self, callback, blocking=False, immortal=False, raw=False):
if self.is_alive(): self._client.consumer(
self._client.consumer( callback, blocking=blocking,
callback, blocking=blocking, immortal=immortal, raw=raw,
immortal=immortal, raw=raw, )
)
class KISSClient(Client): class KISSClient(Client):
_client = None _client = None
def stats(self) -> dict:
stats = {}
if self.is_configured():
return {
"transport": self.transport(),
}
return stats
@staticmethod @staticmethod
def is_enabled(): def is_enabled():
"""Return if tcp or serial KISS is enabled.""" """Return if tcp or serial KISS is enabled."""
@ -239,6 +317,10 @@ class KISSClient(Client):
else: else:
return False return False
def close(self):
if self._client:
self._client.stop()
@staticmethod @staticmethod
def transport(): def transport():
if CONF.kiss_serial.enabled: if CONF.kiss_serial.enabled:
@ -268,6 +350,7 @@ class KISSClient(Client):
def setup_connection(self): def setup_connection(self):
self._client = kiss.KISS3Client() self._client = kiss.KISS3Client()
self.connected = True
return self._client return self._client
def consumer(self, callback, blocking=False, immortal=False, raw=False): def consumer(self, callback, blocking=False, immortal=False, raw=False):
@ -276,6 +359,9 @@ class KISSClient(Client):
class APRSDFakeClient(Client, metaclass=trace.TraceWrapperMetaclass): class APRSDFakeClient(Client, metaclass=trace.TraceWrapperMetaclass):
    def stats(self) -> dict:
        """The fake client exposes no stats; always returns an empty dict."""
        return {}
@staticmethod @staticmethod
def is_enabled(): def is_enabled():
if CONF.fake_client.enabled: if CONF.fake_client.enabled:
@ -289,7 +375,11 @@ class APRSDFakeClient(Client, metaclass=trace.TraceWrapperMetaclass):
def is_alive(self): def is_alive(self):
return True return True
    def close(self):
        """No-op: the fake client holds no real connection to close."""
        pass
def setup_connection(self): def setup_connection(self):
self.connected = True
return fake.APRSDFakeClient() return fake.APRSDFakeClient()
@staticmethod @staticmethod
@ -329,7 +419,6 @@ class ClientFactory:
key = TRANSPORT_FAKE key = TRANSPORT_FAKE
builder = self._builders.get(key) builder = self._builders.get(key)
LOG.debug(f"ClientFactory Creating client of type '{key}'")
if not builder: if not builder:
raise ValueError(key) raise ValueError(key)
return builder() return builder()

View File

@ -1,3 +1,4 @@
import datetime
import logging import logging
import select import select
import threading import threading
@ -11,7 +12,6 @@ from aprslib.exceptions import (
import wrapt import wrapt
import aprsd import aprsd
from aprsd import stats
from aprsd.packets import core from aprsd.packets import core
@ -24,6 +24,9 @@ class Aprsdis(aprslib.IS):
# flag to tell us to stop # flag to tell us to stop
thread_stop = False thread_stop = False
# date for last time we heard from the server
aprsd_keepalive = datetime.datetime.now()
# timeout in seconds # timeout in seconds
select_timeout = 1 select_timeout = 1
lock = threading.Lock() lock = threading.Lock()
@ -142,7 +145,6 @@ class Aprsdis(aprslib.IS):
self.logger.info(f"Connected to {server_string}") self.logger.info(f"Connected to {server_string}")
self.server_string = server_string self.server_string = server_string
stats.APRSDStats().set_aprsis_server(server_string)
except LoginError as e: except LoginError as e:
self.logger.error(str(e)) self.logger.error(str(e))
@ -176,13 +178,14 @@ class Aprsdis(aprslib.IS):
try: try:
for line in self._socket_readlines(blocking): for line in self._socket_readlines(blocking):
if line[0:1] != b"#": if line[0:1] != b"#":
self.aprsd_keepalive = datetime.datetime.now()
if raw: if raw:
callback(line) callback(line)
else: else:
callback(self._parse(line)) callback(self._parse(line))
else: else:
self.logger.debug("Server: %s", line.decode("utf8")) self.logger.debug("Server: %s", line.decode("utf8"))
stats.APRSDStats().set_aprsis_keepalive() self.aprsd_keepalive = datetime.datetime.now()
except ParseError as exp: except ParseError as exp:
self.logger.log( self.logger.log(
11, 11,

View File

@ -1,10 +1,9 @@
# Fetch active stats from a remote running instance of aprsd server # Fetch active stats from a remote running instance of aprsd admin web interface.
# This uses the RPC server to fetch the stats from the remote server.
import logging import logging
import click import click
from oslo_config import cfg from oslo_config import cfg
import requests
from rich.console import Console from rich.console import Console
from rich.table import Table from rich.table import Table
@ -12,7 +11,6 @@ from rich.table import Table
import aprsd import aprsd
from aprsd import cli_helper from aprsd import cli_helper
from aprsd.main import cli from aprsd.main import cli
from aprsd.rpc import client as rpc_client
# setup the global logger # setup the global logger
@ -26,87 +24,80 @@ CONF = cfg.CONF
@click.option( @click.option(
"--host", type=str, "--host", type=str,
default=None, default=None,
help="IP address of the remote aprsd server to fetch stats from.", help="IP address of the remote aprsd admin web ui to fetch stats from.",
) )
@click.option( @click.option(
"--port", type=int, "--port", type=int,
default=None, default=None,
help="Port of the remote aprsd server rpc port to fetch stats from.", help="Port of the remote aprsd web admin interface to fetch stats from.",
)
@click.option(
"--magic-word", type=str,
default=None,
help="Magic word of the remote aprsd server rpc port to fetch stats from.",
) )
@click.pass_context @click.pass_context
@cli_helper.process_standard_options @cli_helper.process_standard_options
def fetch_stats(ctx, host, port, magic_word): def fetch_stats(ctx, host, port):
"""Fetch stats from a remote running instance of aprsd server.""" """Fetch stats from an APRSD admin web interface."""
LOG.info(f"APRSD Fetch-Stats started version: {aprsd.__version__}") console = Console()
console.print(f"APRSD Fetch-Stats started version: {aprsd.__version__}")
CONF.log_opt_values(LOG, logging.DEBUG) CONF.log_opt_values(LOG, logging.DEBUG)
if not host: if not host:
host = CONF.rpc_settings.ip host = CONF.admin.web_ip
if not port: if not port:
port = CONF.rpc_settings.port port = CONF.admin.web_port
if not magic_word:
magic_word = CONF.rpc_settings.magic_word
msg = f"Fetching stats from {host}:{port} with magic word '{magic_word}'" msg = f"Fetching stats from {host}:{port}"
console = Console()
console.print(msg) console.print(msg)
with console.status(msg): with console.status(msg):
client = rpc_client.RPCClient(host, port, magic_word) response = requests.get(f"http://{host}:{port}/stats", timeout=120)
stats = client.get_stats_dict() if not response:
if stats: console.print(
console.print_json(data=stats) f"Failed to fetch stats from {host}:{port}?",
else: style="bold red",
LOG.error(f"Failed to fetch stats via RPC aprsd server at {host}:{port}") )
return return
stats = response.json()
if not stats:
console.print(
f"Failed to fetch stats from aprsd admin ui at {host}:{port}",
style="bold red",
)
return
aprsd_title = ( aprsd_title = (
"APRSD " "APRSD "
f"[bold cyan]v{stats['aprsd']['version']}[/] " f"[bold cyan]v{stats['APRSDStats']['version']}[/] "
f"Callsign [bold green]{stats['aprsd']['callsign']}[/] " f"Callsign [bold green]{stats['APRSDStats']['callsign']}[/] "
f"Uptime [bold yellow]{stats['aprsd']['uptime']}[/]" f"Uptime [bold yellow]{stats['APRSDStats']['uptime']}[/]"
) )
console.rule(f"Stats from {host}:{port} with magic word '{magic_word}'") console.rule(f"Stats from {host}:{port}")
console.print("\n\n") console.print("\n\n")
console.rule(aprsd_title) console.rule(aprsd_title)
# Show the connection to APRS # Show the connection to APRS
# It can be a connection to an APRS-IS server or a local TNC via KISS or KISSTCP # It can be a connection to an APRS-IS server or a local TNC via KISS or KISSTCP
if "aprs-is" in stats: if "aprs-is" in stats:
title = f"APRS-IS Connection {stats['aprs-is']['server']}" title = f"APRS-IS Connection {stats['APRSClientStats']['server_string']}"
table = Table(title=title) table = Table(title=title)
table.add_column("Key") table.add_column("Key")
table.add_column("Value") table.add_column("Value")
for key, value in stats["aprs-is"].items(): for key, value in stats["APRSClientStats"].items():
table.add_row(key, value) table.add_row(key, value)
console.print(table) console.print(table)
threads_table = Table(title="Threads") threads_table = Table(title="Threads")
threads_table.add_column("Name") threads_table.add_column("Name")
threads_table.add_column("Alive?") threads_table.add_column("Alive?")
for name, alive in stats["aprsd"]["threads"].items(): for name, alive in stats["APRSDThreadList"].items():
threads_table.add_row(name, str(alive)) threads_table.add_row(name, str(alive))
console.print(threads_table) console.print(threads_table)
msgs_table = Table(title="Messages")
msgs_table.add_column("Key")
msgs_table.add_column("Value")
for key, value in stats["messages"].items():
msgs_table.add_row(key, str(value))
console.print(msgs_table)
packet_totals = Table(title="Packet Totals") packet_totals = Table(title="Packet Totals")
packet_totals.add_column("Key") packet_totals.add_column("Key")
packet_totals.add_column("Value") packet_totals.add_column("Value")
packet_totals.add_row("Total Received", str(stats["packets"]["total_received"])) packet_totals.add_row("Total Received", str(stats["PacketList"]["rx"]))
packet_totals.add_row("Total Sent", str(stats["packets"]["total_sent"])) packet_totals.add_row("Total Sent", str(stats["PacketList"]["tx"]))
packet_totals.add_row("Total Tracked", str(stats["packets"]["total_tracked"]))
console.print(packet_totals) console.print(packet_totals)
# Show each of the packet types # Show each of the packet types
@ -114,47 +105,52 @@ def fetch_stats(ctx, host, port, magic_word):
packets_table.add_column("Packet Type") packets_table.add_column("Packet Type")
packets_table.add_column("TX") packets_table.add_column("TX")
packets_table.add_column("RX") packets_table.add_column("RX")
for key, value in stats["packets"]["by_type"].items(): for key, value in stats["PacketList"]["packets"].items():
packets_table.add_row(key, str(value["tx"]), str(value["rx"])) packets_table.add_row(key, str(value["tx"]), str(value["rx"]))
console.print(packets_table) console.print(packets_table)
if "plugins" in stats: if "plugins" in stats:
count = len(stats["plugins"]) count = len(stats["PluginManager"])
plugins_table = Table(title=f"Plugins ({count})") plugins_table = Table(title=f"Plugins ({count})")
plugins_table.add_column("Plugin") plugins_table.add_column("Plugin")
plugins_table.add_column("Enabled") plugins_table.add_column("Enabled")
plugins_table.add_column("Version") plugins_table.add_column("Version")
plugins_table.add_column("TX") plugins_table.add_column("TX")
plugins_table.add_column("RX") plugins_table.add_column("RX")
for key, value in stats["plugins"].items(): plugins = stats["PluginManager"]
for key, value in plugins.items():
plugins_table.add_row( plugins_table.add_row(
key, key,
str(stats["plugins"][key]["enabled"]), str(plugins[key]["enabled"]),
stats["plugins"][key]["version"], plugins[key]["version"],
str(stats["plugins"][key]["tx"]), str(plugins[key]["tx"]),
str(stats["plugins"][key]["rx"]), str(plugins[key]["rx"]),
) )
console.print(plugins_table) console.print(plugins_table)
if "seen_list" in stats["aprsd"]: seen_list = stats.get("SeenList")
count = len(stats["aprsd"]["seen_list"])
if seen_list:
count = len(seen_list)
seen_table = Table(title=f"Seen List ({count})") seen_table = Table(title=f"Seen List ({count})")
seen_table.add_column("Callsign") seen_table.add_column("Callsign")
seen_table.add_column("Message Count") seen_table.add_column("Message Count")
seen_table.add_column("Last Heard") seen_table.add_column("Last Heard")
for key, value in stats["aprsd"]["seen_list"].items(): for key, value in seen_list.items():
seen_table.add_row(key, str(value["count"]), value["last"]) seen_table.add_row(key, str(value["count"]), value["last"])
console.print(seen_table) console.print(seen_table)
if "watch_list" in stats["aprsd"]: watch_list = stats.get("WatchList")
count = len(stats["aprsd"]["watch_list"])
if watch_list:
count = len(watch_list)
watch_table = Table(title=f"Watch List ({count})") watch_table = Table(title=f"Watch List ({count})")
watch_table.add_column("Callsign") watch_table.add_column("Callsign")
watch_table.add_column("Last Heard") watch_table.add_column("Last Heard")
for key, value in stats["aprsd"]["watch_list"].items(): for key, value in watch_list.items():
watch_table.add_row(key, value["last"]) watch_table.add_row(key, value["last"])
console.print(watch_table) console.print(watch_table)

View File

@ -13,11 +13,11 @@ from oslo_config import cfg
from rich.console import Console from rich.console import Console
import aprsd import aprsd
from aprsd import cli_helper, utils from aprsd import cli_helper
from aprsd import conf # noqa from aprsd import conf # noqa
# local imports here # local imports here
from aprsd.main import cli from aprsd.main import cli
from aprsd.rpc import client as aprsd_rpc_client from aprsd.threads import stats as stats_threads
# setup the global logger # setup the global logger
@ -39,46 +39,48 @@ console = Console()
@cli_helper.process_standard_options @cli_helper.process_standard_options
def healthcheck(ctx, timeout): def healthcheck(ctx, timeout):
"""Check the health of the running aprsd server.""" """Check the health of the running aprsd server."""
console.log(f"APRSD HealthCheck version: {aprsd.__version__}") ver_str = f"APRSD HealthCheck version: {aprsd.__version__}"
if not CONF.rpc_settings.enabled: console.log(ver_str)
LOG.error("Must enable rpc_settings.enabled to use healthcheck")
sys.exit(-1)
if not CONF.rpc_settings.ip:
LOG.error("Must enable rpc_settings.ip to use healthcheck")
sys.exit(-1)
if not CONF.rpc_settings.magic_word:
LOG.error("Must enable rpc_settings.magic_word to use healthcheck")
sys.exit(-1)
with console.status(f"APRSD HealthCheck version: {aprsd.__version__}") as status: with console.status(ver_str):
try: try:
status.update(f"Contacting APRSD via RPC {CONF.rpc_settings.ip}") stats_obj = stats_threads.StatsStore()
stats = aprsd_rpc_client.RPCClient().get_stats_dict() stats_obj.load()
stats = stats_obj.data
# console.print(stats)
except Exception as ex: except Exception as ex:
console.log(f"Failed to fetch healthcheck : '{ex}'") console.log(f"Failed to load stats: '{ex}'")
sys.exit(-1) sys.exit(-1)
else: else:
now = datetime.datetime.now()
if not stats: if not stats:
console.log("No stats from aprsd") console.log("No stats from aprsd")
sys.exit(-1) sys.exit(-1)
email_thread_last_update = stats["email"]["thread_last_update"]
if email_thread_last_update != "never": email_stats = stats.get("EmailStats")
delta = utils.parse_delta_str(email_thread_last_update) if email_stats:
d = datetime.timedelta(**delta) email_thread_last_update = email_stats["last_check_time"]
if email_thread_last_update != "never":
d = now - email_thread_last_update
max_timeout = {"hours": 0.0, "minutes": 5, "seconds": 0}
max_delta = datetime.timedelta(**max_timeout)
if d > max_delta:
console.log(f"Email thread is very old! {d}")
sys.exit(-1)
client_stats = stats.get("APRSClientStats")
if not client_stats:
console.log("No APRSClientStats")
sys.exit(-1)
else:
aprsis_last_update = client_stats["server_keepalive"]
d = now - aprsis_last_update
max_timeout = {"hours": 0.0, "minutes": 5, "seconds": 0} max_timeout = {"hours": 0.0, "minutes": 5, "seconds": 0}
max_delta = datetime.timedelta(**max_timeout) max_delta = datetime.timedelta(**max_timeout)
if d > max_delta: if d > max_delta:
console.log(f"Email thread is very old! {d}") LOG.error(f"APRS-IS last update is very old! {d}")
sys.exit(-1) sys.exit(-1)
aprsis_last_update = stats["aprs-is"]["last_update"] console.log("OK")
delta = utils.parse_delta_str(aprsis_last_update)
d = datetime.timedelta(**delta)
max_timeout = {"hours": 0.0, "minutes": 5, "seconds": 0}
max_delta = datetime.timedelta(**max_timeout)
if d > max_delta:
LOG.error(f"APRS-IS last update is very old! {d}")
sys.exit(-1)
sys.exit(0) sys.exit(0)

View File

@ -21,7 +21,7 @@ from aprsd import cli_helper
from aprsd import plugin as aprsd_plugin from aprsd import plugin as aprsd_plugin
from aprsd.main import cli from aprsd.main import cli
from aprsd.plugins import ( from aprsd.plugins import (
email, fortune, location, notify, ping, query, time, version, weather, email, fortune, location, notify, ping, time, version, weather,
) )
@ -122,7 +122,7 @@ def get_installed_extensions():
def show_built_in_plugins(console): def show_built_in_plugins(console):
modules = [email, fortune, location, notify, ping, query, time, version, weather] modules = [email, fortune, location, notify, ping, time, version, weather]
plugins = [] plugins = []
for module in modules: for module in modules:

View File

@ -15,10 +15,10 @@ from rich.console import Console
# local imports here # local imports here
import aprsd import aprsd
from aprsd import cli_helper, client, packets, plugin, stats, threads from aprsd import cli_helper, client, packets, plugin, threads
from aprsd.main import cli from aprsd.main import cli
from aprsd.packets import log as packet_log from aprsd.packets import log as packet_log
from aprsd.rpc import server as rpc_server from aprsd.stats import collector
from aprsd.threads import rx from aprsd.threads import rx
@ -38,7 +38,7 @@ def signal_handler(sig, frame):
), ),
) )
time.sleep(5) time.sleep(5)
LOG.info(stats.APRSDStats()) LOG.info(collector.Collector().collect())
class APRSDListenThread(rx.APRSDRXThread): class APRSDListenThread(rx.APRSDRXThread):
@ -169,6 +169,7 @@ def listen(
LOG.info(f"APRSD Listen Started version: {aprsd.__version__}") LOG.info(f"APRSD Listen Started version: {aprsd.__version__}")
CONF.log_opt_values(LOG, logging.DEBUG) CONF.log_opt_values(LOG, logging.DEBUG)
collector.Collector()
# Try and load saved MsgTrack list # Try and load saved MsgTrack list
LOG.debug("Loading saved MsgTrack object.") LOG.debug("Loading saved MsgTrack object.")
@ -192,10 +193,6 @@ def listen(
keepalive = threads.KeepAliveThread() keepalive = threads.KeepAliveThread()
# keepalive.start() # keepalive.start()
if CONF.rpc_settings.enabled:
rpc = rpc_server.APRSDRPCThread()
rpc.start()
pm = None pm = None
pm = plugin.PluginManager() pm = plugin.PluginManager()
if load_plugins: if load_plugins:
@ -206,6 +203,8 @@ def listen(
"Not Loading any plugins use --load-plugins to load what's " "Not Loading any plugins use --load-plugins to load what's "
"defined in the config file.", "defined in the config file.",
) )
stats_thread = threads.APRSDStatsStoreThread()
stats_thread.start()
LOG.debug("Create APRSDListenThread") LOG.debug("Create APRSDListenThread")
listen_thread = APRSDListenThread( listen_thread = APRSDListenThread(
@ -221,6 +220,4 @@ def listen(
keepalive.join() keepalive.join()
LOG.debug("listen_thread Join") LOG.debug("listen_thread Join")
listen_thread.join() listen_thread.join()
stats_thread.join()
if CONF.rpc_settings.enabled:
rpc.join()

View File

@ -76,7 +76,6 @@ def send_message(
aprs_login = CONF.aprs_network.login aprs_login = CONF.aprs_network.login
if not aprs_password: if not aprs_password:
LOG.warning(CONF.aprs_network.password)
if not CONF.aprs_network.password: if not CONF.aprs_network.password:
click.echo("Must set --aprs-password or APRS_PASSWORD") click.echo("Must set --aprs-password or APRS_PASSWORD")
ctx.exit(-1) ctx.exit(-1)

View File

@ -10,8 +10,9 @@ from aprsd import cli_helper, client
from aprsd import main as aprsd_main from aprsd import main as aprsd_main
from aprsd import packets, plugin, threads, utils from aprsd import packets, plugin, threads, utils
from aprsd.main import cli from aprsd.main import cli
from aprsd.rpc import server as rpc_server from aprsd.threads import keep_alive, log_monitor, registry, rx
from aprsd.threads import registry, rx, tx from aprsd.threads import stats as stats_thread
from aprsd.threads import tx
CONF = cfg.CONF CONF = cfg.CONF
@ -47,6 +48,14 @@ def server(ctx, flush):
# Initialize the client factory and create # Initialize the client factory and create
# The correct client object ready for use # The correct client object ready for use
client.ClientFactory.setup() client.ClientFactory.setup()
if not client.factory.is_client_enabled():
LOG.error("No Clients are enabled in config.")
sys.exit(-1)
# Creates the client object
LOG.info("Creating client connection")
aprs_client = client.factory.create()
LOG.info(aprs_client)
# Create the initial PM singleton and Register plugins # Create the initial PM singleton and Register plugins
# We register plugins first here so we can register each # We register plugins first here so we can register each
@ -87,16 +96,21 @@ def server(ctx, flush):
packets.PacketTrack().flush() packets.PacketTrack().flush()
packets.WatchList().flush() packets.WatchList().flush()
packets.SeenList().flush() packets.SeenList().flush()
packets.PacketList().flush()
else: else:
# Try and load saved MsgTrack list # Try and load saved MsgTrack list
LOG.debug("Loading saved MsgTrack object.") LOG.debug("Loading saved MsgTrack object.")
packets.PacketTrack().load() packets.PacketTrack().load()
packets.WatchList().load() packets.WatchList().load()
packets.SeenList().load() packets.SeenList().load()
packets.PacketList().load()
keepalive = threads.KeepAliveThread() keepalive = keep_alive.KeepAliveThread()
keepalive.start() keepalive.start()
stats_store_thread = stats_thread.APRSDStatsStoreThread()
stats_store_thread.start()
rx_thread = rx.APRSDPluginRXThread( rx_thread = rx.APRSDPluginRXThread(
packet_queue=threads.packet_queue, packet_queue=threads.packet_queue,
) )
@ -106,7 +120,6 @@ def server(ctx, flush):
rx_thread.start() rx_thread.start()
process_thread.start() process_thread.start()
packets.PacketTrack().restart()
if CONF.enable_beacon: if CONF.enable_beacon:
LOG.info("Beacon Enabled. Starting Beacon thread.") LOG.info("Beacon Enabled. Starting Beacon thread.")
bcn_thread = tx.BeaconSendThread() bcn_thread = tx.BeaconSendThread()
@ -117,11 +130,9 @@ def server(ctx, flush):
registry_thread = registry.APRSRegistryThread() registry_thread = registry.APRSRegistryThread()
registry_thread.start() registry_thread.start()
if CONF.rpc_settings.enabled: if CONF.admin.web_enabled:
rpc = rpc_server.APRSDRPCThread() log_monitor_thread = log_monitor.LogMonitorThread()
rpc.start() log_monitor_thread.start()
log_monitor = threads.log_monitor.LogMonitorThread()
log_monitor.start()
rx_thread.join() rx_thread.join()
process_thread.join() process_thread.join()

View File

@ -23,7 +23,7 @@ from aprsd import (
) )
from aprsd.main import cli from aprsd.main import cli
from aprsd.threads import aprsd as aprsd_threads from aprsd.threads import aprsd as aprsd_threads
from aprsd.threads import rx, tx from aprsd.threads import keep_alive, rx, tx
from aprsd.utils import trace from aprsd.utils import trace
@ -63,7 +63,7 @@ def signal_handler(sig, frame):
time.sleep(1.5) time.sleep(1.5)
# packets.WatchList().save() # packets.WatchList().save()
# packets.SeenList().save() # packets.SeenList().save()
LOG.info(stats.APRSDStats()) LOG.info(stats.stats_collector.collect())
LOG.info("Telling flask to bail.") LOG.info("Telling flask to bail.")
signal.signal(signal.SIGTERM, sys.exit(0)) signal.signal(signal.SIGTERM, sys.exit(0))
@ -378,7 +378,7 @@ def _get_transport(stats):
transport = "aprs-is" transport = "aprs-is"
aprs_connection = ( aprs_connection = (
"APRS-IS Server: <a href='http://status.aprs2.net' >" "APRS-IS Server: <a href='http://status.aprs2.net' >"
"{}</a>".format(stats["stats"]["aprs-is"]["server"]) "{}</a>".format(stats["APRSClientStats"]["server_string"])
) )
elif client.KISSClient.is_enabled(): elif client.KISSClient.is_enabled():
transport = client.KISSClient.transport() transport = client.KISSClient.transport()
@ -414,12 +414,13 @@ def location(callsign):
@flask_app.route("/") @flask_app.route("/")
def index(): def index():
stats = _stats() stats = _stats()
LOG.error(stats)
# For development # For development
html_template = "index.html" html_template = "index.html"
LOG.debug(f"Template {html_template}") LOG.debug(f"Template {html_template}")
transport, aprs_connection = _get_transport(stats) transport, aprs_connection = _get_transport(stats["stats"])
LOG.debug(f"transport {transport} aprs_connection {aprs_connection}") LOG.debug(f"transport {transport} aprs_connection {aprs_connection}")
stats["transport"] = transport stats["transport"] = transport
@ -454,27 +455,28 @@ def send_message_status():
def _stats(): def _stats():
stats_obj = stats.APRSDStats()
now = datetime.datetime.now() now = datetime.datetime.now()
time_format = "%m-%d-%Y %H:%M:%S" time_format = "%m-%d-%Y %H:%M:%S"
stats_dict = stats_obj.stats() stats_dict = stats.stats_collector.collect(serializable=True)
# Webchat doesnt need these # Webchat doesnt need these
if "watch_list" in stats_dict["aprsd"]: if "WatchList" in stats_dict:
del stats_dict["aprsd"]["watch_list"] del stats_dict["WatchList"]
if "seen_list" in stats_dict["aprsd"]: if "SeenList" in stats_dict:
del stats_dict["aprsd"]["seen_list"] del stats_dict["SeenList"]
if "threads" in stats_dict["aprsd"]: if "APRSDThreadList" in stats_dict:
del stats_dict["aprsd"]["threads"] del stats_dict["APRSDThreadList"]
# del stats_dict["email"] if "PacketList" in stats_dict:
# del stats_dict["plugins"] del stats_dict["PacketList"]
# del stats_dict["messages"] if "EmailStats" in stats_dict:
del stats_dict["EmailStats"]
if "PluginManager" in stats_dict:
del stats_dict["PluginManager"]
result = { result = {
"time": now.strftime(time_format), "time": now.strftime(time_format),
"stats": stats_dict, "stats": stats_dict,
} }
return result return result
@ -544,7 +546,7 @@ class SendMessageNamespace(Namespace):
LOG.debug(f"Long {long}") LOG.debug(f"Long {long}")
tx.send( tx.send(
packets.GPSPacket( packets.BeaconPacket(
from_call=CONF.callsign, from_call=CONF.callsign,
to_call="APDW16", to_call="APDW16",
latitude=lat, latitude=lat,
@ -642,7 +644,7 @@ def webchat(ctx, flush, port):
packets.WatchList() packets.WatchList()
packets.SeenList() packets.SeenList()
keepalive = threads.KeepAliveThread() keepalive = keep_alive.KeepAliveThread()
LOG.info("Start KeepAliveThread") LOG.info("Start KeepAliveThread")
keepalive.start() keepalive.start()

View File

@ -15,10 +15,6 @@ watch_list_group = cfg.OptGroup(
name="watch_list", name="watch_list",
title="Watch List settings", title="Watch List settings",
) )
rpc_group = cfg.OptGroup(
name="rpc_settings",
title="RPC Settings for admin <--> web",
)
webchat_group = cfg.OptGroup( webchat_group = cfg.OptGroup(
name="webchat", name="webchat",
title="Settings specific to the webchat command", title="Settings specific to the webchat command",
@ -146,7 +142,7 @@ admin_opts = [
default=False, default=False,
help="Enable the Admin Web Interface", help="Enable the Admin Web Interface",
), ),
cfg.IPOpt( cfg.StrOpt(
"web_ip", "web_ip",
default="0.0.0.0", default="0.0.0.0",
help="The ip address to listen on", help="The ip address to listen on",
@ -169,28 +165,6 @@ admin_opts = [
), ),
] ]
rpc_opts = [
cfg.BoolOpt(
"enabled",
default=True,
help="Enable RPC calls",
),
cfg.StrOpt(
"ip",
default="localhost",
help="The ip address to listen on",
),
cfg.PortOpt(
"port",
default=18861,
help="The port to listen on",
),
cfg.StrOpt(
"magic_word",
default=APRSD_DEFAULT_MAGIC_WORD,
help="Magic word to authenticate requests between client/server",
),
]
enabled_plugins_opts = [ enabled_plugins_opts = [
cfg.ListOpt( cfg.ListOpt(
@ -213,7 +187,7 @@ enabled_plugins_opts = [
] ]
webchat_opts = [ webchat_opts = [
cfg.IPOpt( cfg.StrOpt(
"web_ip", "web_ip",
default="0.0.0.0", default="0.0.0.0",
help="The ip address to listen on", help="The ip address to listen on",
@ -281,8 +255,6 @@ def register_opts(config):
config.register_opts(admin_opts, group=admin_group) config.register_opts(admin_opts, group=admin_group)
config.register_group(watch_list_group) config.register_group(watch_list_group)
config.register_opts(watch_list_opts, group=watch_list_group) config.register_opts(watch_list_opts, group=watch_list_group)
config.register_group(rpc_group)
config.register_opts(rpc_opts, group=rpc_group)
config.register_group(webchat_group) config.register_group(webchat_group)
config.register_opts(webchat_opts, group=webchat_group) config.register_opts(webchat_opts, group=webchat_group)
config.register_group(registry_group) config.register_group(registry_group)
@ -294,7 +266,6 @@ def list_opts():
"DEFAULT": (aprsd_opts + enabled_plugins_opts), "DEFAULT": (aprsd_opts + enabled_plugins_opts),
admin_group.name: admin_opts, admin_group.name: admin_opts,
watch_list_group.name: watch_list_opts, watch_list_group.name: watch_list_opts,
rpc_group.name: rpc_opts,
webchat_group.name: webchat_opts, webchat_group.name: webchat_opts,
registry_group.name: registry_opts, registry_group.name: registry_opts,
} }

View File

@ -36,7 +36,6 @@ class InterceptHandler(logging.Handler):
# to disable log to stdout, but still log to file # to disable log to stdout, but still log to file
# use the --quiet option on the cmdln # use the --quiet option on the cmdln
def setup_logging(loglevel=None, quiet=False): def setup_logging(loglevel=None, quiet=False):
print(f"setup_logging: loglevel={loglevel}, quiet={quiet}")
if not loglevel: if not loglevel:
log_level = CONF.logging.log_level log_level = CONF.logging.log_level
else: else:
@ -58,6 +57,8 @@ def setup_logging(loglevel=None, quiet=False):
webserver_list = [ webserver_list = [
"werkzeug", "werkzeug",
"werkzeug._internal", "werkzeug._internal",
"socketio",
"urllib3.connectionpool",
] ]
# We don't really want to see the aprslib parsing debug output. # We don't really want to see the aprslib parsing debug output.

View File

@ -35,7 +35,8 @@ from oslo_config import cfg, generator
# local imports here # local imports here
import aprsd import aprsd
from aprsd import cli_helper, packets, stats, threads, utils from aprsd import cli_helper, packets, threads, utils
from aprsd.stats import collector
# setup the global logger # setup the global logger
@ -44,7 +45,6 @@ CONF = cfg.CONF
LOG = logging.getLogger("APRSD") LOG = logging.getLogger("APRSD")
CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"]) CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"])
flask_enabled = False flask_enabled = False
rpc_serv = None
def custom_startswith(string, incomplete): def custom_startswith(string, incomplete):
@ -96,7 +96,8 @@ def signal_handler(sig, frame):
packets.PacketTrack().save() packets.PacketTrack().save()
packets.WatchList().save() packets.WatchList().save()
packets.SeenList().save() packets.SeenList().save()
LOG.info(stats.APRSDStats()) packets.PacketList().save()
LOG.info(collector.Collector().collect())
# signal.signal(signal.SIGTERM, sys.exit(0)) # signal.signal(signal.SIGTERM, sys.exit(0))
# sys.exit(0) # sys.exit(0)

View File

@ -109,14 +109,6 @@ class Packet:
path: List[str] = field(default_factory=list, compare=False, hash=False) path: List[str] = field(default_factory=list, compare=False, hash=False)
via: Optional[str] = field(default=None, compare=False, hash=False) via: Optional[str] = field(default=None, compare=False, hash=False)
@property
def json(self):
"""get the json formated string.
This is used soley by the rpc server to return json over the wire.
"""
return self.to_json()
def get(self, key: str, default: Optional[str] = None): def get(self, key: str, default: Optional[str] = None):
"""Emulate a getter on a dict.""" """Emulate a getter on a dict."""
if hasattr(self, key): if hasattr(self, key):
@ -218,6 +210,11 @@ class BulletinPacket(Packet):
bid: Optional[str] = field(default="1") bid: Optional[str] = field(default="1")
message_text: Optional[str] = field(default=None) message_text: Optional[str] = field(default=None)
@property
def key(self) -> str:
"""Build a key for finding this packet in a dict."""
return f"{self.from_call}:BLN{self.bid}"
@property @property
def human_info(self) -> str: def human_info(self) -> str:
return f"BLN{self.bid} {self.message_text}" return f"BLN{self.bid} {self.message_text}"
@ -385,6 +382,14 @@ class BeaconPacket(GPSPacket):
f"{self.payload}" f"{self.payload}"
) )
@property
def key(self) -> str:
"""Build a key for finding this packet in a dict."""
if self.raw_timestamp:
return f"{self.from_call}:{self.raw_timestamp}"
else:
return f"{self.from_call}:{self.human_info.replace(' ','')}"
@property @property
def human_info(self) -> str: def human_info(self) -> str:
h_str = [] h_str = []
@ -407,6 +412,11 @@ class MicEPacket(GPSPacket):
# 0 to 360 # 0 to 360
course: int = 0 course: int = 0
@property
def key(self) -> str:
"""Build a key for finding this packet in a dict."""
return f"{self.from_call}:{self.human_info.replace(' ', '')}"
@property @property
def human_info(self) -> str: def human_info(self) -> str:
h_info = super().human_info h_info = super().human_info
@ -428,6 +438,14 @@ class TelemetryPacket(GPSPacket):
# 0 to 360 # 0 to 360
course: int = 0 course: int = 0
@property
def key(self) -> str:
"""Build a key for finding this packet in a dict."""
if self.raw_timestamp:
return f"{self.from_call}:{self.raw_timestamp}"
else:
return f"{self.from_call}:{self.human_info.replace(' ','')}"
@property @property
def human_info(self) -> str: def human_info(self) -> str:
h_info = super().human_info h_info = super().human_info
@ -548,6 +566,14 @@ class WeatherPacket(GPSPacket, DataClassJsonMixin):
raw = cls._translate(cls, kvs) # type: ignore raw = cls._translate(cls, kvs) # type: ignore
return super().from_dict(raw) return super().from_dict(raw)
@property
def key(self) -> str:
"""Build a key for finding this packet in a dict."""
if self.raw_timestamp:
return f"{self.from_call}:{self.raw_timestamp}"
elif self.wx_raw_timestamp:
return f"{self.from_call}:{self.wx_raw_timestamp}"
@property @property
def human_info(self) -> str: def human_info(self) -> str:
h_str = [] h_str = []
@ -643,6 +669,11 @@ class ThirdPartyPacket(Packet, DataClassJsonMixin):
obj.subpacket = factory(obj.subpacket) # type: ignore obj.subpacket = factory(obj.subpacket) # type: ignore
return obj return obj
@property
def key(self) -> str:
"""Build a key for finding this packet in a dict."""
return f"{self.from_call}:{self.subpacket.key}"
@property @property
def human_info(self) -> str: def human_info(self) -> str:
sub_info = self.subpacket.human_info sub_info = self.subpacket.human_info
@ -772,8 +803,7 @@ def factory(raw_packet: dict[Any, Any]) -> type[Packet]:
if "latitude" in raw: if "latitude" in raw:
packet_class = GPSPacket packet_class = GPSPacket
else: else:
LOG.warning(f"Unknown packet type {packet_type}") # LOG.warning(raw)
LOG.warning(raw)
packet_class = UnknownPacket packet_class = UnknownPacket
raw.get("addresse", raw.get("to_call")) raw.get("addresse", raw.get("to_call"))

View File

@ -6,26 +6,28 @@ import threading
from oslo_config import cfg from oslo_config import cfg
import wrapt import wrapt
from aprsd import stats
from aprsd.packets import seen_list from aprsd.packets import seen_list
from aprsd.utils import objectstore
CONF = cfg.CONF CONF = cfg.CONF
LOG = logging.getLogger("APRSD") LOG = logging.getLogger("APRSD")
class PacketList(MutableMapping): class PacketList(MutableMapping, objectstore.ObjectStoreMixin):
_instance = None _instance = None
lock = threading.Lock() lock = threading.Lock()
_total_rx: int = 0 _total_rx: int = 0
_total_tx: int = 0 _total_tx: int = 0
types = {}
def __new__(cls, *args, **kwargs): def __new__(cls, *args, **kwargs):
if cls._instance is None: if cls._instance is None:
cls._instance = super().__new__(cls) cls._instance = super().__new__(cls)
cls._maxlen = 100 cls._maxlen = 100
cls.d = OrderedDict() cls.data = {
"types": {},
"packets": OrderedDict(),
}
return cls._instance return cls._instance
@wrapt.synchronized(lock) @wrapt.synchronized(lock)
@ -34,11 +36,10 @@ class PacketList(MutableMapping):
self._total_rx += 1 self._total_rx += 1
self._add(packet) self._add(packet)
ptype = packet.__class__.__name__ ptype = packet.__class__.__name__
if not ptype in self.types: if not ptype in self.data["types"]:
self.types[ptype] = {"tx": 0, "rx": 0} self.data["types"][ptype] = {"tx": 0, "rx": 0}
self.types[ptype]["rx"] += 1 self.data["types"][ptype]["rx"] += 1
seen_list.SeenList().update_seen(packet) seen_list.SeenList().update_seen(packet)
stats.APRSDStats().rx(packet)
@wrapt.synchronized(lock) @wrapt.synchronized(lock)
def tx(self, packet): def tx(self, packet):
@ -46,18 +47,17 @@ class PacketList(MutableMapping):
self._total_tx += 1 self._total_tx += 1
self._add(packet) self._add(packet)
ptype = packet.__class__.__name__ ptype = packet.__class__.__name__
if not ptype in self.types: if not ptype in self.data["types"]:
self.types[ptype] = {"tx": 0, "rx": 0} self.data["types"][ptype] = {"tx": 0, "rx": 0}
self.types[ptype]["tx"] += 1 self.data["types"][ptype]["tx"] += 1
seen_list.SeenList().update_seen(packet) seen_list.SeenList().update_seen(packet)
stats.APRSDStats().tx(packet)
@wrapt.synchronized(lock) @wrapt.synchronized(lock)
def add(self, packet): def add(self, packet):
self._add(packet) self._add(packet)
def _add(self, packet): def _add(self, packet):
self[packet.key] = packet self.data["packets"][packet.key] = packet
def copy(self): def copy(self):
return self.d.copy() return self.d.copy()
@ -72,23 +72,23 @@ class PacketList(MutableMapping):
def __getitem__(self, key): def __getitem__(self, key):
# self.d.move_to_end(key) # self.d.move_to_end(key)
return self.d[key] return self.data["packets"][key]
def __setitem__(self, key, value): def __setitem__(self, key, value):
if key in self.d: if key in self.data["packets"]:
self.d.move_to_end(key) self.data["packets"].move_to_end(key)
elif len(self.d) == self.maxlen: elif len(self.data["packets"]) == self.maxlen:
self.d.popitem(last=False) self.data["packets"].popitem(last=False)
self.d[key] = value self.data["packets"][key] = value
def __delitem__(self, key): def __delitem__(self, key):
del self.d[key] del self.data["packets"][key]
def __iter__(self): def __iter__(self):
return self.d.__iter__() return self.data["packets"].__iter__()
def __len__(self): def __len__(self):
return len(self.d) return len(self.data["packets"])
@wrapt.synchronized(lock) @wrapt.synchronized(lock)
def total_rx(self): def total_rx(self):
@ -97,3 +97,14 @@ class PacketList(MutableMapping):
@wrapt.synchronized(lock) @wrapt.synchronized(lock)
def total_tx(self): def total_tx(self):
return self._total_tx return self._total_tx
def stats(self, serializable=False) -> dict:
stats = {
"total_tracked": self.total_tx() + self.total_rx(),
"rx": self.total_rx(),
"tx": self.total_tx(),
"types": self.data["types"],
"packets": self.data["packets"],
}
return stats

View File

@ -26,6 +26,10 @@ class SeenList(objectstore.ObjectStoreMixin):
cls._instance.data = {} cls._instance.data = {}
return cls._instance return cls._instance
def stats(self, serializable=False):
"""Return the stats for the PacketTrack class."""
return self.data
@wrapt.synchronized(lock) @wrapt.synchronized(lock)
def update_seen(self, packet): def update_seen(self, packet):
callsign = None callsign = None
@ -39,5 +43,5 @@ class SeenList(objectstore.ObjectStoreMixin):
"last": None, "last": None,
"count": 0, "count": 0,
} }
self.data[callsign]["last"] = str(datetime.datetime.now()) self.data[callsign]["last"] = datetime.datetime.now()
self.data[callsign]["count"] += 1 self.data[callsign]["count"] += 1

View File

@ -4,7 +4,6 @@ import threading
from oslo_config import cfg from oslo_config import cfg
import wrapt import wrapt
from aprsd.threads import tx
from aprsd.utils import objectstore from aprsd.utils import objectstore
@ -58,6 +57,24 @@ class PacketTrack(objectstore.ObjectStoreMixin):
def values(self): def values(self):
return self.data.values() return self.data.values()
@wrapt.synchronized(lock)
def stats(self, serializable=False):
stats = {
"total_tracked": self.total_tracked,
}
pkts = {}
for key in self.data:
last_send_time = self.data[key].last_send_time
last_send_attempt = self.data[key]._last_send_attempt
pkts[key] = {
"last_send_time": last_send_time,
"last_send_attempt": last_send_attempt,
"retry_count": self.data[key].retry_count,
"message": self.data[key].raw,
}
stats["packets"] = pkts
return stats
@wrapt.synchronized(lock) @wrapt.synchronized(lock)
def __len__(self): def __len__(self):
return len(self.data) return len(self.data)
@ -79,33 +96,3 @@ class PacketTrack(objectstore.ObjectStoreMixin):
del self.data[key] del self.data[key]
except KeyError: except KeyError:
pass pass
def restart(self):
"""Walk the list of messages and restart them if any."""
for key in self.data.keys():
pkt = self.data[key]
if pkt._last_send_attempt < pkt.retry_count:
tx.send(pkt)
def _resend(self, packet):
packet._last_send_attempt = 0
tx.send(packet)
def restart_delayed(self, count=None, most_recent=True):
"""Walk the list of delayed messages and restart them if any."""
if not count:
# Send all the delayed messages
for key in self.data.keys():
pkt = self.data[key]
if pkt._last_send_attempt == pkt._retry_count:
self._resend(pkt)
else:
# They want to resend <count> delayed messages
tmp = sorted(
self.data.items(),
reverse=most_recent,
key=lambda x: x[1].last_send_time,
)
pkt_list = tmp[:count]
for (_key, pkt) in pkt_list:
self._resend(pkt)

View File

@ -28,7 +28,7 @@ class WatchList(objectstore.ObjectStoreMixin):
return cls._instance return cls._instance
def __init__(self, config=None): def __init__(self, config=None):
ring_size = CONF.watch_list.packet_keep_count CONF.watch_list.packet_keep_count
if CONF.watch_list.callsigns: if CONF.watch_list.callsigns:
for callsign in CONF.watch_list.callsigns: for callsign in CONF.watch_list.callsigns:
@ -38,12 +38,22 @@ class WatchList(objectstore.ObjectStoreMixin):
# last time a message was seen by aprs-is. For now this # last time a message was seen by aprs-is. For now this
# is all we can do. # is all we can do.
self.data[call] = { self.data[call] = {
"last": datetime.datetime.now(), "last": None,
"packets": utils.RingBuffer( "packet": None,
ring_size,
),
} }
@wrapt.synchronized(lock)
def stats(self, serializable=False) -> dict:
stats = {}
for callsign in self.data:
stats[callsign] = {
"last": self.data[callsign]["last"],
"packet": self.data[callsign]["packet"],
"age": self.age(callsign),
"old": self.is_old(callsign),
}
return stats
def is_enabled(self): def is_enabled(self):
return CONF.watch_list.enabled return CONF.watch_list.enabled
@ -58,7 +68,7 @@ class WatchList(objectstore.ObjectStoreMixin):
callsign = packet.from_call callsign = packet.from_call
if self.callsign_in_watchlist(callsign): if self.callsign_in_watchlist(callsign):
self.data[callsign]["last"] = datetime.datetime.now() self.data[callsign]["last"] = datetime.datetime.now()
self.data[callsign]["packets"].append(packet) self.data[callsign]["packet"] = packet
def last_seen(self, callsign): def last_seen(self, callsign):
if self.callsign_in_watchlist(callsign): if self.callsign_in_watchlist(callsign):
@ -66,7 +76,11 @@ class WatchList(objectstore.ObjectStoreMixin):
def age(self, callsign): def age(self, callsign):
now = datetime.datetime.now() now = datetime.datetime.now()
return str(now - self.last_seen(callsign)) last_seen_time = self.last_seen(callsign)
if last_seen_time:
return str(now - last_seen_time)
else:
return None
def max_delta(self, seconds=None): def max_delta(self, seconds=None):
if not seconds: if not seconds:
@ -83,14 +97,19 @@ class WatchList(objectstore.ObjectStoreMixin):
We put this here so any notification plugin can use this We put this here so any notification plugin can use this
same test. same test.
""" """
if not self.callsign_in_watchlist(callsign):
return False
age = self.age(callsign) age = self.age(callsign)
if age:
delta = utils.parse_delta_str(age)
d = datetime.timedelta(**delta)
delta = utils.parse_delta_str(age) max_delta = self.max_delta(seconds=seconds)
d = datetime.timedelta(**delta)
max_delta = self.max_delta(seconds=seconds) if d > max_delta:
return True
if d > max_delta: else:
return True return False
else: else:
return False return False

View File

@ -344,6 +344,28 @@ class PluginManager:
self._watchlist_pm = pluggy.PluginManager("aprsd") self._watchlist_pm = pluggy.PluginManager("aprsd")
self._watchlist_pm.add_hookspecs(APRSDPluginSpec) self._watchlist_pm.add_hookspecs(APRSDPluginSpec)
def stats(self, serializable=False) -> dict:
"""Collect and return stats for all plugins."""
def full_name_with_qualname(obj):
return "{}.{}".format(
obj.__class__.__module__,
obj.__class__.__qualname__,
)
plugin_stats = {}
plugins = self.get_plugins()
if plugins:
for p in plugins:
plugin_stats[full_name_with_qualname(p)] = {
"enabled": p.enabled,
"rx": p.rx_count,
"tx": p.tx_count,
"version": p.version,
}
return plugin_stats
def is_plugin(self, obj): def is_plugin(self, obj):
for c in inspect.getmro(obj): for c in inspect.getmro(obj):
if issubclass(c, APRSDPluginBase): if issubclass(c, APRSDPluginBase):
@ -369,7 +391,9 @@ class PluginManager:
try: try:
module_name, class_name = module_class_string.rsplit(".", 1) module_name, class_name = module_class_string.rsplit(".", 1)
module = importlib.import_module(module_name) module = importlib.import_module(module_name)
module = importlib.reload(module) # Commented out because the email thread starts in a different context
# and hence gives a different singleton for the EmailStats
# module = importlib.reload(module)
except Exception as ex: except Exception as ex:
if not module_name: if not module_name:
LOG.error(f"Failed to load Plugin {module_class_string}") LOG.error(f"Failed to load Plugin {module_class_string}")

View File

@ -11,7 +11,7 @@ import time
import imapclient import imapclient
from oslo_config import cfg from oslo_config import cfg
from aprsd import packets, plugin, stats, threads from aprsd import packets, plugin, threads, utils
from aprsd.threads import tx from aprsd.threads import tx
from aprsd.utils import trace from aprsd.utils import trace
@ -60,6 +60,38 @@ class EmailInfo:
self._delay = val self._delay = val
@utils.singleton
class EmailStats:
"""Singleton object to store stats related to email."""
_instance = None
tx = 0
rx = 0
email_thread_last_time = None
def stats(self, serializable=False):
if CONF.email_plugin.enabled:
last_check_time = self.email_thread_last_time
if serializable and last_check_time:
last_check_time = last_check_time.isoformat()
stats = {
"tx": self.tx,
"rx": self.rx,
"last_check_time": last_check_time,
}
else:
stats = {}
return stats
def tx_inc(self):
self.tx += 1
def rx_inc(self):
self.rx += 1
def email_thread_update(self):
self.email_thread_last_time = datetime.datetime.now()
class EmailPlugin(plugin.APRSDRegexCommandPluginBase): class EmailPlugin(plugin.APRSDRegexCommandPluginBase):
"""Email Plugin.""" """Email Plugin."""
@ -190,10 +222,6 @@ class EmailPlugin(plugin.APRSDRegexCommandPluginBase):
def _imap_connect(): def _imap_connect():
imap_port = CONF.email_plugin.imap_port imap_port = CONF.email_plugin.imap_port
use_ssl = CONF.email_plugin.imap_use_ssl use_ssl = CONF.email_plugin.imap_use_ssl
# host = CONFIG["aprsd"]["email"]["imap"]["host"]
# msg = "{}{}:{}".format("TLS " if use_ssl else "", host, imap_port)
# LOG.debug("Connect to IMAP host {} with user '{}'".
# format(msg, CONFIG['imap']['login']))
try: try:
server = imapclient.IMAPClient( server = imapclient.IMAPClient(
@ -440,7 +468,7 @@ def send_email(to_addr, content):
[to_addr], [to_addr],
msg.as_string(), msg.as_string(),
) )
stats.APRSDStats().email_tx_inc() EmailStats().tx_inc()
except Exception: except Exception:
LOG.exception("Sendmail Error!!!!") LOG.exception("Sendmail Error!!!!")
server.quit() server.quit()
@ -545,7 +573,7 @@ class APRSDEmailThread(threads.APRSDThread):
def loop(self): def loop(self):
time.sleep(5) time.sleep(5)
stats.APRSDStats().email_thread_update() EmailStats().email_thread_update()
# always sleep for 5 seconds and see if we need to check email # always sleep for 5 seconds and see if we need to check email
# This allows CTRL-C to stop the execution of this loop sooner # This allows CTRL-C to stop the execution of this loop sooner
# than check_email_delay time # than check_email_delay time

View File

@ -1,81 +0,0 @@
import datetime
import logging
import re
from oslo_config import cfg
from aprsd import packets, plugin
from aprsd.packets import tracker
from aprsd.utils import trace
CONF = cfg.CONF
LOG = logging.getLogger("APRSD")
class QueryPlugin(plugin.APRSDRegexCommandPluginBase):
"""Query command."""
command_regex = r"^\!.*"
command_name = "query"
short_description = "APRSD Owner command to query messages in the MsgTrack"
def setup(self):
"""Do any plugin setup here."""
if not CONF.query_plugin.callsign:
LOG.error("Config query_plugin.callsign not set. Disabling plugin")
self.enabled = False
self.enabled = True
@trace.trace
def process(self, packet: packets.MessagePacket):
LOG.info("Query COMMAND")
fromcall = packet.from_call
message = packet.get("message_text", None)
pkt_tracker = tracker.PacketTrack()
now = datetime.datetime.now()
reply = "Pending messages ({}) {}".format(
len(pkt_tracker),
now.strftime("%H:%M:%S"),
)
searchstring = "^" + CONF.query_plugin.callsign + ".*"
# only I can do admin commands
if re.search(searchstring, fromcall):
# resend last N most recent: "!3"
r = re.search(r"^\!([0-9]).*", message)
if r is not None:
if len(pkt_tracker) > 0:
last_n = r.group(1)
reply = packets.NULL_MESSAGE
LOG.debug(reply)
pkt_tracker.restart_delayed(count=int(last_n))
else:
reply = "No pending msgs to resend"
LOG.debug(reply)
return reply
# resend all: "!a"
r = re.search(r"^\![aA].*", message)
if r is not None:
if len(pkt_tracker) > 0:
reply = packets.NULL_MESSAGE
LOG.debug(reply)
pkt_tracker.restart_delayed()
else:
reply = "No pending msgs"
LOG.debug(reply)
return reply
# delete all: "!d"
r = re.search(r"^\![dD].*", message)
if r is not None:
reply = "Deleted ALL pending msgs."
LOG.debug(reply)
pkt_tracker.flush()
return reply
return reply

View File

@ -1,7 +1,8 @@
import logging import logging
import aprsd import aprsd
from aprsd import plugin, stats from aprsd import plugin
from aprsd.stats import collector
LOG = logging.getLogger("APRSD") LOG = logging.getLogger("APRSD")
@ -23,8 +24,8 @@ class VersionPlugin(plugin.APRSDRegexCommandPluginBase):
# fromcall = packet.get("from") # fromcall = packet.get("from")
# message = packet.get("message_text", None) # message = packet.get("message_text", None)
# ack = packet.get("msgNo", "0") # ack = packet.get("msgNo", "0")
s = stats.APRSDStats().stats() s = collector.Collector().collect()
return "APRSD ver:{} uptime:{}".format( return "APRSD ver:{} uptime:{}".format(
aprsd.__version__, aprsd.__version__,
s["aprsd"]["uptime"], s["APRSDStats"]["uptime"],
) )

View File

@ -1,14 +0,0 @@
import rpyc
class AuthSocketStream(rpyc.SocketStream):
"""Used to authenitcate the RPC stream to remote."""
@classmethod
def connect(cls, *args, authorizer=None, **kwargs):
stream_obj = super().connect(*args, **kwargs)
if callable(authorizer):
authorizer(stream_obj.sock)
return stream_obj

View File

@ -1,165 +0,0 @@
import json
import logging
from oslo_config import cfg
import rpyc
from aprsd import conf # noqa
from aprsd import rpc
CONF = cfg.CONF
LOG = logging.getLogger("APRSD")
class RPCClient:
_instance = None
_rpc_client = None
ip = None
port = None
magic_word = None
def __new__(cls, *args, **kwargs):
if cls._instance is None:
cls._instance = super().__new__(cls)
return cls._instance
def __init__(self, ip=None, port=None, magic_word=None):
if ip:
self.ip = ip
else:
self.ip = CONF.rpc_settings.ip
if port:
self.port = int(port)
else:
self.port = CONF.rpc_settings.port
if magic_word:
self.magic_word = magic_word
else:
self.magic_word = CONF.rpc_settings.magic_word
self._check_settings()
self.get_rpc_client()
def _check_settings(self):
if not CONF.rpc_settings.enabled:
LOG.warning("RPC is not enabled, no way to get stats!!")
if self.magic_word == conf.common.APRSD_DEFAULT_MAGIC_WORD:
LOG.warning("You are using the default RPC magic word!!!")
LOG.warning("edit aprsd.conf and change rpc_settings.magic_word")
LOG.debug(f"RPC Client: {self.ip}:{self.port} {self.magic_word}")
def _rpyc_connect(
self, host, port, service=rpyc.VoidService,
config={}, ipv6=False,
keepalive=False, authorizer=None, ):
LOG.info(f"Connecting to RPC host '{host}:{port}'")
try:
s = rpc.AuthSocketStream.connect(
host, port, ipv6=ipv6, keepalive=keepalive,
authorizer=authorizer,
)
return rpyc.utils.factory.connect_stream(s, service, config=config)
except ConnectionRefusedError:
LOG.error(f"Failed to connect to RPC host '{host}:{port}'")
return None
def get_rpc_client(self):
if not self._rpc_client:
self._rpc_client = self._rpyc_connect(
self.ip,
self.port,
authorizer=lambda sock: sock.send(self.magic_word.encode()),
)
return self._rpc_client
def get_stats_dict(self):
cl = self.get_rpc_client()
result = {}
if not cl:
return result
try:
rpc_stats_dict = cl.root.get_stats()
result = json.loads(rpc_stats_dict)
except EOFError:
LOG.error("Lost connection to RPC Host")
self._rpc_client = None
return result
def get_stats(self):
cl = self.get_rpc_client()
result = {}
if not cl:
return result
try:
result = cl.root.get_stats_obj()
except EOFError:
LOG.error("Lost connection to RPC Host")
self._rpc_client = None
return result
def get_packet_track(self):
cl = self.get_rpc_client()
result = None
if not cl:
return result
try:
result = cl.root.get_packet_track()
except EOFError:
LOG.error("Lost connection to RPC Host")
self._rpc_client = None
return result
def get_packet_list(self):
cl = self.get_rpc_client()
result = None
if not cl:
return result
try:
result = cl.root.get_packet_list()
except EOFError:
LOG.error("Lost connection to RPC Host")
self._rpc_client = None
return result
def get_watch_list(self):
cl = self.get_rpc_client()
result = None
if not cl:
return result
try:
result = cl.root.get_watch_list()
except EOFError:
LOG.error("Lost connection to RPC Host")
self._rpc_client = None
return result
def get_seen_list(self):
cl = self.get_rpc_client()
result = None
if not cl:
return result
try:
result = cl.root.get_seen_list()
except EOFError:
LOG.error("Lost connection to RPC Host")
self._rpc_client = None
return result
def get_log_entries(self):
cl = self.get_rpc_client()
result = None
if not cl:
return result
try:
result_str = cl.root.get_log_entries()
result = json.loads(result_str)
except EOFError:
LOG.error("Lost connection to RPC Host")
self._rpc_client = None
return result

View File

@ -1,99 +0,0 @@
import json
import logging
from oslo_config import cfg
import rpyc
from rpyc.utils.authenticators import AuthenticationError
from rpyc.utils.server import ThreadPoolServer
from aprsd import conf # noqa: F401
from aprsd import packets, stats, threads
from aprsd.threads import log_monitor
CONF = cfg.CONF
LOG = logging.getLogger("APRSD")
def magic_word_authenticator(sock):
client_ip = sock.getpeername()[0]
magic = sock.recv(len(CONF.rpc_settings.magic_word)).decode()
if magic != CONF.rpc_settings.magic_word:
LOG.error(
f"wrong magic word passed from {client_ip} "
"'{magic}' != '{CONF.rpc_settings.magic_word}'",
)
raise AuthenticationError(
f"wrong magic word passed in '{magic}'"
f" != '{CONF.rpc_settings.magic_word}'",
)
return sock, None
class APRSDRPCThread(threads.APRSDThread):
def __init__(self):
super().__init__(name="RPCThread")
self.thread = ThreadPoolServer(
APRSDService,
port=CONF.rpc_settings.port,
protocol_config={"allow_public_attrs": True},
authenticator=magic_word_authenticator,
)
def stop(self):
if self.thread:
self.thread.close()
self.thread_stop = True
def loop(self):
# there is no loop as run is blocked
if self.thread and not self.thread_stop:
# This is a blocking call
self.thread.start()
@rpyc.service
class APRSDService(rpyc.Service):
def on_connect(self, conn):
# code that runs when a connection is created
# (to init the service, if needed)
LOG.info("RPC Client Connected")
self._conn = conn
def on_disconnect(self, conn):
# code that runs after the connection has already closed
# (to finalize the service, if needed)
LOG.info("RPC Client Disconnected")
self._conn = None
@rpyc.exposed
def get_stats(self):
stat = stats.APRSDStats()
stats_dict = stat.stats()
return_str = json.dumps(stats_dict, indent=4, sort_keys=True, default=str)
return return_str
@rpyc.exposed
def get_stats_obj(self):
return stats.APRSDStats()
@rpyc.exposed
def get_packet_list(self):
return packets.PacketList()
@rpyc.exposed
def get_packet_track(self):
return packets.PacketTrack()
@rpyc.exposed
def get_watch_list(self):
return packets.WatchList()
@rpyc.exposed
def get_seen_list(self):
return packets.SeenList()
@rpyc.exposed
def get_log_entries(self):
entries = log_monitor.LogEntries().get_all_and_purge()
return json.dumps(entries, default=str)

View File

@ -1,265 +0,0 @@
import datetime
import logging
import threading
from oslo_config import cfg
import wrapt
import aprsd
from aprsd import packets, plugin, utils
CONF = cfg.CONF
LOG = logging.getLogger("APRSD")
class APRSDStats:
    """Process-wide singleton that aggregates APRSD runtime statistics.

    Tracks per-packet-type tx/rx counters, email plugin activity,
    memory usage, per-thread info and the APRS-IS connection state,
    and renders everything as a nested dict via stats().
    """

    _instance = None
    lock = threading.Lock()

    start_time = None
    _aprsis_server = None
    _aprsis_keepalive = None

    _email_thread_last_time = None
    _email_tx = 0
    _email_rx = 0

    _mem_current = 0
    _mem_peak = 0

    _thread_info = {}

    # Counters keyed by packet class name; rx()/tx() add new types lazily.
    _pkt_cnt = {
        "Packet": {
            "tx": 0,
            "rx": 0,
        },
        "AckPacket": {
            "tx": 0,
            "rx": 0,
        },
        "GPSPacket": {
            "tx": 0,
            "rx": 0,
        },
        "StatusPacket": {
            "tx": 0,
            "rx": 0,
        },
        "MicEPacket": {
            "tx": 0,
            "rx": 0,
        },
        "MessagePacket": {
            "tx": 0,
            "rx": 0,
        },
        "WeatherPacket": {
            "tx": 0,
            "rx": 0,
        },
        "ObjectPacket": {
            "tx": 0,
            "rx": 0,
        },
    }

    def __new__(cls, *args, **kwargs):
        if cls._instance is None:
            cls._instance = super().__new__(cls)
            # any init here
            cls._instance.start_time = datetime.datetime.now()
            cls._instance._aprsis_keepalive = datetime.datetime.now()
        return cls._instance

    @wrapt.synchronized(lock)
    @property
    def uptime(self):
        """Wall-clock time elapsed since the singleton was created."""
        return datetime.datetime.now() - self.start_time

    @wrapt.synchronized(lock)
    @property
    def memory(self):
        return self._mem_current

    @wrapt.synchronized(lock)
    def set_memory(self, memory):
        self._mem_current = memory

    @wrapt.synchronized(lock)
    @property
    def memory_peak(self):
        return self._mem_peak

    @wrapt.synchronized(lock)
    def set_memory_peak(self, memory):
        self._mem_peak = memory

    @wrapt.synchronized(lock)
    def set_thread_info(self, thread_info):
        self._thread_info = thread_info

    @wrapt.synchronized(lock)
    @property
    def thread_info(self):
        return self._thread_info

    @wrapt.synchronized(lock)
    @property
    def aprsis_server(self):
        return self._aprsis_server

    @wrapt.synchronized(lock)
    def set_aprsis_server(self, server):
        self._aprsis_server = server

    @wrapt.synchronized(lock)
    @property
    def aprsis_keepalive(self):
        return self._aprsis_keepalive

    @wrapt.synchronized(lock)
    def set_aprsis_keepalive(self):
        """Record that a keepalive was just received from APRS-IS."""
        self._aprsis_keepalive = datetime.datetime.now()

    def rx(self, packet):
        """Count a received packet by its class name.

        NOTE(review): unlike the accessors above this is not
        lock-protected; counter increments may race under threads.
        """
        pkt_type = packet.__class__.__name__
        if pkt_type not in self._pkt_cnt:
            self._pkt_cnt[pkt_type] = {
                "tx": 0,
                "rx": 0,
            }
        self._pkt_cnt[pkt_type]["rx"] += 1

    def tx(self, packet):
        """Count a transmitted packet by its class name (see rx note)."""
        pkt_type = packet.__class__.__name__
        if pkt_type not in self._pkt_cnt:
            self._pkt_cnt[pkt_type] = {
                "tx": 0,
                "rx": 0,
            }
        self._pkt_cnt[pkt_type]["tx"] += 1

    @wrapt.synchronized(lock)
    @property
    def msgs_tracked(self):
        return packets.PacketTrack().total_tracked

    @wrapt.synchronized(lock)
    @property
    def email_tx(self):
        return self._email_tx

    @wrapt.synchronized(lock)
    def email_tx_inc(self):
        self._email_tx += 1

    @wrapt.synchronized(lock)
    @property
    def email_rx(self):
        return self._email_rx

    @wrapt.synchronized(lock)
    def email_rx_inc(self):
        self._email_rx += 1

    @wrapt.synchronized(lock)
    @property
    def email_thread_time(self):
        return self._email_thread_last_time

    @wrapt.synchronized(lock)
    def email_thread_update(self):
        self._email_thread_last_time = datetime.datetime.now()

    def stats(self):
        """Render all tracked statistics as a nested dict."""
        now = datetime.datetime.now()
        if self._email_thread_last_time:
            last_update = str(now - self._email_thread_last_time)
        else:
            last_update = "never"
        if self._aprsis_keepalive:
            last_aprsis_keepalive = str(now - self._aprsis_keepalive)
        else:
            last_aprsis_keepalive = "never"

        pm = plugin.PluginManager()
        plugins = pm.get_plugins()
        plugin_stats = {}
        if plugins:
            def full_name_with_qualname(obj):
                return "{}.{}".format(
                    obj.__class__.__module__,
                    obj.__class__.__qualname__,
                )

            for p in plugins:
                plugin_stats[full_name_with_qualname(p)] = {
                    "enabled": p.enabled,
                    "rx": p.rx_count,
                    "tx": p.tx_count,
                    "version": p.version,
                }

        wl = packets.WatchList()
        sl = packets.SeenList()
        pl = packets.PacketList()

        stats = {
            "aprsd": {
                "version": aprsd.__version__,
                "uptime": utils.strfdelta(self.uptime),
                "callsign": CONF.callsign,
                "memory_current": int(self.memory),
                "memory_current_str": utils.human_size(self.memory),
                "memory_peak": int(self.memory_peak),
                "memory_peak_str": utils.human_size(self.memory_peak),
                "threads": self._thread_info,
                "watch_list": wl.get_all(),
                "seen_list": sl.get_all(),
            },
            "aprs-is": {
                "server": str(self.aprsis_server),
                "callsign": CONF.aprs_network.login,
                "last_update": last_aprsis_keepalive,
            },
            "packets": {
                "total_tracked": int(pl.total_tx() + pl.total_rx()),
                "total_sent": int(pl.total_tx()),
                "total_received": int(pl.total_rx()),
                "by_type": self._pkt_cnt,
            },
            "messages": {
                "sent": self._pkt_cnt["MessagePacket"]["tx"],
                # Fixed: previously reported the tx counter here as well.
                "received": self._pkt_cnt["MessagePacket"]["rx"],
                "ack_sent": self._pkt_cnt["AckPacket"]["tx"],
            },
            "email": {
                "enabled": CONF.email_plugin.enabled,
                "sent": int(self._email_tx),
                "received": int(self._email_rx),
                "thread_last_update": last_update,
            },
            "plugins": plugin_stats,
        }
        return stats

    def __str__(self):
        pl = packets.PacketList()
        return (
            "Uptime:{} Msgs TX:{} RX:{} "
            "ACK: TX:{} RX:{} "
            "Email TX:{} RX:{} LastLoop:{} ".format(
                self.uptime,
                pl.total_tx(),
                pl.total_rx(),
                self._pkt_cnt["AckPacket"]["tx"],
                self._pkt_cnt["AckPacket"]["rx"],
                self._email_tx,
                self._email_rx,
                self._email_thread_last_time,
            )
        )

20
aprsd/stats/__init__.py Normal file
View File

@ -0,0 +1,20 @@
from aprsd import client as aprs_client
from aprsd import plugin
from aprsd.packets import packet_list, seen_list, tracker, watch_list
from aprsd.plugins import email
from aprsd.stats import app, collector
from aprsd.threads import aprsd
# Create the collector and register all the objects
# that APRSD has that implement the stats protocol
# (each producer appears in the collected dict under its class name).
stats_collector = collector.Collector()
stats_collector.register_producer(app.APRSDStats())
stats_collector.register_producer(packet_list.PacketList())
stats_collector.register_producer(watch_list.WatchList())
stats_collector.register_producer(tracker.PacketTrack())
stats_collector.register_producer(plugin.PluginManager())
stats_collector.register_producer(aprsd.APRSDThreadList())
stats_collector.register_producer(email.EmailStats())
stats_collector.register_producer(aprs_client.APRSClientStats())
stats_collector.register_producer(seen_list.SeenList())

47
aprsd/stats/app.py Normal file
View File

@ -0,0 +1,47 @@
import datetime
import tracemalloc
from oslo_config import cfg
import aprsd
from aprsd import utils
CONF = cfg.CONF
class APRSDStats:
    """Application-level stats producer (singleton).

    Reports APRSD's version, configured callsign, uptime and the
    tracemalloc memory figures for the running process.
    """

    _instance = None

    def __new__(cls, *args, **kwargs):
        """Hand-rolled singleton instead of the @singleton decorator
        so the unit tests can work with the class directly.
        """
        if cls._instance is None:
            cls._instance = super().__new__(cls)
        return cls._instance

    def __init__(self):
        # NOTE: __init__ runs on every APRSDStats() call, so the uptime
        # clock restarts each time the singleton is "re-created".
        self.start_time = datetime.datetime.now()

    def uptime(self):
        """Return the elapsed wall-clock time since instantiation."""
        return datetime.datetime.now() - self.start_time

    def stats(self, serializable=False) -> dict:
        """Return app-level stats; stringify the uptime when *serializable*."""
        mem_now, mem_peak = tracemalloc.get_traced_memory()
        elapsed = self.uptime()
        if serializable:
            elapsed = str(elapsed)
        return {
            "version": aprsd.__version__,
            "uptime": elapsed,
            "callsign": CONF.callsign,
            "memory_current": int(mem_now),
            "memory_current_str": utils.human_size(mem_now),
            "memory_peak": int(mem_peak),
            "memory_peak_str": utils.human_size(mem_peak),
        }

30
aprsd/stats/collector.py Normal file
View File

@ -0,0 +1,30 @@
from typing import Protocol
from aprsd.utils import singleton
class StatsProducer(Protocol):
    """Structural interface for anything that can report stats.

    Implementations return a dict of their current statistics; when
    ``serializable`` is True, values must be JSON-serializable (e.g.
    datetimes rendered as strings).
    """

    # Parameter renamed from the misspelled "serializeable" so the
    # signature matches Collector.collect() and every registered producer.
    def stats(self, serializable=False) -> dict:
        """provide stats in a dictionary format."""
        ...
@singleton
class Collector:
    """Aggregates stats from every registered StatsProducer.

    Producers are keyed by their class name in the collected result,
    and producers whose stats() returns an empty dict are omitted.
    """

    def __init__(self):
        # class name -> StatsProducer instance
        self.producers: dict[str, StatsProducer] = {}

    def collect(self, serializable=False) -> dict:
        """Gather stats from all producers, skipping empty results."""
        collected = {}
        for name, producer in self.producers.items():
            produced = producer.stats(serializable=serializable)
            # No need to put in empty stats
            if produced:
                collected[name] = produced
        return collected

    def register_producer(self, producer: StatsProducer):
        """Register *producer* under its class name (replacing any prior one)."""
        self.producers[producer.__class__.__name__] = producer

View File

@ -3,8 +3,9 @@ import queue
# Make these available to anyone importing # Make these available to anyone importing
# aprsd.threads # aprsd.threads
from .aprsd import APRSDThread, APRSDThreadList # noqa: F401 from .aprsd import APRSDThread, APRSDThreadList # noqa: F401
from .keep_alive import KeepAliveThread # noqa: F401 from .rx import ( # noqa: F401
from .rx import APRSDRXThread, APRSDDupeRXThread, APRSDProcessPacketThread # noqa: F401 APRSDDupeRXThread, APRSDProcessPacketThread, APRSDRXThread,
)
packet_queue = queue.Queue(maxsize=20) packet_queue = queue.Queue(maxsize=20)

View File

@ -13,7 +13,7 @@ LOG = logging.getLogger("APRSD")
class APRSDThread(threading.Thread, metaclass=abc.ABCMeta): class APRSDThread(threading.Thread, metaclass=abc.ABCMeta):
"""Base class for all threads in APRSD.""" """Base class for all threads in APRSD."""
loop_interval = 1 loop_count = 1
def __init__(self, name): def __init__(self, name):
super().__init__(name=name) super().__init__(name=name)
@ -47,8 +47,8 @@ class APRSDThread(threading.Thread, metaclass=abc.ABCMeta):
def run(self): def run(self):
LOG.debug("Starting") LOG.debug("Starting")
while not self._should_quit(): while not self._should_quit():
self.loop_count += 1
can_loop = self.loop() can_loop = self.loop()
self.loop_interval += 1
self._last_loop = datetime.datetime.now() self._last_loop = datetime.datetime.now()
if not can_loop: if not can_loop:
self.stop() self.stop()
@ -71,6 +71,20 @@ class APRSDThreadList:
cls.threads_list = [] cls.threads_list = []
return cls._instance return cls._instance
def stats(self, serializable=False) -> dict:
stats = {}
for th in self.threads_list:
age = th.loop_age()
if serializable:
age = str(age)
stats[th.__class__.__name__] = {
"name": th.name,
"alive": th.is_alive(),
"age": th.loop_age(),
"loop_count": th.loop_count,
}
return stats
@wrapt.synchronized(lock) @wrapt.synchronized(lock)
def add(self, thread_obj): def add(self, thread_obj):
self.threads_list.append(thread_obj) self.threads_list.append(thread_obj)

View File

@ -5,7 +5,8 @@ import tracemalloc
from oslo_config import cfg from oslo_config import cfg
from aprsd import client, packets, stats, utils from aprsd import client, packets, utils
from aprsd.stats import collector
from aprsd.threads import APRSDThread, APRSDThreadList from aprsd.threads import APRSDThread, APRSDThreadList
@ -24,61 +25,66 @@ class KeepAliveThread(APRSDThread):
self.max_delta = datetime.timedelta(**max_timeout) self.max_delta = datetime.timedelta(**max_timeout)
def loop(self): def loop(self):
if self.cntr % 60 == 0: if self.loop_count % 60 == 0:
pkt_tracker = packets.PacketTrack() stats_json = collector.Collector().collect()
stats_obj = stats.APRSDStats()
pl = packets.PacketList() pl = packets.PacketList()
thread_list = APRSDThreadList() thread_list = APRSDThreadList()
now = datetime.datetime.now() now = datetime.datetime.now()
last_email = stats_obj.email_thread_time
if last_email: if "EmailStats" in stats_json:
email_thread_time = utils.strfdelta(now - last_email) email_stats = stats_json["EmailStats"]
if email_stats["last_check_time"]:
email_thread_time = utils.strfdelta(now - email_stats["last_check_time"])
else:
email_thread_time = "N/A"
else: else:
email_thread_time = "N/A" email_thread_time = "N/A"
last_msg_time = utils.strfdelta(now - stats_obj.aprsis_keepalive) if "APRSClientStats" in stats_json and stats_json["APRSClientStats"].get("transport") == "aprsis":
if stats_json["APRSClientStats"].get("server_keepalive"):
last_msg_time = utils.strfdelta(now - stats_json["APRSClientStats"]["server_keepalive"])
else:
last_msg_time = "N/A"
else:
last_msg_time = "N/A"
current, peak = tracemalloc.get_traced_memory() tracked_packets = stats_json["PacketTrack"]["total_tracked"]
stats_obj.set_memory(current) tx_msg = 0
stats_obj.set_memory_peak(peak) rx_msg = 0
if "PacketList" in stats_json:
login = CONF.callsign msg_packets = stats_json["PacketList"].get("MessagePacket")
if msg_packets:
tracked_packets = len(pkt_tracker) tx_msg = msg_packets.get("tx", 0)
rx_msg = msg_packets.get("rx", 0)
keepalive = ( keepalive = (
"{} - Uptime {} RX:{} TX:{} Tracker:{} Msgs TX:{} RX:{} " "{} - Uptime {} RX:{} TX:{} Tracker:{} Msgs TX:{} RX:{} "
"Last:{} Email: {} - RAM Current:{} Peak:{} Threads:{}" "Last:{} Email: {} - RAM Current:{} Peak:{} Threads:{}"
).format( ).format(
login, stats_json["APRSDStats"]["callsign"],
utils.strfdelta(stats_obj.uptime), stats_json["APRSDStats"]["uptime"],
pl.total_rx(), pl.total_rx(),
pl.total_tx(), pl.total_tx(),
tracked_packets, tracked_packets,
stats_obj._pkt_cnt["MessagePacket"]["tx"], tx_msg,
stats_obj._pkt_cnt["MessagePacket"]["rx"], rx_msg,
last_msg_time, last_msg_time,
email_thread_time, email_thread_time,
utils.human_size(current), stats_json["APRSDStats"]["memory_current_str"],
utils.human_size(peak), stats_json["APRSDStats"]["memory_peak_str"],
len(thread_list), len(thread_list),
) )
LOG.info(keepalive) LOG.info(keepalive)
thread_out = [] if "APRSDThreadList" in stats_json:
thread_info = {} thread_list = stats_json["APRSDThreadList"]
for thread in thread_list.threads_list: for thread_name in thread_list:
alive = thread.is_alive() thread = thread_list[thread_name]
age = thread.loop_age() alive = thread["alive"]
key = thread.__class__.__name__ age = thread["age"]
thread_out.append(f"{key}:{alive}:{age}") key = thread["name"]
if key not in thread_info: if not alive:
thread_info[key] = {} LOG.error(f"Thread {thread}")
thread_info[key]["alive"] = alive LOG.info(f"{key: <15} Alive? {str(alive): <5} {str(age): <20}")
thread_info[key]["age"] = age
if not alive:
LOG.error(f"Thread {thread}")
LOG.info(",".join(thread_out))
stats_obj.set_thread_info(thread_info)
# check the APRS connection # check the APRS connection
cl = client.factory.create() cl = client.factory.create()
@ -90,18 +96,18 @@ class KeepAliveThread(APRSDThread):
if not cl.is_alive() and self.cntr > 0: if not cl.is_alive() and self.cntr > 0:
LOG.error(f"{cl.__class__.__name__} is not alive!!! Resetting") LOG.error(f"{cl.__class__.__name__} is not alive!!! Resetting")
client.factory.create().reset() client.factory.create().reset()
else: # else:
# See if we should reset the aprs-is client # # See if we should reset the aprs-is client
# Due to losing a keepalive from them # # Due to losing a keepalive from them
delta_dict = utils.parse_delta_str(last_msg_time) # delta_dict = utils.parse_delta_str(last_msg_time)
delta = datetime.timedelta(**delta_dict) # delta = datetime.timedelta(**delta_dict)
#
if delta > self.max_delta: # if delta > self.max_delta:
# We haven't gotten a keepalive from aprs-is in a while # # We haven't gotten a keepalive from aprs-is in a while
# reset the connection.a # # reset the connection.a
if not client.KISSClient.is_enabled(): # if not client.KISSClient.is_enabled():
LOG.warning(f"Resetting connection to APRS-IS {delta}") # LOG.warning(f"Resetting connection to APRS-IS {delta}")
client.factory.create().reset() # client.factory.create().reset()
# Check version every day # Check version every day
delta = now - self.checker_time delta = now - self.checker_time
@ -110,6 +116,6 @@ class KeepAliveThread(APRSDThread):
level, msg = utils._check_version() level, msg = utils._check_version()
if level: if level:
LOG.warning(msg) LOG.warning(msg)
self.cntr += 1 self.cntr += 1
time.sleep(1) time.sleep(1)
return True return True

View File

@ -1,25 +1,56 @@
import datetime
import logging import logging
import threading import threading
from oslo_config import cfg
import requests
import wrapt import wrapt
from aprsd import threads from aprsd import threads
from aprsd.log import log from aprsd.log import log
CONF = cfg.CONF
LOG = logging.getLogger("APRSD") LOG = logging.getLogger("APRSD")
def send_log_entries(force=False):
    """Send all of the log entries to the web interface."""
    # Only ship logs when the admin web UI is enabled; otherwise
    # entries simply accumulate in LogEntries.
    if CONF.admin.web_enabled:
        if force or LogEntries().is_purge_ready():
            entries = LogEntries().get_all_and_purge()
            # NOTE(review): leftover debug print? consider LOG.debug here.
            print(f"Sending log entries {len(entries)}")
            if entries:
                try:
                    requests.post(
                        f"http://{CONF.admin.web_ip}:{CONF.admin.web_port}/log_entries",
                        json=entries,
                        auth=(CONF.admin.user, CONF.admin.password),
                    )
                except Exception as ex:
                    # Best effort: a web-UI hiccup must never kill the
                    # caller; the purged entries are dropped.
                    LOG.warning(f"Failed to send log entries {len(entries)}")
                    LOG.warning(ex)
class LogEntries: class LogEntries:
entries = [] entries = []
lock = threading.Lock() lock = threading.Lock()
_instance = None _instance = None
last_purge = datetime.datetime.now()
max_delta = datetime.timedelta(
hours=0.0, minutes=0, seconds=2,
)
def __new__(cls, *args, **kwargs): def __new__(cls, *args, **kwargs):
if cls._instance is None: if cls._instance is None:
cls._instance = super().__new__(cls) cls._instance = super().__new__(cls)
return cls._instance return cls._instance
def stats(self) -> dict:
return {
"log_entries": self.entries,
}
@wrapt.synchronized(lock) @wrapt.synchronized(lock)
def add(self, entry): def add(self, entry):
self.entries.append(entry) self.entries.append(entry)
@ -28,8 +59,18 @@ class LogEntries:
def get_all_and_purge(self): def get_all_and_purge(self):
entries = self.entries.copy() entries = self.entries.copy()
self.entries = [] self.entries = []
self.last_purge = datetime.datetime.now()
return entries return entries
def is_purge_ready(self):
now = datetime.datetime.now()
if (
now - self.last_purge > self.max_delta
and len(self.entries) > 1
):
return True
return False
@wrapt.synchronized(lock) @wrapt.synchronized(lock)
def __len__(self): def __len__(self):
return len(self.entries) return len(self.entries)
@ -40,6 +81,10 @@ class LogMonitorThread(threads.APRSDThread):
def __init__(self): def __init__(self):
super().__init__("LogMonitorThread") super().__init__("LogMonitorThread")
def stop(self):
send_log_entries(force=True)
super().stop()
def loop(self): def loop(self):
try: try:
record = log.logging_queue.get(block=True, timeout=2) record = log.logging_queue.get(block=True, timeout=2)
@ -54,6 +99,7 @@ class LogMonitorThread(threads.APRSDThread):
# Just ignore thi # Just ignore thi
pass pass
send_log_entries()
return True return True
def json_record(self, record): def json_record(self, record):

View File

@ -6,7 +6,7 @@ import time
import aprslib import aprslib
from oslo_config import cfg from oslo_config import cfg
from aprsd import client, packets, plugin, stats from aprsd import client, packets, plugin
from aprsd.packets import log as packet_log from aprsd.packets import log as packet_log
from aprsd.threads import APRSDThread, tx from aprsd.threads import APRSDThread, tx
@ -27,7 +27,6 @@ class APRSDRXThread(APRSDThread):
self._client.stop() self._client.stop()
def loop(self): def loop(self):
LOG.debug(f"RX_MSG-LOOP {self.loop_interval}")
if not self._client: if not self._client:
self._client = client.factory.create() self._client = client.factory.create()
time.sleep(1) time.sleep(1)
@ -43,31 +42,29 @@ class APRSDRXThread(APRSDThread):
# and the aprslib developer didn't want to allow a PR to add # and the aprslib developer didn't want to allow a PR to add
# kwargs. :( # kwargs. :(
# https://github.com/rossengeorgiev/aprs-python/pull/56 # https://github.com/rossengeorgiev/aprs-python/pull/56
LOG.debug(f"Calling client consumer CL {self._client}")
self._client.consumer( self._client.consumer(
self._process_packet, raw=False, blocking=False, self._process_packet, raw=False, blocking=False,
) )
LOG.debug(f"Consumer done {self._client}")
except ( except (
aprslib.exceptions.ConnectionDrop, aprslib.exceptions.ConnectionDrop,
aprslib.exceptions.ConnectionError, aprslib.exceptions.ConnectionError,
): ):
LOG.error("Connection dropped, reconnecting") LOG.error("Connection dropped, reconnecting")
time.sleep(5)
# Force the deletion of the client object connected to aprs # Force the deletion of the client object connected to aprs
# This will cause a reconnect, next time client.get_client() # This will cause a reconnect, next time client.get_client()
# is called # is called
self._client.reset() self._client.reset()
except Exception as ex: time.sleep(5)
LOG.error("Something bad happened!!!") except Exception:
LOG.exception(ex) # LOG.exception(ex)
return False LOG.error("Resetting connection and trying again.")
self._client.reset()
time.sleep(5)
# Continue to loop # Continue to loop
return True return True
def _process_packet(self, *args, **kwargs): def _process_packet(self, *args, **kwargs):
"""Intermediate callback so we can update the keepalive time.""" """Intermediate callback so we can update the keepalive time."""
stats.APRSDStats().set_aprsis_keepalive()
# Now call the 'real' packet processing for a RX'x packet # Now call the 'real' packet processing for a RX'x packet
self.process_packet(*args, **kwargs) self.process_packet(*args, **kwargs)
@ -155,7 +152,6 @@ class APRSDProcessPacketThread(APRSDThread):
def __init__(self, packet_queue): def __init__(self, packet_queue):
self.packet_queue = packet_queue self.packet_queue = packet_queue
super().__init__("ProcessPKT") super().__init__("ProcessPKT")
self._loop_cnt = 1
def process_ack_packet(self, packet): def process_ack_packet(self, packet):
"""We got an ack for a message, no need to resend it.""" """We got an ack for a message, no need to resend it."""
@ -178,12 +174,11 @@ class APRSDProcessPacketThread(APRSDThread):
self.process_packet(packet) self.process_packet(packet)
except queue.Empty: except queue.Empty:
pass pass
self._loop_cnt += 1
return True return True
def process_packet(self, packet): def process_packet(self, packet):
"""Process a packet received from aprs-is server.""" """Process a packet received from aprs-is server."""
LOG.debug(f"ProcessPKT-LOOP {self._loop_cnt}") LOG.debug(f"ProcessPKT-LOOP {self.loop_count}")
our_call = CONF.callsign.lower() our_call = CONF.callsign.lower()
from_call = packet.from_call from_call = packet.from_call

38
aprsd/threads/stats.py Normal file
View File

@ -0,0 +1,38 @@
import logging
import threading
import time
from oslo_config import cfg
from aprsd.stats import collector
from aprsd.threads import APRSDThread
from aprsd.utils import objectstore
CONF = cfg.CONF
LOG = logging.getLogger("APRSD")
class StatsStore(objectstore.ObjectStoreMixin):
    """Container to save the stats from the collector."""
    # Lock used by ObjectStoreMixin's save/load machinery.
    lock = threading.Lock()
class APRSDStatsStoreThread(APRSDThread):
    """Save APRSD Stats to disk periodically."""

    # how often in seconds to write the file
    save_interval = 10

    def __init__(self):
        super().__init__("StatsStore")

    def loop(self):
        # loop() runs roughly once per second (see the sleep below), so
        # loop_count % save_interval persists about every save_interval s.
        if self.loop_count % self.save_interval == 0:
            stats = collector.Collector().collect()
            ss = StatsStore()
            ss.data = stats
            ss.save()

        time.sleep(1)
        return True

View File

@ -77,7 +77,11 @@ def _send_direct(packet, aprs_client=None):
packet.update_timestamp() packet.update_timestamp()
packet_log.log(packet, tx=True) packet_log.log(packet, tx=True)
cl.send(packet) try:
cl.send(packet)
except Exception as e:
LOG.error(f"Failed to send packet: {packet}")
LOG.error(e)
class SendPacketThread(aprsd_threads.APRSDThread): class SendPacketThread(aprsd_threads.APRSDThread):
@ -232,7 +236,15 @@ class BeaconSendThread(aprsd_threads.APRSDThread):
comment="APRSD GPS Beacon", comment="APRSD GPS Beacon",
symbol=CONF.beacon_symbol, symbol=CONF.beacon_symbol,
) )
send(pkt, direct=True) try:
# Only send it once
pkt.retry_count = 1
send(pkt, direct=True)
except Exception as e:
LOG.error(f"Failed to send beacon: {e}")
client.factory.create().reset()
time.sleep(5)
self._loop_cnt += 1 self._loop_cnt += 1
time.sleep(1) time.sleep(1)
return True return True

View File

@ -1,6 +1,7 @@
"""Utilities and helper functions.""" """Utilities and helper functions."""
import errno import errno
import functools
import os import os
import re import re
import sys import sys
@ -22,6 +23,17 @@ else:
from collections.abc import MutableMapping from collections.abc import MutableMapping
def singleton(cls):
    """Class decorator that makes *cls* a singleton (only one instance).

    The shared instance is stored on the wrapper's ``instance``
    attribute; the first call constructs it, later calls return it.
    """
    @functools.wraps(cls)
    def get_instance(*args, **kwargs):
        if get_instance.instance is None:
            get_instance.instance = cls(*args, **kwargs)
        return get_instance.instance

    get_instance.instance = None
    return get_instance
def env(*vars, **kwargs): def env(*vars, **kwargs):
"""This returns the first environment variable set. """This returns the first environment variable set.
if none are non-empty, defaults to '' or keyword arg default if none are non-empty, defaults to '' or keyword arg default

View File

@ -3,6 +3,8 @@ import decimal
import json import json
import sys import sys
from aprsd.packets import core
class EnhancedJSONEncoder(json.JSONEncoder): class EnhancedJSONEncoder(json.JSONEncoder):
def default(self, obj): def default(self, obj):
@ -42,6 +44,24 @@ class EnhancedJSONEncoder(json.JSONEncoder):
return super().default(obj) return super().default(obj)
class SimpleJSONEncoder(json.JSONEncoder):
    """JSON encoder that flattens common non-native types to strings.

    datetimes become ISO-8601 strings; dates, times, timedeltas and
    Decimals become str(); APRSD Packet objects become their dict form.
    """

    def default(self, obj):
        # datetime must be tested before date: datetime subclasses date,
        # and it serializes via isoformat() rather than str().
        if isinstance(obj, datetime.datetime):
            return obj.isoformat()
        if isinstance(
            obj,
            (datetime.date, datetime.time, datetime.timedelta, decimal.Decimal),
        ):
            return str(obj)
        if isinstance(obj, core.Packet):
            return obj.to_dict()
        return super().default(obj)
class EnhancedJSONDecoder(json.JSONDecoder): class EnhancedJSONDecoder(json.JSONDecoder):
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):

View File

@ -71,12 +71,13 @@ class ObjectStoreMixin:
if not CONF.enable_save: if not CONF.enable_save:
return return
if len(self) > 0: if len(self) > 0:
save_filename = self._save_filename()
LOG.info( LOG.info(
f"{self.__class__.__name__}::Saving" f"{self.__class__.__name__}::Saving"
f" {len(self)} entries to disk at" f" {len(self)} entries to disk at "
f"{CONF.save_location}", f"{save_filename}",
) )
with open(self._save_filename(), "wb+") as fp: with open(save_filename, "wb+") as fp:
pickle.dump(self._dump(), fp) pickle.dump(self._dump(), fp)
else: else:
LOG.debug( LOG.debug(

View File

@ -1,189 +1,4 @@
/* PrismJS 1.24.1 /* PrismJS 1.29.0
https://prismjs.com/download.html#themes=prism-tomorrow&languages=markup+css+clike+javascript+log&plugins=show-language+toolbar */ https://prismjs.com/download.html#themes=prism-tomorrow&languages=markup+css+clike+javascript+json+json5+log&plugins=show-language+toolbar */
/** code[class*=language-],pre[class*=language-]{color:#ccc;background:0 0;font-family:Consolas,Monaco,'Andale Mono','Ubuntu Mono',monospace;font-size:1em;text-align:left;white-space:pre;word-spacing:normal;word-break:normal;word-wrap:normal;line-height:1.5;-moz-tab-size:4;-o-tab-size:4;tab-size:4;-webkit-hyphens:none;-moz-hyphens:none;-ms-hyphens:none;hyphens:none}pre[class*=language-]{padding:1em;margin:.5em 0;overflow:auto}:not(pre)>code[class*=language-],pre[class*=language-]{background:#2d2d2d}:not(pre)>code[class*=language-]{padding:.1em;border-radius:.3em;white-space:normal}.token.block-comment,.token.cdata,.token.comment,.token.doctype,.token.prolog{color:#999}.token.punctuation{color:#ccc}.token.attr-name,.token.deleted,.token.namespace,.token.tag{color:#e2777a}.token.function-name{color:#6196cc}.token.boolean,.token.function,.token.number{color:#f08d49}.token.class-name,.token.constant,.token.property,.token.symbol{color:#f8c555}.token.atrule,.token.builtin,.token.important,.token.keyword,.token.selector{color:#cc99cd}.token.attr-value,.token.char,.token.regex,.token.string,.token.variable{color:#7ec699}.token.entity,.token.operator,.token.url{color:#67cdcc}.token.bold,.token.important{font-weight:700}.token.italic{font-style:italic}.token.entity{cursor:help}.token.inserted{color:green}
* prism.js tomorrow night eighties for JavaScript, CoffeeScript, CSS and HTML div.code-toolbar{position:relative}div.code-toolbar>.toolbar{position:absolute;z-index:10;top:.3em;right:.2em;transition:opacity .3s ease-in-out;opacity:0}div.code-toolbar:hover>.toolbar{opacity:1}div.code-toolbar:focus-within>.toolbar{opacity:1}div.code-toolbar>.toolbar>.toolbar-item{display:inline-block}div.code-toolbar>.toolbar>.toolbar-item>a{cursor:pointer}div.code-toolbar>.toolbar>.toolbar-item>button{background:0 0;border:0;color:inherit;font:inherit;line-height:normal;overflow:visible;padding:0;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none}div.code-toolbar>.toolbar>.toolbar-item>a,div.code-toolbar>.toolbar>.toolbar-item>button,div.code-toolbar>.toolbar>.toolbar-item>span{color:#bbb;font-size:.8em;padding:0 .5em;background:#f5f2f0;background:rgba(224,224,224,.2);box-shadow:0 2px 0 0 rgba(0,0,0,.2);border-radius:.5em}div.code-toolbar>.toolbar>.toolbar-item>a:focus,div.code-toolbar>.toolbar>.toolbar-item>a:hover,div.code-toolbar>.toolbar>.toolbar-item>button:focus,div.code-toolbar>.toolbar>.toolbar-item>button:hover,div.code-toolbar>.toolbar>.toolbar-item>span:focus,div.code-toolbar>.toolbar>.toolbar-item>span:hover{color:inherit;text-decoration:none}
* Based on https://github.com/chriskempson/tomorrow-theme
* @author Rose Pritchard
*/
code[class*="language-"],
pre[class*="language-"] {
color: #ccc;
background: none;
font-family: Consolas, Monaco, 'Andale Mono', 'Ubuntu Mono', monospace;
font-size: 1em;
text-align: left;
white-space: pre;
word-spacing: normal;
word-break: normal;
word-wrap: normal;
line-height: 1.5;
-moz-tab-size: 4;
-o-tab-size: 4;
tab-size: 4;
-webkit-hyphens: none;
-moz-hyphens: none;
-ms-hyphens: none;
hyphens: none;
}
/* Code blocks */
pre[class*="language-"] {
padding: 1em;
margin: .5em 0;
overflow: auto;
}
:not(pre) > code[class*="language-"],
pre[class*="language-"] {
background: #2d2d2d;
}
/* Inline code */
:not(pre) > code[class*="language-"] {
padding: .1em;
border-radius: .3em;
white-space: normal;
}
.token.comment,
.token.block-comment,
.token.prolog,
.token.doctype,
.token.cdata {
color: #999;
}
.token.punctuation {
color: #ccc;
}
.token.tag,
.token.attr-name,
.token.namespace,
.token.deleted {
color: #e2777a;
}
.token.function-name {
color: #6196cc;
}
.token.boolean,
.token.number,
.token.function {
color: #f08d49;
}
.token.property,
.token.class-name,
.token.constant,
.token.symbol {
color: #f8c555;
}
.token.selector,
.token.important,
.token.atrule,
.token.keyword,
.token.builtin {
color: #cc99cd;
}
.token.string,
.token.char,
.token.attr-value,
.token.regex,
.token.variable {
color: #7ec699;
}
.token.operator,
.token.entity,
.token.url {
color: #67cdcc;
}
.token.important,
.token.bold {
font-weight: bold;
}
.token.italic {
font-style: italic;
}
.token.entity {
cursor: help;
}
.token.inserted {
color: green;
}
div.code-toolbar {
position: relative;
}
div.code-toolbar > .toolbar {
position: absolute;
top: .3em;
right: .2em;
transition: opacity 0.3s ease-in-out;
opacity: 0;
}
div.code-toolbar:hover > .toolbar {
opacity: 1;
}
/* Separate line b/c rules are thrown out if selector is invalid.
IE11 and old Edge versions don't support :focus-within. */
div.code-toolbar:focus-within > .toolbar {
opacity: 1;
}
div.code-toolbar > .toolbar > .toolbar-item {
display: inline-block;
}
div.code-toolbar > .toolbar > .toolbar-item > a {
cursor: pointer;
}
div.code-toolbar > .toolbar > .toolbar-item > button {
background: none;
border: 0;
color: inherit;
font: inherit;
line-height: normal;
overflow: visible;
padding: 0;
-webkit-user-select: none; /* for button */
-moz-user-select: none;
-ms-user-select: none;
}
div.code-toolbar > .toolbar > .toolbar-item > a,
div.code-toolbar > .toolbar > .toolbar-item > button,
div.code-toolbar > .toolbar > .toolbar-item > span {
color: #bbb;
font-size: .8em;
padding: 0 .5em;
background: #f5f2f0;
background: rgba(224, 224, 224, 0.2);
box-shadow: 0 2px 0 0 rgba(0,0,0,0.2);
border-radius: .5em;
}
div.code-toolbar > .toolbar > .toolbar-item > a:hover,
div.code-toolbar > .toolbar > .toolbar-item > a:focus,
div.code-toolbar > .toolbar > .toolbar-item > button:hover,
div.code-toolbar > .toolbar > .toolbar-item > button:focus,
div.code-toolbar > .toolbar > .toolbar-item > span:hover,
div.code-toolbar > .toolbar > .toolbar-item > span:focus {
color: inherit;
text-decoration: none;
}

View File

@ -219,15 +219,17 @@ function updateQuadData(chart, label, first, second, third, fourth) {
} }
function update_stats( data ) { function update_stats( data ) {
our_callsign = data["stats"]["aprsd"]["callsign"]; our_callsign = data["APRSDStats"]["callsign"];
$("#version").text( data["stats"]["aprsd"]["version"] ); $("#version").text( data["APRSDStats"]["version"] );
$("#aprs_connection").html( data["aprs_connection"] ); $("#aprs_connection").html( data["aprs_connection"] );
$("#uptime").text( "uptime: " + data["stats"]["aprsd"]["uptime"] ); $("#uptime").text( "uptime: " + data["APRSDStats"]["uptime"] );
const html_pretty = Prism.highlight(JSON.stringify(data, null, '\t'), Prism.languages.json, 'json'); const html_pretty = Prism.highlight(JSON.stringify(data, null, '\t'), Prism.languages.json, 'json');
$("#jsonstats").html(html_pretty); $("#jsonstats").html(html_pretty);
short_time = data["time"].split(/\s(.+)/)[1]; short_time = data["time"].split(/\s(.+)/)[1];
updateDualData(packets_chart, short_time, data["stats"]["packets"]["sent"], data["stats"]["packets"]["received"]); packet_list = data["PacketList"]["packets"];
updateQuadData(message_chart, short_time, data["stats"]["messages"]["sent"], data["stats"]["messages"]["received"], data["stats"]["messages"]["ack_sent"], data["stats"]["messages"]["ack_recieved"]); updateDualData(packets_chart, short_time, data["PacketList"]["sent"], data["PacketList"]["received"]);
updateDualData(email_chart, short_time, data["stats"]["email"]["sent"], data["stats"]["email"]["recieved"]); updateQuadData(message_chart, short_time, packet_list["MessagePacket"]["tx"], packet_list["MessagePacket"]["rx"],
updateDualData(memory_chart, short_time, data["stats"]["aprsd"]["memory_peak"], data["stats"]["aprsd"]["memory_current"]); packet_list["AckPacket"]["tx"], packet_list["AckPacket"]["rx"]);
updateDualData(email_chart, short_time, data["EmailStats"]["sent"], data["EmailStats"]["recieved"]);
updateDualData(memory_chart, short_time, data["APRSDStats"]["memory_peak"], data["APRSDStats"]["memory_current"]);
} }

View File

@ -327,7 +327,6 @@ function updatePacketTypesChart() {
option = { option = {
series: series series: series
} }
console.log(option)
packet_types_chart.setOption(option); packet_types_chart.setOption(option);
} }
@ -381,22 +380,23 @@ function updateAcksChart() {
} }
function update_stats( data ) { function update_stats( data ) {
console.log(data); console.log("update_stats() echarts.js called")
our_callsign = data["stats"]["aprsd"]["callsign"]; stats = data["stats"];
$("#version").text( data["stats"]["aprsd"]["version"] ); our_callsign = stats["APRSDStats"]["callsign"];
$("#aprs_connection").html( data["aprs_connection"] ); $("#version").text( stats["APRSDStats"]["version"] );
$("#uptime").text( "uptime: " + data["stats"]["aprsd"]["uptime"] ); $("#aprs_connection").html( stats["aprs_connection"] );
$("#uptime").text( "uptime: " + stats["APRSDStats"]["uptime"] );
const html_pretty = Prism.highlight(JSON.stringify(data, null, '\t'), Prism.languages.json, 'json'); const html_pretty = Prism.highlight(JSON.stringify(data, null, '\t'), Prism.languages.json, 'json');
$("#jsonstats").html(html_pretty); $("#jsonstats").html(html_pretty);
t = Date.parse(data["time"]); t = Date.parse(data["time"]);
ts = new Date(t); ts = new Date(t);
updatePacketData(packets_chart, ts, data["stats"]["packets"]["sent"], data["stats"]["packets"]["received"]); updatePacketData(packets_chart, ts, stats["PacketList"]["tx"], stats["PacketList"]["rx"]);
updatePacketTypesData(ts, data["stats"]["packets"]["types"]); updatePacketTypesData(ts, stats["PacketList"]["types"]);
updatePacketTypesChart(); updatePacketTypesChart();
updateMessagesChart(); updateMessagesChart();
updateAcksChart(); updateAcksChart();
updateMemChart(ts, data["stats"]["aprsd"]["memory_current"], data["stats"]["aprsd"]["memory_peak"]); updateMemChart(ts, stats["APRSDStats"]["memory_current"], stats["APRSDStats"]["memory_peak"]);
//updateQuadData(message_chart, short_time, data["stats"]["messages"]["sent"], data["stats"]["messages"]["received"], data["stats"]["messages"]["ack_sent"], data["stats"]["messages"]["ack_recieved"]); //updateQuadData(message_chart, short_time, data["stats"]["messages"]["sent"], data["stats"]["messages"]["received"], data["stats"]["messages"]["ack_sent"], data["stats"]["messages"]["ack_recieved"]);
//updateDualData(email_chart, short_time, data["stats"]["email"]["sent"], data["stats"]["email"]["recieved"]); //updateDualData(email_chart, short_time, data["stats"]["email"]["sent"], data["stats"]["email"]["recieved"]);
//updateDualData(memory_chart, short_time, data["stats"]["aprsd"]["memory_peak"], data["stats"]["aprsd"]["memory_current"]); //updateDualData(memory_chart, short_time, data["stats"]["aprsd"]["memory_peak"], data["stats"]["aprsd"]["memory_current"]);

View File

@ -24,11 +24,12 @@ function ord(str){return str.charCodeAt(0);}
function update_watchlist( data ) { function update_watchlist( data ) {
// Update the watch list // Update the watch list
stats = data["stats"];
var watchdiv = $("#watchDiv"); var watchdiv = $("#watchDiv");
var html_str = '<table class="ui celled striped table"><thead><tr><th>HAM Callsign</th><th>Age since last seen by APRSD</th></tr></thead><tbody>' var html_str = '<table class="ui celled striped table"><thead><tr><th>HAM Callsign</th><th>Age since last seen by APRSD</th></tr></thead><tbody>'
watchdiv.html('') watchdiv.html('')
jQuery.each(data["stats"]["aprsd"]["watch_list"], function(i, val) { jQuery.each(stats["WatchList"], function(i, val) {
html_str += '<tr><td class="collapsing"><img id="callsign_'+i+'" class="aprsd_1"></img>' + i + '</td><td>' + val["last"] + '</td></tr>' html_str += '<tr><td class="collapsing"><img id="callsign_'+i+'" class="aprsd_1"></img>' + i + '</td><td>' + val["last"] + '</td></tr>'
}); });
html_str += "</tbody></table>"; html_str += "</tbody></table>";
@ -60,12 +61,13 @@ function update_watchlist_from_packet(callsign, val) {
} }
function update_seenlist( data ) { function update_seenlist( data ) {
stats = data["stats"];
var seendiv = $("#seenDiv"); var seendiv = $("#seenDiv");
var html_str = '<table class="ui celled striped table">' var html_str = '<table class="ui celled striped table">'
html_str += '<thead><tr><th>HAM Callsign</th><th>Age since last seen by APRSD</th>' html_str += '<thead><tr><th>HAM Callsign</th><th>Age since last seen by APRSD</th>'
html_str += '<th>Number of packets RX</th></tr></thead><tbody>' html_str += '<th>Number of packets RX</th></tr></thead><tbody>'
seendiv.html('') seendiv.html('')
var seen_list = data["stats"]["aprsd"]["seen_list"] var seen_list = stats["SeenList"]
var len = Object.keys(seen_list).length var len = Object.keys(seen_list).length
$('#seen_count').html(len) $('#seen_count').html(len)
jQuery.each(seen_list, function(i, val) { jQuery.each(seen_list, function(i, val) {
@ -79,6 +81,7 @@ function update_seenlist( data ) {
} }
function update_plugins( data ) { function update_plugins( data ) {
stats = data["stats"];
var plugindiv = $("#pluginDiv"); var plugindiv = $("#pluginDiv");
var html_str = '<table class="ui celled striped table"><thead><tr>' var html_str = '<table class="ui celled striped table"><thead><tr>'
html_str += '<th>Plugin Name</th><th>Plugin Enabled?</th>' html_str += '<th>Plugin Name</th><th>Plugin Enabled?</th>'
@ -87,7 +90,7 @@ function update_plugins( data ) {
html_str += '</tr></thead><tbody>' html_str += '</tr></thead><tbody>'
plugindiv.html('') plugindiv.html('')
var plugins = data["stats"]["plugins"]; var plugins = stats["PluginManager"];
var keys = Object.keys(plugins); var keys = Object.keys(plugins);
keys.sort(); keys.sort();
for (var i=0; i<keys.length; i++) { // now lets iterate in sort order for (var i=0; i<keys.length; i++) { // now lets iterate in sort order
@ -107,8 +110,8 @@ function update_packets( data ) {
if (size_dict(packet_list) == 0 && size_dict(data) > 0) { if (size_dict(packet_list) == 0 && size_dict(data) > 0) {
packetsdiv.html('') packetsdiv.html('')
} }
jQuery.each(data, function(i, val) { jQuery.each(data.packets, function(i, val) {
pkt = JSON.parse(val); pkt = val;
update_watchlist_from_packet(pkt['from_call'], pkt); update_watchlist_from_packet(pkt['from_call'], pkt);
if ( packet_list.hasOwnProperty(pkt['timestamp']) == false ) { if ( packet_list.hasOwnProperty(pkt['timestamp']) == false ) {

File diff suppressed because one or more lines are too long

View File

@ -1,57 +0,0 @@
/* Stylesheet for the jquery.json-viewer plugin (classes emitted by json2html). */
/* Root element */
.json-document {
	padding: 1em 2em;
}
/* Syntax highlighting for JSON objects */
/* Nested dicts/arrays are indented via padding and marked with a dotted guide line. */
ul.json-dict, ol.json-array {
	list-style-type: none;
	margin: 0 0 0 1px;
	border-left: 1px dotted #ccc;
	padding-left: 2em;
}
/* String values render green. */
.json-string {
	color: #0B7500;
}
/* Numbers, booleans and null render bold blue. */
.json-literal {
	color: #1A01CC;
	font-weight: bold;
}
/* Toggle button */
a.json-toggle {
	position: relative;
	color: inherit;
	text-decoration: none;
}
a.json-toggle:focus {
	outline: none;
}
/* The expand/collapse arrow is drawn in :before, hanging left of the key. */
a.json-toggle:before {
	font-size: 1.1em;
	color: #c0c0c0;
	content: "\25BC"; /* down arrow */
	position: absolute;
	display: inline-block;
	width: 1em;
	text-align: center;
	line-height: 1em;
	left: -1.2em;
}
a.json-toggle:hover:before {
	color: #aaa;
}
a.json-toggle.collapsed:before {
	/* Use rotated down arrow, prevents right arrow appearing smaller than down arrow in some browsers */
	transform: rotate(-90deg);
}
/* Collapsable placeholder links */
/* Shown in place of a collapsed node, e.g. "3 items". */
a.json-placeholder {
	color: #aaa;
	padding: 0 1em;
	text-decoration: none;
}
a.json-placeholder:hover {
	text-decoration: underline;
}

View File

@ -1,158 +0,0 @@
/**
 * jQuery json-viewer
 * @author: Alexandre Bodelot <alexandre.bodelot@gmail.com>
 * @link: https://github.com/abodelot/jquery.json-viewer
 *
 * Vendored third-party plugin: renders a JS value as collapsible,
 * syntax-highlighted HTML via $(el).jsonViewer(json, options).
 */
(function($) {
  /**
   * Check if arg is either an array with at least 1 element, or a dict with at least 1 key
   * @return boolean
   */
  function isCollapsable(arg) {
    return arg instanceof Object && Object.keys(arg).length > 0;
  }

  /**
   * Check if a string represents a valid url
   * @return boolean
   */
  function isUrl(string) {
    var urlRegexp = /^(https?:\/\/|ftps?:\/\/)?([a-z0-9%-]+\.){1,}([a-z0-9-]+)?(:(\d{1,5}))?(\/([a-z0-9\-._~:/?#[\]@!$&'()*+,;=%]+)?)?$/i;
    return urlRegexp.test(string);
  }

  /**
   * Transform a json object into html representation
   * Recurses over strings, numbers, booleans, null, arrays and plain objects;
   * any other type (e.g. undefined, functions) produces an empty string.
   * @return string
   */
  function json2html(json, options) {
    var html = '';
    if (typeof json === 'string') {
      // Escape tags and quotes
      json = json
        .replace(/&/g, '&amp;')
        .replace(/</g, '&lt;')
        .replace(/>/g, '&gt;')
        .replace(/'/g, '&apos;')
        .replace(/"/g, '&quot;');
      if (options.withLinks && isUrl(json)) {
        // URL strings become clickable links opening in a new tab.
        html += '<a href="' + json + '" class="json-string" target="_blank">' + json + '</a>';
      } else {
        // Escape double quotes in the rendered non-URL string.
        json = json.replace(/&quot;/g, '\\&quot;');
        html += '<span class="json-string">"' + json + '"</span>';
      }
    } else if (typeof json === 'number') {
      html += '<span class="json-literal">' + json + '</span>';
    } else if (typeof json === 'boolean') {
      html += '<span class="json-literal">' + json + '</span>';
    } else if (json === null) {
      html += '<span class="json-literal">null</span>';
    } else if (json instanceof Array) {
      if (json.length > 0) {
        html += '[<ol class="json-array">';
        for (var i = 0; i < json.length; ++i) {
          html += '<li>';
          // Add toggle button if item is collapsable
          if (isCollapsable(json[i])) {
            html += '<a href class="json-toggle"></a>';
          }
          html += json2html(json[i], options);
          // Add comma if item is not last
          if (i < json.length - 1) {
            html += ',';
          }
          html += '</li>';
        }
        html += '</ol>]';
      } else {
        html += '[]';
      }
    } else if (typeof json === 'object') {
      var keyCount = Object.keys(json).length;
      if (keyCount > 0) {
        html += '{<ul class="json-dict">';
        for (var key in json) {
          if (Object.prototype.hasOwnProperty.call(json, key)) {
            html += '<li>';
            var keyRepr = options.withQuotes ?
              '<span class="json-string">"' + key + '"</span>' : key;
            // Add toggle button if item is collapsable
            if (isCollapsable(json[key])) {
              html += '<a href class="json-toggle">' + keyRepr + '</a>';
            } else {
              html += keyRepr;
            }
            html += ': ' + json2html(json[key], options);
            // Add comma if item is not last
            if (--keyCount > 0) {
              html += ',';
            }
            html += '</li>';
          }
        }
        html += '</ul>}';
      } else {
        html += '{}';
      }
    }
    return html;
  }

  /**
   * jQuery plugin method
   * @param json: a javascript object
   * @param options: an optional options hash
   *   collapsed       - start with all nodes collapsed (default false)
   *   rootCollapsable - add a toggle on the root element (default true)
   *   withQuotes      - quote object keys in the output (default false)
   *   withLinks       - render URL strings as <a> links (default true)
   */
  $.fn.jsonViewer = function(json, options) {
    // Merge user options with default options
    options = Object.assign({}, {
      collapsed: false,
      rootCollapsable: true,
      withQuotes: false,
      withLinks: true
    }, options);

    // jQuery chaining
    return this.each(function() {
      // Transform to HTML
      var html = json2html(json, options);
      if (options.rootCollapsable && isCollapsable(json)) {
        html = '<a href class="json-toggle"></a>' + html;
      }

      // Insert HTML in target DOM element
      $(this).html(html);
      $(this).addClass('json-document');

      // Bind click on toggle buttons
      // .off('click') first so re-rendering into the same element
      // doesn't stack duplicate handlers.
      $(this).off('click');
      $(this).on('click', 'a.json-toggle', function() {
        var target = $(this).toggleClass('collapsed').siblings('ul.json-dict, ol.json-array');
        target.toggle();
        if (target.is(':visible')) {
          target.siblings('.json-placeholder').remove();
        } else {
          // Replace the hidden subtree with an "N item(s)" placeholder link.
          var count = target.children('li').length;
          var placeholder = count + (count > 1 ? ' items' : ' item');
          target.after('<a href class="json-placeholder">' + placeholder + '</a>');
        }
        return false;
      });

      // Simulate click on toggle button when placeholder is clicked
      $(this).on('click', 'a.json-placeholder', function() {
        $(this).siblings('a.json-toggle').click();
        return false;
      });

      if (options.collapsed == true) {
        // Trigger click to collapse all nodes
        $(this).find('a.json-toggle').click();
      }
    });
  };
})(jQuery);

View File

@ -30,7 +30,6 @@
var color = Chart.helpers.color; var color = Chart.helpers.color;
$(document).ready(function() { $(document).ready(function() {
console.log(initial_stats);
start_update(); start_update();
start_charts(); start_charts();
init_messages(); init_messages();
@ -174,7 +173,7 @@
<div class="ui bottom attached tab segment" data-tab="raw-tab"> <div class="ui bottom attached tab segment" data-tab="raw-tab">
<h3 class="ui dividing header">Raw JSON</h3> <h3 class="ui dividing header">Raw JSON</h3>
<pre id="jsonstats" class="language-yaml" style="height:600px;overflow-y:auto;">{{ stats|safe }}</pre> <pre id="jsonstats" class="language-yaml" style="height:600px;overflow-y:auto;">{{ initial_stats|safe }}</pre>
</div> </div>
<div class="ui text container"> <div class="ui text container">

View File

@ -19,9 +19,10 @@ function show_aprs_icon(item, symbol) {
function ord(str){return str.charCodeAt(0);} function ord(str){return str.charCodeAt(0);}
function update_stats( data ) { function update_stats( data ) {
$("#version").text( data["stats"]["aprsd"]["version"] ); console.log(data);
$("#version").text( data["stats"]["APRSDStats"]["version"] );
$("#aprs_connection").html( data["aprs_connection"] ); $("#aprs_connection").html( data["aprs_connection"] );
$("#uptime").text( "uptime: " + data["stats"]["aprsd"]["uptime"] ); $("#uptime").text( "uptime: " + data["stats"]["APRSDStats"]["uptime"] );
short_time = data["time"].split(/\s(.+)/)[1]; short_time = data["time"].split(/\s(.+)/)[1];
} }

View File

@ -1,57 +0,0 @@
/* Stylesheet for the jquery.json-viewer plugin (classes emitted by json2html). */
/* Root element */
.json-document {
	padding: 1em 2em;
}
/* Syntax highlighting for JSON objects */
/* Nested dicts/arrays are indented via padding and marked with a dotted guide line. */
ul.json-dict, ol.json-array {
	list-style-type: none;
	margin: 0 0 0 1px;
	border-left: 1px dotted #ccc;
	padding-left: 2em;
}
/* String values render green. */
.json-string {
	color: #0B7500;
}
/* Numbers, booleans and null render bold blue. */
.json-literal {
	color: #1A01CC;
	font-weight: bold;
}
/* Toggle button */
a.json-toggle {
	position: relative;
	color: inherit;
	text-decoration: none;
}
a.json-toggle:focus {
	outline: none;
}
/* The expand/collapse arrow is drawn in :before, hanging left of the key. */
a.json-toggle:before {
	font-size: 1.1em;
	color: #c0c0c0;
	content: "\25BC"; /* down arrow */
	position: absolute;
	display: inline-block;
	width: 1em;
	text-align: center;
	line-height: 1em;
	left: -1.2em;
}
a.json-toggle:hover:before {
	color: #aaa;
}
a.json-toggle.collapsed:before {
	/* Use rotated down arrow, prevents right arrow appearing smaller than down arrow in some browsers */
	transform: rotate(-90deg);
}
/* Collapsable placeholder links */
/* Shown in place of a collapsed node, e.g. "3 items". */
a.json-placeholder {
	color: #aaa;
	padding: 0 1em;
	text-decoration: none;
}
a.json-placeholder:hover {
	text-decoration: underline;
}

View File

@ -1,158 +0,0 @@
/**
 * jQuery json-viewer
 * @author: Alexandre Bodelot <alexandre.bodelot@gmail.com>
 * @link: https://github.com/abodelot/jquery.json-viewer
 *
 * Vendored third-party plugin: renders a JS value as collapsible,
 * syntax-highlighted HTML via $(el).jsonViewer(json, options).
 */
(function($) {
  /**
   * Check if arg is either an array with at least 1 element, or a dict with at least 1 key
   * @return boolean
   */
  function isCollapsable(arg) {
    return arg instanceof Object && Object.keys(arg).length > 0;
  }

  /**
   * Check if a string represents a valid url
   * @return boolean
   */
  function isUrl(string) {
    var urlRegexp = /^(https?:\/\/|ftps?:\/\/)?([a-z0-9%-]+\.){1,}([a-z0-9-]+)?(:(\d{1,5}))?(\/([a-z0-9\-._~:/?#[\]@!$&'()*+,;=%]+)?)?$/i;
    return urlRegexp.test(string);
  }

  /**
   * Transform a json object into html representation
   * Recurses over strings, numbers, booleans, null, arrays and plain objects;
   * any other type (e.g. undefined, functions) produces an empty string.
   * @return string
   */
  function json2html(json, options) {
    var html = '';
    if (typeof json === 'string') {
      // Escape tags and quotes
      json = json
        .replace(/&/g, '&amp;')
        .replace(/</g, '&lt;')
        .replace(/>/g, '&gt;')
        .replace(/'/g, '&apos;')
        .replace(/"/g, '&quot;');
      if (options.withLinks && isUrl(json)) {
        // URL strings become clickable links opening in a new tab.
        html += '<a href="' + json + '" class="json-string" target="_blank">' + json + '</a>';
      } else {
        // Escape double quotes in the rendered non-URL string.
        json = json.replace(/&quot;/g, '\\&quot;');
        html += '<span class="json-string">"' + json + '"</span>';
      }
    } else if (typeof json === 'number') {
      html += '<span class="json-literal">' + json + '</span>';
    } else if (typeof json === 'boolean') {
      html += '<span class="json-literal">' + json + '</span>';
    } else if (json === null) {
      html += '<span class="json-literal">null</span>';
    } else if (json instanceof Array) {
      if (json.length > 0) {
        html += '[<ol class="json-array">';
        for (var i = 0; i < json.length; ++i) {
          html += '<li>';
          // Add toggle button if item is collapsable
          if (isCollapsable(json[i])) {
            html += '<a href class="json-toggle"></a>';
          }
          html += json2html(json[i], options);
          // Add comma if item is not last
          if (i < json.length - 1) {
            html += ',';
          }
          html += '</li>';
        }
        html += '</ol>]';
      } else {
        html += '[]';
      }
    } else if (typeof json === 'object') {
      var keyCount = Object.keys(json).length;
      if (keyCount > 0) {
        html += '{<ul class="json-dict">';
        for (var key in json) {
          if (Object.prototype.hasOwnProperty.call(json, key)) {
            html += '<li>';
            var keyRepr = options.withQuotes ?
              '<span class="json-string">"' + key + '"</span>' : key;
            // Add toggle button if item is collapsable
            if (isCollapsable(json[key])) {
              html += '<a href class="json-toggle">' + keyRepr + '</a>';
            } else {
              html += keyRepr;
            }
            html += ': ' + json2html(json[key], options);
            // Add comma if item is not last
            if (--keyCount > 0) {
              html += ',';
            }
            html += '</li>';
          }
        }
        html += '</ul>}';
      } else {
        html += '{}';
      }
    }
    return html;
  }

  /**
   * jQuery plugin method
   * @param json: a javascript object
   * @param options: an optional options hash
   *   collapsed       - start with all nodes collapsed (default false)
   *   rootCollapsable - add a toggle on the root element (default true)
   *   withQuotes      - quote object keys in the output (default false)
   *   withLinks       - render URL strings as <a> links (default true)
   */
  $.fn.jsonViewer = function(json, options) {
    // Merge user options with default options
    options = Object.assign({}, {
      collapsed: false,
      rootCollapsable: true,
      withQuotes: false,
      withLinks: true
    }, options);

    // jQuery chaining
    return this.each(function() {
      // Transform to HTML
      var html = json2html(json, options);
      if (options.rootCollapsable && isCollapsable(json)) {
        html = '<a href class="json-toggle"></a>' + html;
      }

      // Insert HTML in target DOM element
      $(this).html(html);
      $(this).addClass('json-document');

      // Bind click on toggle buttons
      // .off('click') first so re-rendering into the same element
      // doesn't stack duplicate handlers.
      $(this).off('click');
      $(this).on('click', 'a.json-toggle', function() {
        var target = $(this).toggleClass('collapsed').siblings('ul.json-dict, ol.json-array');
        target.toggle();
        if (target.is(':visible')) {
          target.siblings('.json-placeholder').remove();
        } else {
          // Replace the hidden subtree with an "N item(s)" placeholder link.
          var count = target.children('li').length;
          var placeholder = count + (count > 1 ? ' items' : ' item');
          target.after('<a href class="json-placeholder">' + placeholder + '</a>');
        }
        return false;
      });

      // Simulate click on toggle button when placeholder is clicked
      $(this).on('click', 'a.json-placeholder', function() {
        $(this).siblings('a.json-toggle').click();
        return false;
      });

      if (options.collapsed == true) {
        // Trigger click to collapse all nodes
        $(this).find('a.json-toggle').click();
      }
    });
  };
})(jQuery);

View File

@ -3,10 +3,10 @@ import importlib.metadata as imp
import io import io
import json import json
import logging import logging
import time import queue
import flask import flask
from flask import Flask from flask import Flask, request
from flask_httpauth import HTTPBasicAuth from flask_httpauth import HTTPBasicAuth
from oslo_config import cfg, generator from oslo_config import cfg, generator
import socketio import socketio
@ -15,11 +15,13 @@ from werkzeug.security import check_password_hash
import aprsd import aprsd
from aprsd import cli_helper, client, conf, packets, plugin, threads from aprsd import cli_helper, client, conf, packets, plugin, threads
from aprsd.log import log from aprsd.log import log
from aprsd.rpc import client as aprsd_rpc_client from aprsd.threads import stats as stats_threads
from aprsd.utils import json as aprsd_json
CONF = cfg.CONF CONF = cfg.CONF
LOG = logging.getLogger("gunicorn.access") LOG = logging.getLogger("gunicorn.access")
logging_queue = queue.Queue()
auth = HTTPBasicAuth() auth = HTTPBasicAuth()
users: dict[str, str] = {} users: dict[str, str] = {}
@ -45,105 +47,26 @@ def verify_password(username, password):
def _stats(): def _stats():
track = aprsd_rpc_client.RPCClient().get_packet_track() stats_obj = stats_threads.StatsStore()
stats_obj.load()
now = datetime.datetime.now() now = datetime.datetime.now()
time_format = "%m-%d-%Y %H:%M:%S" time_format = "%m-%d-%Y %H:%M:%S"
stats = {
stats_dict = aprsd_rpc_client.RPCClient().get_stats_dict()
if not stats_dict:
stats_dict = {
"aprsd": {},
"aprs-is": {"server": ""},
"messages": {
"sent": 0,
"received": 0,
},
"email": {
"sent": 0,
"received": 0,
},
"seen_list": {
"sent": 0,
"received": 0,
},
}
# Convert the watch_list entries to age
wl = aprsd_rpc_client.RPCClient().get_watch_list()
new_list = {}
if wl:
for call in wl.get_all():
# call_date = datetime.datetime.strptime(
# str(wl.last_seen(call)),
# "%Y-%m-%d %H:%M:%S.%f",
# )
# We have to convert the RingBuffer to a real list
# so that json.dumps works.
# pkts = []
# for pkt in wl.get(call)["packets"].get():
# pkts.append(pkt)
new_list[call] = {
"last": wl.age(call),
# "packets": pkts
}
stats_dict["aprsd"]["watch_list"] = new_list
packet_list = aprsd_rpc_client.RPCClient().get_packet_list()
rx = tx = 0
types = {}
if packet_list:
rx = packet_list.total_rx()
tx = packet_list.total_tx()
types_copy = packet_list.types.copy()
for key in types_copy:
types[str(key)] = dict(types_copy[key])
stats_dict["packets"] = {
"sent": tx,
"received": rx,
"types": types,
}
if track:
size_tracker = len(track)
else:
size_tracker = 0
result = {
"time": now.strftime(time_format), "time": now.strftime(time_format),
"size_tracker": size_tracker, "stats": stats_obj.data,
"stats": stats_dict,
} }
return stats
return result
@app.route("/stats") @app.route("/stats")
def stats(): def stats():
LOG.debug("/stats called") LOG.debug("/stats called")
return json.dumps(_stats()) return json.dumps(_stats(), cls=aprsd_json.SimpleJSONEncoder)
@app.route("/") @app.route("/")
def index(): def index():
stats = _stats() stats = _stats()
wl = aprsd_rpc_client.RPCClient().get_watch_list()
if wl and wl.is_enabled():
watch_count = len(wl)
watch_age = wl.max_delta()
else:
watch_count = 0
watch_age = 0
sl = aprsd_rpc_client.RPCClient().get_seen_list()
if sl:
seen_count = len(sl)
else:
seen_count = 0
pm = plugin.PluginManager() pm = plugin.PluginManager()
plugins = pm.get_plugins() plugins = pm.get_plugins()
plugin_count = len(plugins) plugin_count = len(plugins)
@ -152,7 +75,7 @@ def index():
transport = "aprs-is" transport = "aprs-is"
aprs_connection = ( aprs_connection = (
"APRS-IS Server: <a href='http://status.aprs2.net' >" "APRS-IS Server: <a href='http://status.aprs2.net' >"
"{}</a>".format(stats["stats"]["aprs-is"]["server"]) "{}</a>".format(stats["stats"]["APRSClientStats"]["server_string"])
) )
else: else:
# We might be connected to a KISS socket? # We might be connected to a KISS socket?
@ -173,13 +96,13 @@ def index():
) )
) )
stats["transport"] = transport stats["stats"]["APRSClientStats"]["transport"] = transport
stats["aprs_connection"] = aprs_connection stats["stats"]["APRSClientStats"]["aprs_connection"] = aprs_connection
entries = conf.conf_to_dict() entries = conf.conf_to_dict()
return flask.render_template( return flask.render_template(
"index.html", "index.html",
initial_stats=stats, initial_stats=json.dumps(stats, cls=aprsd_json.SimpleJSONEncoder),
aprs_connection=aprs_connection, aprs_connection=aprs_connection,
callsign=CONF.callsign, callsign=CONF.callsign,
version=aprsd.__version__, version=aprsd.__version__,
@ -187,9 +110,6 @@ def index():
entries, indent=4, entries, indent=4,
sort_keys=True, default=str, sort_keys=True, default=str,
), ),
watch_count=watch_count,
watch_age=watch_age,
seen_count=seen_count,
plugin_count=plugin_count, plugin_count=plugin_count,
# oslo_out=generate_oslo() # oslo_out=generate_oslo()
) )
@ -209,19 +129,10 @@ def messages():
@auth.login_required @auth.login_required
@app.route("/packets") @app.route("/packets")
def get_packets(): def get_packets():
LOG.debug("/packets called") stats = _stats()
packet_list = aprsd_rpc_client.RPCClient().get_packet_list() stats_dict = stats["stats"]
if packet_list: packets = stats_dict.get("PacketList", {})
tmp_list = [] return json.dumps(packets, cls=aprsd_json.SimpleJSONEncoder)
pkts = packet_list.copy()
for key in pkts:
pkt = packet_list.get(key)
if pkt:
tmp_list.append(pkt.json)
return json.dumps(tmp_list)
else:
return json.dumps([])
@auth.login_required @auth.login_required
@ -273,23 +184,34 @@ def save():
return json.dumps({"messages": "saved"}) return json.dumps({"messages": "saved"})
@app.route("/log_entries", methods=["POST"])
def log_entries():
"""The url that the server can call to update the logs."""
entries = request.json
LOG.info(f"Log entries called {len(entries)}")
for entry in entries:
logging_queue.put(entry)
return json.dumps({"messages": "saved"})
class LogUpdateThread(threads.APRSDThread): class LogUpdateThread(threads.APRSDThread):
def __init__(self): def __init__(self, logging_queue=None):
super().__init__("LogUpdate") super().__init__("LogUpdate")
self.logging_queue = logging_queue
def loop(self): def loop(self):
if sio: if sio:
log_entries = aprsd_rpc_client.RPCClient().get_log_entries() try:
log_entry = self.logging_queue.get(block=True, timeout=1)
if log_entries: if log_entry:
LOG.info(f"Sending log entries! {len(log_entries)}")
for entry in log_entries:
sio.emit( sio.emit(
"log_entry", entry, "log_entry",
log_entry,
namespace="/logs", namespace="/logs",
) )
time.sleep(5) except queue.Empty:
pass
return True return True
@ -297,17 +219,17 @@ class LoggingNamespace(socketio.Namespace):
log_thread = None log_thread = None
def on_connect(self, sid, environ): def on_connect(self, sid, environ):
global sio global sio, logging_queue
LOG.debug(f"LOG on_connect {sid}") LOG.info(f"LOG on_connect {sid}")
sio.emit( sio.emit(
"connected", {"data": "/logs Connected"}, "connected", {"data": "/logs Connected"},
namespace="/logs", namespace="/logs",
) )
self.log_thread = LogUpdateThread() self.log_thread = LogUpdateThread(logging_queue=logging_queue)
self.log_thread.start() self.log_thread.start()
def on_disconnect(self, sid): def on_disconnect(self, sid):
LOG.debug(f"LOG Disconnected {sid}") LOG.info(f"LOG Disconnected {sid}")
if self.log_thread: if self.log_thread:
self.log_thread.stop() self.log_thread.stop()
@ -332,7 +254,7 @@ if __name__ == "__main__":
async_mode = "threading" async_mode = "threading"
sio = socketio.Server(logger=True, async_mode=async_mode) sio = socketio.Server(logger=True, async_mode=async_mode)
app.wsgi_app = socketio.WSGIApp(sio, app.wsgi_app) app.wsgi_app = socketio.WSGIApp(sio, app.wsgi_app)
log_level = init_app(log_level="DEBUG") log_level = init_app()
log.setup_logging(log_level) log.setup_logging(log_level)
sio.register_namespace(LoggingNamespace("/logs")) sio.register_namespace(LoggingNamespace("/logs"))
CONF.log_opt_values(LOG, logging.DEBUG) CONF.log_opt_values(LOG, logging.DEBUG)
@ -352,7 +274,7 @@ if __name__ == "uwsgi_file_aprsd_wsgi":
sio = socketio.Server(logger=True, async_mode=async_mode) sio = socketio.Server(logger=True, async_mode=async_mode)
app.wsgi_app = socketio.WSGIApp(sio, app.wsgi_app) app.wsgi_app = socketio.WSGIApp(sio, app.wsgi_app)
log_level = init_app( log_level = init_app(
log_level="DEBUG", # log_level="DEBUG",
config_file="/config/aprsd.conf", config_file="/config/aprsd.conf",
# Commented out for local development. # Commented out for local development.
# config_file=cli_helper.DEFAULT_CONFIG_FILE # config_file=cli_helper.DEFAULT_CONFIG_FILE
@ -371,7 +293,7 @@ if __name__ == "aprsd.wsgi":
app.wsgi_app = socketio.WSGIApp(sio, app.wsgi_app) app.wsgi_app = socketio.WSGIApp(sio, app.wsgi_app)
log_level = init_app( log_level = init_app(
log_level="DEBUG", # log_level="DEBUG",
config_file="/config/aprsd.conf", config_file="/config/aprsd.conf",
# config_file=cli_helper.DEFAULT_CONFIG_FILE, # config_file=cli_helper.DEFAULT_CONFIG_FILE,
) )

View File

@ -9,7 +9,7 @@ alabaster==0.7.16 # via sphinx
autoflake==1.5.3 # via gray autoflake==1.5.3 # via gray
babel==2.14.0 # via sphinx babel==2.14.0 # via sphinx
black==24.3.0 # via gray black==24.3.0 # via gray
build==1.1.1 # via pip-tools build==1.2.1 # via pip-tools
cachetools==5.3.3 # via tox cachetools==5.3.3 # via tox
certifi==2024.2.2 # via requests certifi==2024.2.2 # via requests
cfgv==3.4.0 # via pre-commit cfgv==3.4.0 # via pre-commit
@ -23,7 +23,7 @@ coverage[toml]==7.4.4 # via pytest-cov
distlib==0.3.8 # via virtualenv distlib==0.3.8 # via virtualenv
docutils==0.20.1 # via sphinx docutils==0.20.1 # via sphinx
exceptiongroup==1.2.0 # via pytest exceptiongroup==1.2.0 # via pytest
filelock==3.13.1 # via tox, virtualenv filelock==3.13.3 # via tox, virtualenv
fixit==2.1.0 # via gray fixit==2.1.0 # via gray
flake8==7.0.0 # via -r dev-requirements.in, pep8-naming flake8==7.0.0 # via -r dev-requirements.in, pep8-naming
gray==0.14.0 # via -r dev-requirements.in gray==0.14.0 # via -r dev-requirements.in
@ -33,12 +33,12 @@ imagesize==1.4.1 # via sphinx
iniconfig==2.0.0 # via pytest iniconfig==2.0.0 # via pytest
isort==5.13.2 # via -r dev-requirements.in, gray isort==5.13.2 # via -r dev-requirements.in, gray
jinja2==3.1.3 # via sphinx jinja2==3.1.3 # via sphinx
libcst==1.2.0 # via fixit libcst==1.3.1 # via fixit
markupsafe==2.1.5 # via jinja2 markupsafe==2.1.5 # via jinja2
mccabe==0.7.0 # via flake8 mccabe==0.7.0 # via flake8
moreorless==0.4.0 # via fixit moreorless==0.4.0 # via fixit
mypy==1.9.0 # via -r dev-requirements.in mypy==1.9.0 # via -r dev-requirements.in
mypy-extensions==1.0.0 # via black, mypy, typing-inspect mypy-extensions==1.0.0 # via black, mypy
nodeenv==1.8.0 # via pre-commit nodeenv==1.8.0 # via pre-commit
packaging==24.0 # via black, build, fixit, pyproject-api, pytest, sphinx, tox packaging==24.0 # via black, build, fixit, pyproject-api, pytest, sphinx, tox
pathspec==0.12.1 # via black, trailrunner pathspec==0.12.1 # via black, trailrunner
@ -71,8 +71,7 @@ toml==0.10.2 # via autoflake
tomli==2.0.1 # via black, build, coverage, fixit, mypy, pip-tools, pyproject-api, pyproject-hooks, pytest, tox tomli==2.0.1 # via black, build, coverage, fixit, mypy, pip-tools, pyproject-api, pyproject-hooks, pytest, tox
tox==4.14.2 # via -r dev-requirements.in tox==4.14.2 # via -r dev-requirements.in
trailrunner==1.4.0 # via fixit trailrunner==1.4.0 # via fixit
typing-extensions==4.10.0 # via black, libcst, mypy, typing-inspect typing-extensions==4.11.0 # via black, mypy
typing-inspect==0.9.0 # via libcst
unify==0.5 # via gray unify==0.5 # via gray
untokenize==0.1.1 # via unify untokenize==0.1.1 # via unify
urllib3==2.2.1 # via requests urllib3==2.2.1 # via requests

View File

@ -29,7 +29,6 @@ kiss3
attrs attrs
dataclasses dataclasses
oslo.config oslo.config
rpyc>=6.0.0
# Pin this here so it doesn't require a compile on # Pin this here so it doesn't require a compile on
# raspi # raspi
shellingham shellingham

View File

@ -22,7 +22,7 @@ dataclasses-json==0.6.4 # via -r requirements.in
debtcollector==3.0.0 # via oslo-config debtcollector==3.0.0 # via oslo-config
deprecated==1.2.14 # via click-params deprecated==1.2.14 # via click-params
dnspython==2.6.1 # via eventlet dnspython==2.6.1 # via eventlet
eventlet==0.36.0 # via -r requirements.in eventlet==0.36.1 # via -r requirements.in
flask==3.0.2 # via -r requirements.in, flask-httpauth, flask-socketio flask==3.0.2 # via -r requirements.in, flask-httpauth, flask-socketio
flask-httpauth==4.8.0 # via -r requirements.in flask-httpauth==4.8.0 # via -r requirements.in
flask-socketio==5.3.6 # via -r requirements.in flask-socketio==5.3.6 # via -r requirements.in
@ -47,7 +47,6 @@ oslo-i18n==6.3.0 # via oslo-config
packaging==24.0 # via marshmallow packaging==24.0 # via marshmallow
pbr==6.0.0 # via -r requirements.in, oslo-i18n, stevedore pbr==6.0.0 # via -r requirements.in, oslo-i18n, stevedore
pluggy==1.4.0 # via -r requirements.in pluggy==1.4.0 # via -r requirements.in
plumbum==1.8.2 # via rpyc
pygments==2.17.2 # via rich pygments==2.17.2 # via rich
pyserial==3.5 # via pyserial-asyncio pyserial==3.5 # via pyserial-asyncio
pyserial-asyncio==0.6 # via kiss3 pyserial-asyncio==0.6 # via kiss3
@ -58,7 +57,6 @@ pyyaml==6.0.1 # via -r requirements.in, oslo-config
requests==2.31.0 # via -r requirements.in, oslo-config, update-checker requests==2.31.0 # via -r requirements.in, oslo-config, update-checker
rfc3986==2.0.0 # via oslo-config rfc3986==2.0.0 # via oslo-config
rich==12.6.0 # via -r requirements.in rich==12.6.0 # via -r requirements.in
rpyc==6.0.0 # via -r requirements.in
rush==2021.4.0 # via -r requirements.in rush==2021.4.0 # via -r requirements.in
shellingham==1.5.4 # via -r requirements.in, click-completion shellingham==1.5.4 # via -r requirements.in, click-completion
simple-websocket==1.0.0 # via python-engineio simple-websocket==1.0.0 # via python-engineio
@ -67,12 +65,12 @@ soupsieve==2.5 # via beautifulsoup4
stevedore==5.2.0 # via oslo-config stevedore==5.2.0 # via oslo-config
tabulate==0.9.0 # via -r requirements.in tabulate==0.9.0 # via -r requirements.in
thesmuggler==1.0.1 # via -r requirements.in thesmuggler==1.0.1 # via -r requirements.in
typing-extensions==4.10.0 # via typing-inspect typing-extensions==4.11.0 # via typing-inspect
typing-inspect==0.9.0 # via dataclasses-json typing-inspect==0.9.0 # via dataclasses-json
update-checker==0.18.0 # via -r requirements.in update-checker==0.18.0 # via -r requirements.in
urllib3==2.2.1 # via requests urllib3==2.2.1 # via requests
validators==0.22.0 # via click-params validators==0.22.0 # via click-params
werkzeug==3.0.1 # via -r requirements.in, flask werkzeug==3.0.2 # via -r requirements.in, flask
wrapt==1.16.0 # via -r requirements.in, debtcollector, deprecated wrapt==1.16.0 # via -r requirements.in, debtcollector, deprecated
wsproto==1.2.0 # via simple-websocket wsproto==1.2.0 # via simple-websocket
zipp==3.18.1 # via importlib-metadata zipp==3.18.1 # via importlib-metadata

View File

@ -1,54 +0,0 @@
from unittest import mock
from oslo_config import cfg
from aprsd import packets
from aprsd.packets import tracker
from aprsd.plugins import query as query_plugin
from .. import fake, test_plugin
CONF = cfg.CONF
class TestQueryPlugin(test_plugin.TestPlugin):
@mock.patch("aprsd.packets.tracker.PacketTrack.flush")
def test_query_flush(self, mock_flush):
packet = fake.fake_packet(message="!delete")
CONF.callsign = fake.FAKE_TO_CALLSIGN
CONF.save_enabled = True
CONF.query_plugin.callsign = fake.FAKE_FROM_CALLSIGN
query = query_plugin.QueryPlugin()
query.enabled = True
expected = "Deleted ALL pending msgs."
actual = query.filter(packet)
mock_flush.assert_called_once()
self.assertEqual(expected, actual)
@mock.patch("aprsd.packets.tracker.PacketTrack.restart_delayed")
def test_query_restart_delayed(self, mock_restart):
CONF.callsign = fake.FAKE_TO_CALLSIGN
CONF.save_enabled = True
CONF.query_plugin.callsign = fake.FAKE_FROM_CALLSIGN
track = tracker.PacketTrack()
track.data = {}
packet = fake.fake_packet(message="!4")
query = query_plugin.QueryPlugin()
expected = "No pending msgs to resend"
actual = query.filter(packet)
mock_restart.assert_not_called()
self.assertEqual(expected, actual)
mock_restart.reset_mock()
# add a message
pkt = packets.MessagePacket(
from_call=self.fromcall,
to_call="testing",
msgNo=self.ack,
)
track.add(pkt)
actual = query.filter(packet)
mock_restart.assert_called_once()

View File

@ -1,3 +1,5 @@
from unittest import mock
from oslo_config import cfg from oslo_config import cfg
import aprsd import aprsd
@ -11,7 +13,9 @@ CONF = cfg.CONF
class TestVersionPlugin(test_plugin.TestPlugin): class TestVersionPlugin(test_plugin.TestPlugin):
def test_version(self): @mock.patch("aprsd.stats.app.APRSDStats.uptime")
def test_version(self, mock_stats):
mock_stats.return_value = "00:00:00"
expected = f"APRSD ver:{aprsd.__version__} uptime:00:00:00" expected = f"APRSD ver:{aprsd.__version__} uptime:00:00:00"
CONF.callsign = fake.FAKE_TO_CALLSIGN CONF.callsign = fake.FAKE_TO_CALLSIGN
version = version_plugin.VersionPlugin() version = version_plugin.VersionPlugin()
@ -31,10 +35,3 @@ class TestVersionPlugin(test_plugin.TestPlugin):
) )
actual = version.filter(packet) actual = version.filter(packet)
self.assertEqual(expected, actual) self.assertEqual(expected, actual)
packet = fake.fake_packet(
message="Version",
msg_number=1,
)
actual = version.filter(packet)
self.assertEqual(expected, actual)

View File

@ -6,7 +6,7 @@ from oslo_config import cfg
from aprsd import conf # noqa: F401 from aprsd import conf # noqa: F401
from aprsd import packets from aprsd import packets
from aprsd import plugin as aprsd_plugin from aprsd import plugin as aprsd_plugin
from aprsd import plugins, stats from aprsd import plugins
from aprsd.packets import core from aprsd.packets import core
from . import fake from . import fake
@ -89,7 +89,6 @@ class TestPlugin(unittest.TestCase):
self.config_and_init() self.config_and_init()
def tearDown(self) -> None: def tearDown(self) -> None:
stats.APRSDStats._instance = None
packets.WatchList._instance = None packets.WatchList._instance = None
packets.SeenList._instance = None packets.SeenList._instance = None
packets.PacketTrack._instance = None packets.PacketTrack._instance = None