
Unify the logging to file and stdout

This patch updates the logging facility to ensure that
logging to a file works even when --quiet mode is selected.
Also update the listen and list-plugins commands to show
a console.status line while waiting for results to come in.
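
The gist of the unified setup, as a minimal sketch (the handler wiring and config keys here are illustrative, not the exact aprsd code): the rotating file handler is attached whenever a logfile is configured, and --quiet only skips the console handler, so quiet runs still produce a complete log file.

import logging
import sys
from logging.handlers import RotatingFileHandler


def setup_logging_sketch(config, loglevel, quiet):
    # Illustrative sketch only; names and config keys are assumptions.
    log = logging.getLogger("APRSD")
    log.setLevel(loglevel)
    formatter = logging.Formatter("%(asctime)s %(levelname)s %(message)s")

    # File logging is wired up whenever a logfile is configured,
    # regardless of --quiet.
    log_file = config.get("logfile")
    if log_file:
        fh = RotatingFileHandler(log_file, maxBytes=10248576 * 5, backupCount=4)
        fh.setFormatter(formatter)
        log.addHandler(fh)

    # --quiet only suppresses console output.
    if not quiet:
        sh = logging.StreamHandler(sys.stdout)
        sh.setFormatter(formatter)
        log.addHandler(sh)
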
Hemna 2021-12-10 10:45:51 -05:00
parent 450bacfe99
commit 592b328956
6 changed files with 94 additions and 71 deletions

File 1 of 6

@@ -11,6 +11,7 @@ import sys
 import click
 import requests
+from rich.console import Console

 import aprsd
 from aprsd import cli_helper, utils

@@ -21,6 +22,7 @@ from aprsd.aprsd import cli
 # setup the global logger
 # logging.basicConfig(level=logging.DEBUG) # level=10
 LOG = logging.getLogger("APRSD")
+console = Console()


 @cli.command()

@@ -42,36 +44,40 @@ LOG = logging.getLogger("APRSD")
 @cli_helper.process_standard_options_no_config
 def healthcheck(ctx, health_url, timeout):
     """Check the health of the running aprsd server."""
-    LOG.debug(f"APRSD HealthCheck version: {aprsd.__version__}")
-    try:
-        url = health_url
-        response = requests.get(url, timeout=timeout)
-        response.raise_for_status()
-    except Exception as ex:
-        LOG.error(f"Failed to fetch healthcheck url '{url}' : '{ex}'")
-        sys.exit(-1)
-    else:
-        stats = json.loads(response.text)
-        LOG.debug(stats)
-        email_thread_last_update = stats["stats"]["email"]["thread_last_update"]
-        delta = utils.parse_delta_str(email_thread_last_update)
-        d = datetime.timedelta(**delta)
-        max_timeout = {"hours": 0.0, "minutes": 5, "seconds": 0}
-        max_delta = datetime.timedelta(**max_timeout)
-        if d > max_delta:
-            LOG.error(f"Email thread is very old! {d}")
-            sys.exit(-1)
-
-        aprsis_last_update = stats["stats"]["aprs-is"]["last_update"]
-        delta = utils.parse_delta_str(aprsis_last_update)
-        d = datetime.timedelta(**delta)
-        max_timeout = {"hours": 0.0, "minutes": 5, "seconds": 0}
-        max_delta = datetime.timedelta(**max_timeout)
-        if d > max_delta:
-            LOG.error(f"APRS-IS last update is very old! {d}")
-            sys.exit(-1)
-
-        sys.exit(0)
+    console.log(f"APRSD HealthCheck version: {aprsd.__version__}")
+    with console.status(f"APRSD HealthCheck version: {aprsd.__version__}") as status:
+        try:
+            url = health_url
+            response = requests.get(url, timeout=timeout)
+            response.raise_for_status()
+        except Exception as ex:
+            console.log(f"Failed to fetch healthcheck url '{url}' : '{ex}'")
+            sys.exit(-1)
+        else:
+            import time
+            time.sleep(2)
+            status.update("PISS")
+            time.sleep(2)
+            stats = json.loads(response.text)
+            email_thread_last_update = stats["stats"]["email"]["thread_last_update"]
+
+            if email_thread_last_update != "never":
+                delta = utils.parse_delta_str(email_thread_last_update)
+                d = datetime.timedelta(**delta)
+                max_timeout = {"hours": 0.0, "minutes": 5, "seconds": 0}
+                max_delta = datetime.timedelta(**max_timeout)
+                if d > max_delta:
+                    console.log(f"Email thread is very old! {d}")
+                    sys.exit(-1)
+
+            aprsis_last_update = stats["stats"]["aprs-is"]["last_update"]
+            delta = utils.parse_delta_str(aprsis_last_update)
+            d = datetime.timedelta(**delta)
+            max_timeout = {"hours": 0.0, "minutes": 5, "seconds": 0}
+            max_delta = datetime.timedelta(**max_timeout)
+            if d > max_delta:
+                LOG.error(f"APRS-IS last update is very old! {d}")
+                sys.exit(-1)
+
+            sys.exit(0)

File 2 of 6

@@ -227,6 +227,8 @@ def list_plugins(ctx):
     # now find any from pypi?
     installed_plugins = get_installed_plugins()
-    show_pypi_plugins(installed_plugins, console)
+    with console.status("Fetching pypi.org plugins"):
+        show_pypi_plugins(installed_plugins, console)

-    show_installed_plugins(installed_plugins, console)
+    with console.status("Looking for installed aprsd plugins"):
+        show_installed_plugins(installed_plugins, console)
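
The status lines above use rich's Console.status(), a context manager that keeps a spinner and message on screen while the wrapped block runs. A small usage sketch (fetch_plugins is a hypothetical placeholder for the slow call):

from rich.console import Console

console = Console()


def fetch_plugins():
    # Hypothetical placeholder for a slow lookup, e.g. a pypi.org query.
    return ["aprsd-slack-plugin", "aprsd-weewx-plugin"]


with console.status("Fetching pypi.org plugins") as status:
    plugins = fetch_plugins()
    status.update("Rendering results")  # the message can be updated in place
console.log(f"Found {len(plugins)} plugins")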

File 3 of 6

@@ -10,6 +10,7 @@ import time
 import aprslib
 import click
+from rich.console import Console

 # local imports here
 import aprsd

@@ -22,6 +23,7 @@ from aprsd.aprsd import cli
 # setup the global logger
 # logging.basicConfig(level=logging.DEBUG) # level=10
 LOG = logging.getLogger("APRSD")
+console = Console()


 def signal_handler(sig, frame):

@@ -113,7 +115,7 @@ def listen(
     resp = packet.get("response", None)
     if resp == "ack":
         ack_num = packet.get("msgNo")
-        LOG.info(f"We saw an ACK {ack_num} Ignoring")
+        console.log(f"We saw an ACK {ack_num} Ignoring")
         messaging.log_packet(packet)
     else:
         message = packet.get("message_text", None)

@@ -125,6 +127,7 @@ def listen(
         message,
         fromcall=fromcall,
         ack=msg_number,
+        console=console,
     )

     # Initialize the client factory and create

@@ -148,7 +151,8 @@ def listen(
             # This will register a packet consumer with aprslib
             # When new packets come in the consumer will process
             # the packet
-            aprs_client.consumer(rx_packet, raw=False)
+            with console.status("Listening for packets"):
+                aprs_client.consumer(rx_packet, raw=False)
         except aprslib.exceptions.ConnectionDrop:
             LOG.error("Connection dropped, reconnecting")
             time.sleep(5)

File 4 of 6

@@ -1,9 +1,7 @@
 import datetime
 import json
 import logging
-from logging import NullHandler
 from logging.handlers import RotatingFileHandler
-import sys
 import threading
 import time

@@ -11,6 +9,7 @@ import aprslib
 from aprslib.exceptions import LoginError
 import flask
 from flask import request
+from flask.logging import default_handler
 import flask_classful
 from flask_httpauth import HTTPBasicAuth
 from flask_socketio import Namespace, SocketIO

@@ -21,6 +20,7 @@ from aprsd import client
 from aprsd import config as aprsd_config
 from aprsd import log, messaging, packets, plugin, stats, threads, utils
 from aprsd.clients import aprsis
+from aprsd.logging import logging as aprsd_logging

 LOG = logging.getLogger("APRSD")

@@ -552,6 +552,15 @@ class LoggingNamespace(Namespace):

 def setup_logging(config, flask_app, loglevel, quiet):
     flask_log = logging.getLogger("werkzeug")
+    flask_app.logger.removeHandler(default_handler)
+    flask_log.removeHandler(default_handler)
+    log_level = aprsd_config.LOG_LEVELS[loglevel]
+    flask_log.setLevel(log_level)
+    date_format = config["aprsd"].get(
+        "dateformat",
+        aprsd_config.DEFAULT_DATE_FORMAT,
+    )
+
     if not config["aprsd"]["web"].get("logging_enabled", False):
         # disable web logging

@@ -559,28 +568,30 @@ def setup_logging(config, flask_app, loglevel, quiet):
         flask_app.logger.disabled = True
         return

-    log_level = aprsd_config.LOG_LEVELS[loglevel]
-    LOG.setLevel(log_level)
-    log_format = config["aprsd"].get("logformat", aprsd_config.DEFAULT_LOG_FORMAT)
-    date_format = config["aprsd"].get("dateformat", aprsd_config.DEFAULT_DATE_FORMAT)
-    log_formatter = logging.Formatter(fmt=log_format, datefmt=date_format)
+    if config["aprsd"].get("rich_logging", False) and not quiet:
+        log_format = "%(message)s"
+        log_formatter = logging.Formatter(fmt=log_format, datefmt=date_format)
+        rh = aprsd_logging.APRSDRichHandler(
+            show_thread=True, thread_width=15,
+            rich_tracebacks=True, omit_repeated_times=False,
+        )
+        rh.setFormatter(log_formatter)
+        flask_log.addHandler(rh)

     log_file = config["aprsd"].get("logfile", None)
     if log_file:
-        fh = RotatingFileHandler(log_file, maxBytes=(10248576 * 5), backupCount=4)
-    else:
-        fh = NullHandler()
-
-    fh.setFormatter(log_formatter)
-    for handler in flask_app.logger.handlers:
-        handler.setFormatter(log_formatter)
-        print(handler)
-
-    flask_log.addHandler(fh)
-
-    if not quiet:
-        sh = logging.StreamHandler(sys.stdout)
-        sh.setFormatter(log_formatter)
-        flask_log.addHandler(sh)
+        log_format = config["aprsd"].get(
+            "logformat",
+            aprsd_config.DEFAULT_LOG_FORMAT,
+        )
+        log_formatter = logging.Formatter(fmt=log_format, datefmt=date_format)
+        fh = RotatingFileHandler(
+            log_file, maxBytes=(10248576 * 5),
+            backupCount=4,
+        )
+        fh.setFormatter(log_formatter)
+        flask_log.addHandler(fh)


 def init_flask(config, loglevel, quiet):
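
The admin web changes above follow the usual Flask pattern for taking over logging: detach flask.logging.default_handler and attach your own handlers to both the app logger and the werkzeug logger, so web requests get logged the same way as the rest of APRSD. A minimal, self-contained sketch (the app and the log path are assumptions for illustration):

import logging

from flask import Flask
from flask.logging import default_handler

app = Flask(__name__)  # stand-in app, not the actual aprsd admin app

# Detach Flask's built-in handler so records from the app and from
# werkzeug flow only through the handlers configured below.
app.logger.removeHandler(default_handler)
werkzeug_log = logging.getLogger("werkzeug")
werkzeug_log.removeHandler(default_handler)

fh = logging.FileHandler("aprsd-web.log")  # assumed path, for illustration
fh.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s"))
werkzeug_log.addHandler(fh)
app.logger.addHandler(fh)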

File 5 of 6

@@ -18,16 +18,12 @@ logging_queue = queue.Queue()
 def setup_logging(config, loglevel, quiet):
     log_level = aprsd_config.LOG_LEVELS[loglevel]
     LOG.setLevel(log_level)
-    if config["aprsd"].get("rich_logging", False):
-        log_format = "%(message)s"
-    else:
-        log_format = config["aprsd"].get("logformat", aprsd_config.DEFAULT_LOG_FORMAT)
     date_format = config["aprsd"].get("dateformat", aprsd_config.DEFAULT_DATE_FORMAT)
-    log_formatter = logging.Formatter(fmt=log_format, datefmt=date_format)
-    log_file = config["aprsd"].get("logfile", None)

     rich_logging = False
-    if config["aprsd"].get("rich_logging", False):
+    if config["aprsd"].get("rich_logging", False) and not quiet:
+        log_format = "%(message)s"
+        log_formatter = logging.Formatter(fmt=log_format, datefmt=date_format)
         rh = aprsd_logging.APRSDRichHandler(
             show_thread=True, thread_width=15,
             rich_tracebacks=True, omit_repeated_times=False,

@@ -36,17 +32,17 @@ def setup_logging(config, loglevel, quiet):
         LOG.addHandler(rh)
         rich_logging = True

-    if log_file:
-        fh = RotatingFileHandler(log_file, maxBytes=(10248576 * 5), backupCount=4)
-    else:
-        fh = NullHandler()
-
-    fh.setFormatter(log_formatter)
-    LOG.addHandler(fh)
+    log_file = config["aprsd"].get("logfile", None)
+
+    if log_file:
+        log_format = config["aprsd"].get("logformat", aprsd_config.DEFAULT_LOG_FORMAT)
+        log_formatter = logging.Formatter(fmt=log_format, datefmt=date_format)
+        fh = RotatingFileHandler(log_file, maxBytes=(10248576 * 5), backupCount=4)
+        fh.setFormatter(log_formatter)
+        LOG.addHandler(fh)

     imap_logger = None
     if config.get("aprsd.email.enabled", default=False) and config.get("aprsd.email.imap.debug", default=False):
         imap_logger = logging.getLogger("imapclient.imaplib")
         imap_logger.setLevel(log_level)
         imap_logger.addHandler(fh)

File 6 of 6

@@ -527,6 +527,7 @@ def log_packet(packet):

 def log_message(
     header, raw, message, tocall=None, fromcall=None, msg_num=None,
     retry_number=None, ack=None, packet_type=None, uuid=None,
+    console=None,
 ):
     """

@@ -565,4 +566,7 @@ def log_message(
     log_list.append(f" UUID : {uuid}")
     log_list.append(f"{header} _______________ Complete")

-    LOG.info("\n".join(log_list))
+    if console:
+        console.log("\n".join(log_list))
+    else:
+        LOG.info("\n".join(log_list))