Merge pull request #77 from craigerl/logs

Unify the logging to file and stdout
Walter A. Boring IV, 2021-12-10 11:12:48 -05:00, committed by GitHub
commit 6a1cea63e4
6 changed files with 94 additions and 71 deletions
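
In short, every aprsd entry point now builds the same two handlers: a RotatingFileHandler when a logfile is configured, plus a console handler (rich or plain stdout) unless the run is quiet. A minimal standalone sketch of that pattern, using only the standard library (the function name, arguments, and format string below are illustrative, not the project's exact signature):

```python
import logging
import sys
from logging.handlers import RotatingFileHandler


def setup_logging(log_file=None, quiet=False, level=logging.INFO):
    """Attach a file handler and/or a stdout handler to one logger."""
    logger = logging.getLogger("APRSD")
    logger.setLevel(level)
    formatter = logging.Formatter(
        fmt="%(asctime)s [%(levelname)s] %(message)s",
        datefmt="%Y-%m-%d %H:%M:%S",
    )

    # File logging only when a path is configured.
    if log_file:
        fh = RotatingFileHandler(log_file, maxBytes=10248576 * 5, backupCount=4)
        fh.setFormatter(formatter)
        logger.addHandler(fh)

    # Console logging unless the caller asked for quiet mode.
    if not quiet:
        sh = logging.StreamHandler(sys.stdout)
        sh.setFormatter(formatter)
        logger.addHandler(sh)

    return logger
```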

View File

@@ -11,6 +11,7 @@ import sys
import click
import requests
from rich.console import Console
import aprsd
from aprsd import cli_helper, utils
@@ -21,6 +22,7 @@ from aprsd.aprsd import cli
# setup the global logger
# logging.basicConfig(level=logging.DEBUG) # level=10
LOG = logging.getLogger("APRSD")
console = Console()
@cli.command()
@@ -42,36 +44,40 @@ LOG = logging.getLogger("APRSD")
@cli_helper.process_standard_options_no_config
def healthcheck(ctx, health_url, timeout):
"""Check the health of the running aprsd server."""
LOG.debug(f"APRSD HealthCheck version: {aprsd.__version__}")
try:
url = health_url
response = requests.get(url, timeout=timeout)
response.raise_for_status()
except Exception as ex:
LOG.error(f"Failed to fetch healthcheck url '{url}' : '{ex}'")
sys.exit(-1)
else:
stats = json.loads(response.text)
LOG.debug(stats)
email_thread_last_update = stats["stats"]["email"]["thread_last_update"]
delta = utils.parse_delta_str(email_thread_last_update)
d = datetime.timedelta(**delta)
max_timeout = {"hours": 0.0, "minutes": 5, "seconds": 0}
max_delta = datetime.timedelta(**max_timeout)
if d > max_delta:
LOG.error(f"Email thread is very old! {d}")
console.log(f"APRSD HealthCheck version: {aprsd.__version__}")
with console.status(f"APRSD HealthCheck version: {aprsd.__version__}") as status:
try:
url = health_url
response = requests.get(url, timeout=timeout)
response.raise_for_status()
except Exception as ex:
console.log(f"Failed to fetch healthcheck url '{url}' : '{ex}'")
sys.exit(-1)
else:
import time
time.sleep(2)
status.update("PISS")
time.sleep(2)
aprsis_last_update = stats["stats"]["aprs-is"]["last_update"]
delta = utils.parse_delta_str(aprsis_last_update)
d = datetime.timedelta(**delta)
max_timeout = {"hours": 0.0, "minutes": 5, "seconds": 0}
max_delta = datetime.timedelta(**max_timeout)
if d > max_delta:
LOG.error(f"APRS-IS last update is very old! {d}")
sys.exit(-1)
stats = json.loads(response.text)
email_thread_last_update = stats["stats"]["email"]["thread_last_update"]
sys.exit(0)
if email_thread_last_update != "never":
delta = utils.parse_delta_str(email_thread_last_update)
d = datetime.timedelta(**delta)
max_timeout = {"hours": 0.0, "minutes": 5, "seconds": 0}
max_delta = datetime.timedelta(**max_timeout)
if d > max_delta:
console.log(f"Email thread is very old! {d}")
sys.exit(-1)
aprsis_last_update = stats["stats"]["aprs-is"]["last_update"]
delta = utils.parse_delta_str(aprsis_last_update)
d = datetime.timedelta(**delta)
max_timeout = {"hours": 0.0, "minutes": 5, "seconds": 0}
max_delta = datetime.timedelta(**max_timeout)
if d > max_delta:
LOG.error(f"APRS-IS last update is very old! {d}")
sys.exit(-1)
sys.exit(0)
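
The healthcheck command now reports through rich's Console rather than the module logger: the whole check runs inside a console.status() spinner and results go out via console.log(). A hedged sketch of that rich pattern (the URL and timeout are placeholders; the real command takes them from its health_url and timeout options):

```python
import requests
from rich.console import Console

console = Console()
url = "http://localhost:8001/stats"  # placeholder endpoint for illustration

with console.status("Checking aprsd health") as status:
    try:
        response = requests.get(url, timeout=3)
        response.raise_for_status()
    except Exception as ex:
        console.log(f"Failed to fetch healthcheck url '{url}' : '{ex}'")
    else:
        status.update("Parsing stats")
        stats = response.json()
        console.log(stats)
```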

View File

@@ -227,6 +227,8 @@ def list_plugins(ctx):
# now find any from pypi?
installed_plugins = get_installed_plugins()
show_pypi_plugins(installed_plugins, console)
with console.status("Fetching pypi.org plugins"):
show_pypi_plugins(installed_plugins, console)
show_installed_plugins(installed_plugins, console)
with console.status("Looking for installed aprsd plugins"):
show_installed_plugins(installed_plugins, console)

View File

@@ -10,6 +10,7 @@ import time
import aprslib
import click
from rich.console import Console
# local imports here
import aprsd
@@ -22,6 +23,7 @@ from aprsd.aprsd import cli
# setup the global logger
# logging.basicConfig(level=logging.DEBUG) # level=10
LOG = logging.getLogger("APRSD")
console = Console()
def signal_handler(sig, frame):
@@ -113,7 +115,7 @@ def listen(
resp = packet.get("response", None)
if resp == "ack":
ack_num = packet.get("msgNo")
LOG.info(f"We saw an ACK {ack_num} Ignoring")
console.log(f"We saw an ACK {ack_num} Ignoring")
messaging.log_packet(packet)
else:
message = packet.get("message_text", None)
@@ -125,6 +127,7 @@ def listen(
message,
fromcall=fromcall,
ack=msg_number,
console=console,
)
# Initialize the client factory and create
@@ -148,7 +151,8 @@ def listen(
# This will register a packet consumer with aprslib
# When new packets come in the consumer will process
# the packet
aprs_client.consumer(rx_packet, raw=False)
with console.status("Listening for packets"):
aprs_client.consumer(rx_packet, raw=False)
except aprslib.exceptions.ConnectionDrop:
LOG.error("Connection dropped, reconnecting")
time.sleep(5)
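
The listen command keeps the aprslib consumer loop but now wraps it in console.status() and passes the Console down so packet logging also goes through rich. A hedged sketch of the consumer side (the anonymous login and range filter below are placeholders for illustration):

```python
import aprslib
from rich.console import Console

console = Console()


def rx_packet(packet):
    # Hand every decoded packet to the rich console instead of a logger.
    console.log(packet)


# N0CALL with passcode -1 is the usual anonymous, receive-only APRS-IS login.
aprs_client = aprslib.IS("N0CALL", passwd="-1", port=14580)
aprs_client.set_filter("r/45/-122/100")  # placeholder range filter
aprs_client.connect()

with console.status("Listening for packets"):
    aprs_client.consumer(rx_packet, raw=False)
```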

View File

@@ -1,9 +1,7 @@
import datetime
import json
import logging
from logging import NullHandler
from logging.handlers import RotatingFileHandler
import sys
import threading
import time
@@ -11,6 +9,7 @@ import aprslib
from aprslib.exceptions import LoginError
import flask
from flask import request
from flask.logging import default_handler
import flask_classful
from flask_httpauth import HTTPBasicAuth
from flask_socketio import Namespace, SocketIO
@@ -21,6 +20,7 @@ from aprsd import client
from aprsd import config as aprsd_config
from aprsd import log, messaging, packets, plugin, stats, threads, utils
from aprsd.clients import aprsis
from aprsd.logging import logging as aprsd_logging
LOG = logging.getLogger("APRSD")
@@ -552,6 +552,15 @@ class LoggingNamespace(Namespace):
def setup_logging(config, flask_app, loglevel, quiet):
flask_log = logging.getLogger("werkzeug")
flask_app.logger.removeHandler(default_handler)
flask_log.removeHandler(default_handler)
log_level = aprsd_config.LOG_LEVELS[loglevel]
flask_log.setLevel(log_level)
date_format = config["aprsd"].get(
"dateformat",
aprsd_config.DEFAULT_DATE_FORMAT,
)
if not config["aprsd"]["web"].get("logging_enabled", False):
# disable web logging
@@ -559,28 +568,30 @@ def setup_logging(config, flask_app, loglevel, quiet):
flask_app.logger.disabled = True
return
log_level = aprsd_config.LOG_LEVELS[loglevel]
LOG.setLevel(log_level)
log_format = config["aprsd"].get("logformat", aprsd_config.DEFAULT_LOG_FORMAT)
date_format = config["aprsd"].get("dateformat", aprsd_config.DEFAULT_DATE_FORMAT)
log_formatter = logging.Formatter(fmt=log_format, datefmt=date_format)
if config["aprsd"].get("rich_logging", False) and not quiet:
log_format = "%(message)s"
log_formatter = logging.Formatter(fmt=log_format, datefmt=date_format)
rh = aprsd_logging.APRSDRichHandler(
show_thread=True, thread_width=15,
rich_tracebacks=True, omit_repeated_times=False,
)
rh.setFormatter(log_formatter)
flask_log.addHandler(rh)
log_file = config["aprsd"].get("logfile", None)
if log_file:
fh = RotatingFileHandler(log_file, maxBytes=(10248576 * 5), backupCount=4)
else:
fh = NullHandler()
fh.setFormatter(log_formatter)
for handler in flask_app.logger.handlers:
handler.setFormatter(log_formatter)
print(handler)
flask_log.addHandler(fh)
if not quiet:
sh = logging.StreamHandler(sys.stdout)
sh.setFormatter(log_formatter)
flask_log.addHandler(sh)
log_format = config["aprsd"].get(
"logformat",
aprsd_config.DEFAULT_LOG_FORMAT,
)
log_formatter = logging.Formatter(fmt=log_format, datefmt=date_format)
fh = RotatingFileHandler(
log_file, maxBytes=(10248576 * 5),
backupCount=4,
)
fh.setFormatter(log_formatter)
flask_log.addHandler(fh)
def init_flask(config, loglevel, quiet):
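
The web admin side follows the same approach: Flask's default_handler is detached from both the app logger and werkzeug's logger, and the shared rich/file handlers are attached instead so web requests land in the same log stream. A minimal hedged sketch (the app name, format, and log path are illustrative):

```python
import logging
from logging.handlers import RotatingFileHandler

import flask
from flask.logging import default_handler

app = flask.Flask("aprsd")

# Detach Flask's built-in stderr handler from the app and werkzeug loggers.
flask_log = logging.getLogger("werkzeug")
app.logger.removeHandler(default_handler)
flask_log.removeHandler(default_handler)
flask_log.setLevel(logging.INFO)

# Send web request logging to the same rotating file as the rest of aprsd.
fh = RotatingFileHandler("aprsd.log", maxBytes=10248576 * 5, backupCount=4)
fh.setFormatter(logging.Formatter("%(asctime)s [%(levelname)s] %(message)s"))
flask_log.addHandler(fh)
```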

View File

@@ -18,16 +18,12 @@ logging_queue = queue.Queue()
def setup_logging(config, loglevel, quiet):
log_level = aprsd_config.LOG_LEVELS[loglevel]
LOG.setLevel(log_level)
if config["aprsd"].get("rich_logging", False):
log_format = "%(message)s"
else:
log_format = config["aprsd"].get("logformat", aprsd_config.DEFAULT_LOG_FORMAT)
date_format = config["aprsd"].get("dateformat", aprsd_config.DEFAULT_DATE_FORMAT)
log_formatter = logging.Formatter(fmt=log_format, datefmt=date_format)
log_file = config["aprsd"].get("logfile", None)
rich_logging = False
if config["aprsd"].get("rich_logging", False):
if config["aprsd"].get("rich_logging", False) and not quiet:
log_format = "%(message)s"
log_formatter = logging.Formatter(fmt=log_format, datefmt=date_format)
rh = aprsd_logging.APRSDRichHandler(
show_thread=True, thread_width=15,
rich_tracebacks=True, omit_repeated_times=False,
@@ -36,17 +32,17 @@ def setup_logging(config, loglevel, quiet):
LOG.addHandler(rh)
rich_logging = True
if log_file:
fh = RotatingFileHandler(log_file, maxBytes=(10248576 * 5), backupCount=4)
else:
fh = NullHandler()
log_file = config["aprsd"].get("logfile", None)
fh.setFormatter(log_formatter)
LOG.addHandler(fh)
if log_file:
log_format = config["aprsd"].get("logformat", aprsd_config.DEFAULT_LOG_FORMAT)
log_formatter = logging.Formatter(fmt=log_format, datefmt=date_format)
fh = RotatingFileHandler(log_file, maxBytes=(10248576 * 5), backupCount=4)
fh.setFormatter(log_formatter)
LOG.addHandler(fh)
imap_logger = None
if config.get("aprsd.email.enabled", default=False) and config.get("aprsd.email.imap.debug", default=False):
imap_logger = logging.getLogger("imapclient.imaplib")
imap_logger.setLevel(log_level)
imap_logger.addHandler(fh)
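
When rich_logging is enabled and the run is not quiet, the stdout handler becomes aprsd's APRSDRichHandler and the format string collapses to "%(message)s", since rich renders the timestamp, level, and tracebacks itself. A hedged sketch using the stock rich.logging.RichHandler, which the project's handler subclasses to add thread information:

```python
import logging

from rich.logging import RichHandler

LOG = logging.getLogger("APRSD")
LOG.setLevel(logging.DEBUG)

# rich draws the timestamp and level columns, so only the message is formatted.
rh = RichHandler(rich_tracebacks=True, omit_repeated_times=False)
rh.setFormatter(logging.Formatter(fmt="%(message)s"))
LOG.addHandler(rh)

LOG.info("rich console logging enabled")
```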

View File

@@ -527,6 +527,7 @@ def log_packet(packet):
def log_message(
header, raw, message, tocall=None, fromcall=None, msg_num=None,
retry_number=None, ack=None, packet_type=None, uuid=None,
console=None,
):
"""
@@ -565,4 +566,7 @@ def log_message(
log_list.append(f" UUID : {uuid}")
log_list.append(f"{header} _______________ Complete")
LOG.info("\n".join(log_list))
if console:
console.log("\n".join(log_list))
else:
LOG.info("\n".join(log_list))
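
Finally, messaging.log_message() grows an optional console argument and prefers it over the module logger, which is what lets the listen command stream packet logs through rich while everything else keeps writing to the standard handlers. A simplified, hedged stand-in for that routing (the fields shown are a subset of the real function's):

```python
import logging

from rich.console import Console

LOG = logging.getLogger("APRSD")


def log_message(header, raw, message, fromcall=None, console=None):
    """Simplified stand-in for aprsd.messaging.log_message()."""
    log_list = [
        f"{header} _______________",
        f"  Raw         : {raw}",
        f"  From        : {fromcall}",
        f"  Message     : {message}",
        f"{header} _______________ Complete",
    ]
    text = "\n".join(log_list)
    if console:
        console.log(text)  # rich output when a Console was handed in
    else:
        LOG.info(text)     # otherwise fall back to the standard logger


log_message("Received", raw="...", message="hello", fromcall="N0CALL", console=Console())
```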