Compare commits


12 Commits

Author SHA1 Message Date
Walter A. Boring IV 1828342ef2
Merge pull request #164 from craigerl/client_rework
Refactor client and drivers
2024-05-23 12:00:04 -04:00
Hemna b317d0eb63 Refactor client and drivers
This patch refactors the client, drivers, and client factory
to use the same Protocol mechanism used by the stats collector,
so that the proper client is constructed according to the
configuration.
2024-05-23 11:38:27 -04:00
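For context, a minimal sketch of the Protocol-based factory pattern this
commit introduces (the real implementation lands in aprsd/client/factory.py
in the diff below; the class and function names in this sketch are
illustrative, not the aprsd code):

from typing import Callable, Protocol, runtime_checkable

@runtime_checkable
class Client(Protocol):
    @staticmethod
    def is_enabled() -> bool:
        ...

    def transport(self) -> str:
        ...

class ClientFactory:
    def __init__(self):
        # Classes (not instances) implementing the Client protocol.
        self.clients: list[Callable] = []

    def register(self, client_class: Callable) -> None:
        self.clients.append(client_class)

    def create(self) -> Client:
        # Instantiate the first registered client that the config enables.
        for client_class in self.clients:
            if client_class.is_enabled():
                return client_class()
        raise RuntimeError("No client is enabled in the configuration")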
Walter A. Boring IV 63962acfe6
Merge pull request #167 from craigerl/docker-rework
Refactor Dockerfile
2024-05-23 11:37:50 -04:00
Walter A. Boring IV 44a72e813e
Merge pull request #166 from craigerl/dependabot/pip/requests-2.32.0
Bump requests from 2.31.0 to 2.32.0
2024-05-23 10:59:46 -04:00
Hemna afeb11a085 Refactor Dockerfile
This patch reworks the main Dockerfile to build images from either
the upstream PyPI release of aprsd or a GitHub branch of aprsd for
development. This eliminates the need for Dockerfile-dev.

This patch also installs aprsd as a non-root user in the container
image instead of as root.
2024-05-23 10:58:46 -04:00
dependabot[bot] 18fb2a9e2b
Bump requests from 2.31.0 to 2.32.0
---
updated-dependencies:
- dependency-name: requests
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-05-21 05:54:04 +00:00
Hemna fa2d2d965d updated requirements 2024-05-18 11:20:05 -04:00
Hemna 2abf8bc750 Use newer python -m build to build aprsd wheel
This patch changes the Makefile to use the more modern
python -m build mechanism to build the package and wheel.
2024-05-18 11:19:10 -04:00
Hemna f15974131c Eliminate need for PBR
This patch also removes setup.cfg and replaces it with
pyproject.toml.

It also renames dev-requirements.txt to requirements-dev.txt.

To install the dev environment:
pip install -e ".[dev]"
2024-05-18 11:19:07 -04:00
Walter A. Boring IV 4d1dfadbde
Merge pull request #163 from craigerl/dependabot/pip/jinja2-3.1.4
Bump jinja2 from 3.1.3 to 3.1.4
2024-05-07 20:01:37 -04:00
Hemna 93a9cce0c0 Put an upper bound on the QueueHandler queue
This patch overrides the base QueueHandler class
from logging to ensure that the queue doesn't grow
infinitely.  That can be a problem when there is
no consumer pulling items out of the queue.
The queue is now capped at 200 entries max.
2024-05-07 20:00:17 -04:00
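The diff below (aprsd/log/log.py) implements this cap with a small Queue
subclass named QueueLatest; here is a self-contained sketch of the same
drop-the-oldest idea (the names are illustrative, not the aprsd code):

import queue

class BoundedQueue(queue.Queue):
    """A Queue that keeps only the newest items once maxsize is reached."""

    def put(self, *args, **kwargs):
        try:
            super().put(*args, **kwargs)
        except queue.Full:
            # Drop the oldest entry, then retry the put.
            self.queue.popleft()
            super().put(*args, **kwargs)

# A logging QueueHandler enqueues with put_nowait(), i.e. put(block=False),
# so a full queue raises queue.Full and the oldest record is discarded
# instead of the put blocking forever.
log_queue = BoundedQueue(maxsize=200)
for i in range(250):
    log_queue.put(i, block=False)
assert log_queue.qsize() == 200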
dependabot[bot] 321260ff7a
Bump jinja2 from 3.1.3 to 3.1.4
Bumps [jinja2](https://github.com/pallets/jinja) from 3.1.3 to 3.1.4.
- [Release notes](https://github.com/pallets/jinja/releases)
- [Changelog](https://github.com/pallets/jinja/blob/main/CHANGES.rst)
- [Commits](https://github.com/pallets/jinja/compare/3.1.3...3.1.4)

---
updated-dependencies:
- dependency-name: jinja2
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-05-06 20:55:03 +00:00
43 changed files with 1231 additions and 852 deletions

View File

@ -43,8 +43,9 @@ jobs:
with:
context: "{{defaultContext}}:docker"
platforms: linux/amd64,linux/arm64
file: ./Dockerfile-dev
file: ./Dockerfile
build-args: |
INSTALL_TYPE=github
BRANCH=${{ steps.extract_branch.outputs.branch }}
BUILDX_QEMU_ENV=true
push: true

View File

@ -53,8 +53,9 @@ jobs:
with:
context: "{{defaultContext}}:docker"
platforms: linux/amd64,linux/arm64
file: ./Dockerfile-dev
file: ./Dockerfile
build-args: |
INSTALL_TYPE=github
BRANCH=${{ steps.branch-name.outputs.current_branch }}
BUILDX_QEMU_ENV=true
push: true

View File

@ -1,9 +1,12 @@
CHANGES
=======
* Put an upper bound on the QueueHandler queue
v3.4.0
------
* Updated Changelog for 3.4.0
* Change setup.h
* Fixed docker setup.sh comparison
* Fixed unit tests failing with WatchList

View File

@ -17,7 +17,7 @@ Makefile.venv:
help: # Help for the Makefile
@egrep -h '\s##\s' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}'
dev: REQUIREMENTS_TXT = requirements.txt dev-requirements.txt
dev: REQUIREMENTS_TXT = requirements.txt requirements-dev.txt
dev: venv ## Create a python virtual environment for development of aprsd
run: venv ## Create a virtual environment for running aprsd commands
@ -39,7 +39,6 @@ clean-build: ## remove build artifacts
clean-pyc: ## remove Python file artifacts
find . -name '*.pyc' -exec rm -f {} +
find . -name '*.pyo' -exec rm -f {} +
find . -name '*~' -exec rm -f {} +
find . -name '__pycache__' -exec rm -fr {} +
clean-test: ## remove test and coverage artifacts
@ -57,7 +56,7 @@ test: dev ## Run all the tox tests
build: test ## Make the build artifact prior to doing an upload
$(VENV)/pip install twine
$(VENV)/python3 setup.py sdist bdist_wheel
$(VENV)/python3 -m build
$(VENV)/twine check dist/*
upload: build ## Upload a new version of the plugin
@ -81,8 +80,8 @@ docker-dev: test ## Make a development docker container tagged with hemna6969/a
update-requirements: dev ## Update the requirements.txt and dev-requirements.txt files
rm requirements.txt
rm dev-requirements.txt
rm requirements-dev.txt
touch requirements.txt
touch dev-requirements.txt
touch requirements-dev.txt
$(VENV)/pip-compile --resolver backtracking --annotation-style=line requirements.in
$(VENV)/pip-compile --resolver backtracking --annotation-style=line dev-requirements.in
$(VENV)/pip-compile --resolver backtracking --annotation-style=line requirements-dev.in

View File

@ -10,7 +10,10 @@
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
from importlib.metadata import PackageNotFoundError, version
__version__ = pbr.version.VersionInfo("aprsd").version_string()
try:
__version__ = version("aprsd")
except PackageNotFoundError:
pass

View File

@ -1,461 +0,0 @@
import abc
import datetime
import logging
import threading
import time
import aprslib
from aprslib.exceptions import LoginError
from oslo_config import cfg
import wrapt
from aprsd import exception
from aprsd.clients import aprsis, fake, kiss
from aprsd.packets import core
from aprsd.utils import singleton, trace
CONF = cfg.CONF
LOG = logging.getLogger("APRSD")
TRANSPORT_APRSIS = "aprsis"
TRANSPORT_TCPKISS = "tcpkiss"
TRANSPORT_SERIALKISS = "serialkiss"
TRANSPORT_FAKE = "fake"
# Main must create this from the ClientFactory
# object such that it's populated with the
# Correct config
factory = None
@singleton
class APRSClientStats:
lock = threading.Lock()
@wrapt.synchronized(lock)
def stats(self, serializable=False):
client = factory.create()
stats = {
"transport": client.transport(),
"filter": client.filter,
"connected": client.connected,
}
if client.transport() == TRANSPORT_APRSIS:
stats["server_string"] = client.client.server_string
keepalive = client.client.aprsd_keepalive
if serializable:
keepalive = keepalive.isoformat()
stats["server_keepalive"] = keepalive
elif client.transport() == TRANSPORT_TCPKISS:
stats["host"] = CONF.kiss_tcp.host
stats["port"] = CONF.kiss_tcp.port
elif client.transport() == TRANSPORT_SERIALKISS:
stats["device"] = CONF.kiss_serial.device
return stats
class Client:
"""Singleton client class that constructs the aprslib connection."""
_instance = None
_client = None
connected = False
filter = None
lock = threading.Lock()
def __new__(cls, *args, **kwargs):
"""This magic turns this into a singleton."""
if cls._instance is None:
cls._instance = super().__new__(cls)
# Put any initialization here.
cls._instance._create_client()
return cls._instance
@abc.abstractmethod
def stats(self) -> dict:
pass
def set_filter(self, filter):
self.filter = filter
if self._client:
self._client.set_filter(filter)
@property
def client(self):
if not self._client:
self._create_client()
return self._client
def _create_client(self):
self._client = self.setup_connection()
if self.filter:
LOG.info("Creating APRS client filter")
self._client.set_filter(self.filter)
def stop(self):
if self._client:
LOG.info("Stopping client connection.")
self._client.stop()
def send(self, packet: core.Packet):
"""Send a packet to the network."""
self.client.send(packet)
@wrapt.synchronized(lock)
def reset(self):
"""Call this to force a rebuild/reconnect."""
LOG.info("Resetting client connection.")
if self._client:
self._client.close()
del self._client
self._create_client()
else:
LOG.warning("Client not initialized, nothing to reset.")
# Recreate the client
LOG.info(f"Creating new client {self.client}")
@abc.abstractmethod
def setup_connection(self):
pass
@staticmethod
@abc.abstractmethod
def is_enabled():
pass
@staticmethod
@abc.abstractmethod
def transport():
pass
@abc.abstractmethod
def decode_packet(self, *args, **kwargs):
pass
@abc.abstractmethod
def consumer(self, callback, blocking=False, immortal=False, raw=False):
pass
@abc.abstractmethod
def is_alive(self):
pass
@abc.abstractmethod
def close(self):
pass
class APRSISClient(Client):
_client = None
def __init__(self):
max_timeout = {"hours": 0.0, "minutes": 2, "seconds": 0}
self.max_delta = datetime.timedelta(**max_timeout)
def stats(self) -> dict:
stats = {}
if self.is_configured():
stats = {
"server_string": self._client.server_string,
"sever_keepalive": self._client.aprsd_keepalive,
"filter": self.filter,
}
return stats
@staticmethod
def is_enabled():
# Defaults to True if the enabled flag is non existent
try:
return CONF.aprs_network.enabled
except KeyError:
return False
@staticmethod
def is_configured():
if APRSISClient.is_enabled():
# Ensure that the config vars are correctly set
if not CONF.aprs_network.login:
LOG.error("Config aprs_network.login not set.")
raise exception.MissingConfigOptionException(
"aprs_network.login is not set.",
)
if not CONF.aprs_network.password:
LOG.error("Config aprs_network.password not set.")
raise exception.MissingConfigOptionException(
"aprs_network.password is not set.",
)
if not CONF.aprs_network.host:
LOG.error("Config aprs_network.host not set.")
raise exception.MissingConfigOptionException(
"aprs_network.host is not set.",
)
return True
return True
def _is_stale_connection(self):
delta = datetime.datetime.now() - self._client.aprsd_keepalive
if delta > self.max_delta:
LOG.error(f"Connection is stale, last heard {delta} ago.")
return True
def is_alive(self):
if self._client:
return self._client.is_alive() and not self._is_stale_connection()
else:
LOG.warning(f"APRS_CLIENT {self._client} alive? NO!!!")
return False
def close(self):
if self._client:
self._client.stop()
self._client.close()
@staticmethod
def transport():
return TRANSPORT_APRSIS
def decode_packet(self, *args, **kwargs):
"""APRS lib already decodes this."""
return core.factory(args[0])
def setup_connection(self):
user = CONF.aprs_network.login
password = CONF.aprs_network.password
host = CONF.aprs_network.host
port = CONF.aprs_network.port
self.connected = False
backoff = 1
aprs_client = None
while not self.connected:
try:
LOG.info(f"Creating aprslib client({host}:{port}) and logging in {user}.")
aprs_client = aprsis.Aprsdis(user, passwd=password, host=host, port=port)
# Force the log to be the same
aprs_client.logger = LOG
aprs_client.connect()
self.connected = True
backoff = 1
except LoginError as e:
LOG.error(f"Failed to login to APRS-IS Server '{e}'")
self.connected = False
time.sleep(backoff)
except Exception as e:
LOG.error(f"Unable to connect to APRS-IS server. '{e}' ")
self.connected = False
time.sleep(backoff)
# Don't allow the backoff to go to inifinity.
if backoff > 5:
backoff = 5
else:
backoff += 1
continue
self._client = aprs_client
return aprs_client
def consumer(self, callback, blocking=False, immortal=False, raw=False):
self._client.consumer(
callback, blocking=blocking,
immortal=immortal, raw=raw,
)
class KISSClient(Client):
_client = None
def stats(self) -> dict:
stats = {}
if self.is_configured():
return {
"transport": self.transport(),
}
return stats
@staticmethod
def is_enabled():
"""Return if tcp or serial KISS is enabled."""
if CONF.kiss_serial.enabled:
return True
if CONF.kiss_tcp.enabled:
return True
return False
@staticmethod
def is_configured():
# Ensure that the config vars are correctly set
if KISSClient.is_enabled():
transport = KISSClient.transport()
if transport == TRANSPORT_SERIALKISS:
if not CONF.kiss_serial.device:
LOG.error("KISS serial enabled, but no device is set.")
raise exception.MissingConfigOptionException(
"kiss_serial.device is not set.",
)
elif transport == TRANSPORT_TCPKISS:
if not CONF.kiss_tcp.host:
LOG.error("KISS TCP enabled, but no host is set.")
raise exception.MissingConfigOptionException(
"kiss_tcp.host is not set.",
)
return True
return False
def is_alive(self):
if self._client:
return self._client.is_alive()
else:
return False
def close(self):
if self._client:
self._client.stop()
@staticmethod
def transport():
if CONF.kiss_serial.enabled:
return TRANSPORT_SERIALKISS
if CONF.kiss_tcp.enabled:
return TRANSPORT_TCPKISS
def decode_packet(self, *args, **kwargs):
"""We get a frame, which has to be decoded."""
LOG.debug(f"kwargs {kwargs}")
frame = kwargs["frame"]
LOG.debug(f"Got an APRS Frame '{frame}'")
# try and nuke the * from the fromcall sign.
# frame.header._source._ch = False
# payload = str(frame.payload.decode())
# msg = f"{str(frame.header)}:{payload}"
# msg = frame.tnc2
# LOG.debug(f"Decoding {msg}")
raw = aprslib.parse(str(frame))
packet = core.factory(raw)
if isinstance(packet, core.ThirdParty):
return packet.subpacket
else:
return packet
def setup_connection(self):
self._client = kiss.KISS3Client()
self.connected = True
return self._client
def consumer(self, callback, blocking=False, immortal=False, raw=False):
self._client.consumer(callback)
class APRSDFakeClient(Client, metaclass=trace.TraceWrapperMetaclass):
def stats(self) -> dict:
return {}
@staticmethod
def is_enabled():
if CONF.fake_client.enabled:
return True
return False
@staticmethod
def is_configured():
return APRSDFakeClient.is_enabled()
def is_alive(self):
return True
def close(self):
pass
def setup_connection(self):
self.connected = True
return fake.APRSDFakeClient()
@staticmethod
def transport():
return TRANSPORT_FAKE
def decode_packet(self, *args, **kwargs):
LOG.debug(f"kwargs {kwargs}")
pkt = kwargs["packet"]
LOG.debug(f"Got an APRS Fake Packet '{pkt}'")
return pkt
class ClientFactory:
_instance = None
def __new__(cls, *args, **kwargs):
"""This magic turns this into a singleton."""
if cls._instance is None:
cls._instance = super().__new__(cls)
# Put any initialization here.
return cls._instance
def __init__(self):
self._builders = {}
def register(self, key, builder):
self._builders[key] = builder
def create(self, key=None):
if not key:
if APRSISClient.is_enabled():
key = TRANSPORT_APRSIS
elif KISSClient.is_enabled():
key = KISSClient.transport()
elif APRSDFakeClient.is_enabled():
key = TRANSPORT_FAKE
builder = self._builders.get(key)
if not builder:
raise ValueError(key)
return builder()
def is_client_enabled(self):
"""Make sure at least one client is enabled."""
enabled = False
for key in self._builders.keys():
try:
enabled |= self._builders[key].is_enabled()
except KeyError:
pass
return enabled
def is_client_configured(self):
enabled = False
for key in self._builders.keys():
try:
enabled |= self._builders[key].is_configured()
except KeyError:
pass
except exception.MissingConfigOptionException as ex:
LOG.error(ex.message)
return False
except exception.ConfigOptionBogusDefaultException as ex:
LOG.error(ex.message)
return False
return enabled
@staticmethod
def setup():
"""Create and register all possible client objects."""
global factory
factory = ClientFactory()
factory.register(TRANSPORT_APRSIS, APRSISClient)
factory.register(TRANSPORT_TCPKISS, KISSClient)
factory.register(TRANSPORT_SERIALKISS, KISSClient)
factory.register(TRANSPORT_FAKE, APRSDFakeClient)

13
aprsd/client/__init__.py Normal file
View File

@ -0,0 +1,13 @@
from aprsd.client import aprsis, factory, fake, kiss
TRANSPORT_APRSIS = "aprsis"
TRANSPORT_TCPKISS = "tcpkiss"
TRANSPORT_SERIALKISS = "serialkiss"
TRANSPORT_FAKE = "fake"
client_factory = factory.ClientFactory()
client_factory.register(aprsis.APRSISClient)
client_factory.register(kiss.KISSClient)
client_factory.register(fake.APRSDFakeClient)

132
aprsd/client/aprsis.py Normal file
View File

@ -0,0 +1,132 @@
import datetime
import logging
import time
from aprslib.exceptions import LoginError
from oslo_config import cfg
from aprsd import client, exception
from aprsd.client import base
from aprsd.client.drivers import aprsis
from aprsd.packets import core
CONF = cfg.CONF
LOG = logging.getLogger("APRSD")
class APRSISClient(base.APRSClient):
_client = None
def __init__(self):
max_timeout = {"hours": 0.0, "minutes": 2, "seconds": 0}
self.max_delta = datetime.timedelta(**max_timeout)
def stats(self) -> dict:
stats = {}
if self.is_configured():
stats = {
"server_string": self._client.server_string,
"sever_keepalive": self._client.aprsd_keepalive,
"filter": self.filter,
}
return stats
@staticmethod
def is_enabled():
# Defaults to True if the enabled flag is non existent
try:
return CONF.aprs_network.enabled
except KeyError:
return False
@staticmethod
def is_configured():
if APRSISClient.is_enabled():
# Ensure that the config vars are correctly set
if not CONF.aprs_network.login:
LOG.error("Config aprs_network.login not set.")
raise exception.MissingConfigOptionException(
"aprs_network.login is not set.",
)
if not CONF.aprs_network.password:
LOG.error("Config aprs_network.password not set.")
raise exception.MissingConfigOptionException(
"aprs_network.password is not set.",
)
if not CONF.aprs_network.host:
LOG.error("Config aprs_network.host not set.")
raise exception.MissingConfigOptionException(
"aprs_network.host is not set.",
)
return True
return True
def _is_stale_connection(self):
delta = datetime.datetime.now() - self._client.aprsd_keepalive
if delta > self.max_delta:
LOG.error(f"Connection is stale, last heard {delta} ago.")
return True
def is_alive(self):
if self._client:
return self._client.is_alive() and not self._is_stale_connection()
else:
LOG.warning(f"APRS_CLIENT {self._client} alive? NO!!!")
return False
def close(self):
if self._client:
self._client.stop()
self._client.close()
@staticmethod
def transport():
return client.TRANSPORT_APRSIS
def decode_packet(self, *args, **kwargs):
"""APRS lib already decodes this."""
return core.factory(args[0])
def setup_connection(self):
user = CONF.aprs_network.login
password = CONF.aprs_network.password
host = CONF.aprs_network.host
port = CONF.aprs_network.port
self.connected = False
backoff = 1
aprs_client = None
while not self.connected:
try:
LOG.info(f"Creating aprslib client({host}:{port}) and logging in {user}.")
aprs_client = aprsis.Aprsdis(user, passwd=password, host=host, port=port)
# Force the log to be the same
aprs_client.logger = LOG
aprs_client.connect()
self.connected = True
backoff = 1
except LoginError as e:
LOG.error(f"Failed to login to APRS-IS Server '{e}'")
self.connected = False
time.sleep(backoff)
except Exception as e:
LOG.error(f"Unable to connect to APRS-IS server. '{e}' ")
self.connected = False
time.sleep(backoff)
# Don't allow the backoff to go to inifinity.
if backoff > 5:
backoff = 5
else:
backoff += 1
continue
self._client = aprs_client
return aprs_client
def consumer(self, callback, blocking=False, immortal=False, raw=False):
self._client.consumer(
callback, blocking=blocking,
immortal=immortal, raw=raw,
)

105
aprsd/client/base.py Normal file
View File

@ -0,0 +1,105 @@
import abc
import logging
import threading
from oslo_config import cfg
import wrapt
from aprsd.packets import core
CONF = cfg.CONF
LOG = logging.getLogger("APRSD")
class APRSClient:
"""Singleton client class that constructs the aprslib connection."""
_instance = None
_client = None
connected = False
filter = None
lock = threading.Lock()
def __new__(cls, *args, **kwargs):
"""This magic turns this into a singleton."""
if cls._instance is None:
cls._instance = super().__new__(cls)
# Put any initialization here.
cls._instance._create_client()
return cls._instance
@abc.abstractmethod
def stats(self) -> dict:
pass
def set_filter(self, filter):
self.filter = filter
if self._client:
self._client.set_filter(filter)
@property
def client(self):
if not self._client:
self._create_client()
return self._client
def _create_client(self):
self._client = self.setup_connection()
if self.filter:
LOG.info("Creating APRS client filter")
self._client.set_filter(self.filter)
def stop(self):
if self._client:
LOG.info("Stopping client connection.")
self._client.stop()
def send(self, packet: core.Packet):
"""Send a packet to the network."""
self.client.send(packet)
@wrapt.synchronized(lock)
def reset(self):
"""Call this to force a rebuild/reconnect."""
LOG.info("Resetting client connection.")
if self._client:
self._client.close()
del self._client
self._create_client()
else:
LOG.warning("Client not initialized, nothing to reset.")
# Recreate the client
LOG.info(f"Creating new client {self.client}")
@abc.abstractmethod
def setup_connection(self):
pass
@staticmethod
@abc.abstractmethod
def is_enabled():
pass
@staticmethod
@abc.abstractmethod
def transport():
pass
@abc.abstractmethod
def decode_packet(self, *args, **kwargs):
pass
@abc.abstractmethod
def consumer(self, callback, blocking=False, immortal=False, raw=False):
pass
@abc.abstractmethod
def is_alive(self):
pass
@abc.abstractmethod
def close(self):
pass

88
aprsd/client/factory.py Normal file
View File

@ -0,0 +1,88 @@
import logging
from typing import Callable, Protocol, runtime_checkable
from aprsd import exception
from aprsd.packets import core
LOG = logging.getLogger("APRSD")
@runtime_checkable
class Client(Protocol):
def __init__(self):
pass
def connect(self) -> bool:
pass
def disconnect(self) -> bool:
pass
def decode_packet(self, *args, **kwargs) -> type[core.Packet]:
pass
def is_enabled(self) -> bool:
pass
def is_configured(self) -> bool:
pass
def transport(self) -> str:
pass
def send(self, message: str) -> bool:
pass
def setup_connection(self) -> None:
pass
class ClientFactory:
_instance = None
clients = []
def __new__(cls, *args, **kwargs):
"""This magic turns this into a singleton."""
if cls._instance is None:
cls._instance = super().__new__(cls)
# Put any initialization here.
return cls._instance
def __init__(self):
self.clients: list[Callable] = []
def register(self, aprsd_client: Callable):
if isinstance(aprsd_client, Client):
raise ValueError("Client must be a subclass of Client protocol")
self.clients.append(aprsd_client)
def create(self, key=None):
for client in self.clients:
if client.is_enabled():
return client()
raise Exception("No client is configured!!")
def is_client_enabled(self):
"""Make sure at least one client is enabled."""
enabled = False
for client in self.clients:
if client.is_enabled():
enabled = True
return enabled
def is_client_configured(self):
enabled = False
for client in self.clients:
try:
if client.is_configured():
enabled = True
except exception.MissingConfigOptionException as ex:
LOG.error(ex.message)
return False
except exception.ConfigOptionBogusDefaultException as ex:
LOG.error(ex.message)
return False
return enabled

48
aprsd/client/fake.py Normal file
View File

@ -0,0 +1,48 @@
import logging
from oslo_config import cfg
from aprsd import client
from aprsd.client import base
from aprsd.client.drivers import fake as fake_driver
from aprsd.utils import trace
CONF = cfg.CONF
LOG = logging.getLogger("APRSD")
class APRSDFakeClient(base.APRSClient, metaclass=trace.TraceWrapperMetaclass):
def stats(self) -> dict:
return {}
@staticmethod
def is_enabled():
if CONF.fake_client.enabled:
return True
return False
@staticmethod
def is_configured():
return APRSDFakeClient.is_enabled()
def is_alive(self):
return True
def close(self):
pass
def setup_connection(self):
self.connected = True
return fake_driver.APRSDFakeClient()
@staticmethod
def transport():
return client.TRANSPORT_FAKE
def decode_packet(self, *args, **kwargs):
LOG.debug(f"kwargs {kwargs}")
pkt = kwargs["packet"]
LOG.debug(f"Got an APRS Fake Packet '{pkt}'")
return pkt

103
aprsd/client/kiss.py Normal file
View File

@ -0,0 +1,103 @@
import logging
import aprslib
from oslo_config import cfg
from aprsd import client, exception
from aprsd.client import base
from aprsd.client.drivers import kiss
from aprsd.packets import core
CONF = cfg.CONF
LOG = logging.getLogger("APRSD")
class KISSClient(base.APRSClient):
_client = None
def stats(self) -> dict:
stats = {}
if self.is_configured():
return {
"transport": self.transport(),
}
return stats
@staticmethod
def is_enabled():
"""Return if tcp or serial KISS is enabled."""
if CONF.kiss_serial.enabled:
return True
if CONF.kiss_tcp.enabled:
return True
return False
@staticmethod
def is_configured():
# Ensure that the config vars are correctly set
if KISSClient.is_enabled():
transport = KISSClient.transport()
if transport == client.TRANSPORT_SERIALKISS:
if not CONF.kiss_serial.device:
LOG.error("KISS serial enabled, but no device is set.")
raise exception.MissingConfigOptionException(
"kiss_serial.device is not set.",
)
elif transport == client.TRANSPORT_TCPKISS:
if not CONF.kiss_tcp.host:
LOG.error("KISS TCP enabled, but no host is set.")
raise exception.MissingConfigOptionException(
"kiss_tcp.host is not set.",
)
return True
return False
def is_alive(self):
if self._client:
return self._client.is_alive()
else:
return False
def close(self):
if self._client:
self._client.stop()
@staticmethod
def transport():
if CONF.kiss_serial.enabled:
return client.TRANSPORT_SERIALKISS
if CONF.kiss_tcp.enabled:
return client.TRANSPORT_TCPKISS
def decode_packet(self, *args, **kwargs):
"""We get a frame, which has to be decoded."""
LOG.debug(f"kwargs {kwargs}")
frame = kwargs["frame"]
LOG.debug(f"Got an APRS Frame '{frame}'")
# try and nuke the * from the fromcall sign.
# frame.header._source._ch = False
# payload = str(frame.payload.decode())
# msg = f"{str(frame.header)}:{payload}"
# msg = frame.tnc2
# LOG.debug(f"Decoding {msg}")
raw = aprslib.parse(str(frame))
packet = core.factory(raw)
if isinstance(packet, core.ThirdParty):
return packet.subpacket
else:
return packet
def setup_connection(self):
self._client = kiss.KISS3Client()
self.connected = True
return self._client
def consumer(self, callback, blocking=False, immortal=False, raw=False):
self._client.consumer(callback)

38
aprsd/client/stats.py Normal file
View File

@ -0,0 +1,38 @@
import threading
from oslo_config import cfg
import wrapt
from aprsd import client
from aprsd.utils import singleton
CONF = cfg.CONF
@singleton
class APRSClientStats:
lock = threading.Lock()
@wrapt.synchronized(lock)
def stats(self, serializable=False):
cl = client.client_factory.create()
stats = {
"transport": cl.transport(),
"filter": cl.filter,
"connected": cl.connected,
}
if cl.transport() == client.TRANSPORT_APRSIS:
stats["server_string"] = cl.client.server_string
keepalive = cl.client.aprsd_keepalive
if serializable:
keepalive = keepalive.isoformat()
stats["server_keepalive"] = keepalive
elif cl.transport() == client.TRANSPORT_TCPKISS:
stats["host"] = CONF.kiss_tcp.host
stats["port"] = CONF.kiss_tcp.port
elif cl.transport() == client.TRANSPORT_SERIALKISS:
stats["device"] = CONF.kiss_serial.device
return stats

View File

@ -8,8 +8,9 @@ import logging
import click
from oslo_config import cfg
from aprsd import cli_helper, conf, packets, plugin
# local imports here
from aprsd import cli_helper, client, conf, packets, plugin
from aprsd.client import base
from aprsd.main import cli
from aprsd.utils import trace
@ -96,7 +97,7 @@ def test_plugin(
if CONF.trace_enabled:
trace.setup_tracing(["method", "api"])
client.Client()
base.APRSClient()
pm = plugin.PluginManager()
if load_all:

View File

@ -15,7 +15,8 @@ from rich.console import Console
# local imports here
import aprsd
from aprsd import cli_helper, client, packets, plugin, threads
from aprsd import cli_helper, packets, plugin, threads
from aprsd.client import client_factory
from aprsd.main import cli
from aprsd.packets import collector as packet_collector
from aprsd.packets import log as packet_log
@ -179,15 +180,14 @@ def listen(
# Initialize the client factory and create
# The correct client object ready for use
client.ClientFactory.setup()
# Make sure we have 1 client transport enabled
if not client.factory.is_client_enabled():
if not client_factory.is_client_enabled():
LOG.error("No Clients are enabled in config.")
sys.exit(-1)
# Creates the client object
LOG.info("Creating client connection")
aprs_client = client.factory.create()
aprs_client = client_factory.create()
LOG.info(aprs_client)
LOG.debug(f"Filter by '{filter}'")

View File

@ -8,8 +8,9 @@ import click
from oslo_config import cfg
import aprsd
from aprsd import cli_helper, client, packets
from aprsd import cli_helper, packets
from aprsd import conf # noqa : F401
from aprsd.client import client_factory
from aprsd.main import cli
from aprsd.packets import collector
from aprsd.threads import tx
@ -102,7 +103,7 @@ def send_message(
def rx_packet(packet):
global got_ack, got_response
cl = client.factory.create()
cl = client_factory.create()
packet = cl.decode_packet(packet)
collector.PacketCollector().rx(packet)
packet.log("RX")
@ -130,8 +131,7 @@ def send_message(
sys.exit(0)
try:
client.ClientFactory.setup()
client.factory.create().client
client_factory.create().client
except LoginError:
sys.exit(-1)
@ -163,7 +163,7 @@ def send_message(
# This will register a packet consumer with aprslib
# When new packets come in the consumer will process
# the packet
aprs_client = client.factory.create().client
aprs_client = client_factory.create().client
aprs_client.consumer(rx_packet, raw=False)
except aprslib.exceptions.ConnectionDrop:
LOG.error("Connection dropped, reconnecting")

View File

@ -6,9 +6,10 @@ import click
from oslo_config import cfg
import aprsd
from aprsd import cli_helper, client
from aprsd import cli_helper
from aprsd import main as aprsd_main
from aprsd import packets, plugin, threads, utils
from aprsd.client import client_factory
from aprsd.main import cli
from aprsd.packets import collector as packet_collector
from aprsd.packets import seen_list
@ -49,14 +50,13 @@ def server(ctx, flush):
# Initialize the client factory and create
# The correct client object ready for use
client.ClientFactory.setup()
if not client.factory.is_client_enabled():
if not client_factory.is_client_enabled():
LOG.error("No Clients are enabled in config.")
sys.exit(-1)
# Creates the client object
LOG.info("Creating client connection")
aprs_client = client.factory.create()
aprs_client = client_factory.create()
LOG.info(aprs_client)
# Create the initial PM singleton and Register plugins
@ -79,18 +79,14 @@ def server(ctx, flush):
LOG.info(p)
# Make sure we have 1 client transport enabled
if not client.factory.is_client_enabled():
if not client_factory.is_client_enabled():
LOG.error("No Clients are enabled in config.")
sys.exit(-1)
if not client.factory.is_client_configured():
if not client_factory.is_client_configured():
LOG.error("APRS client is not properly configured in config file.")
sys.exit(-1)
# Creates the client object
# LOG.info("Creating client connection")
# client.factory.create().client
# Now load the msgTrack from disk if any
packets.PacketList()
if flush:

View File

@ -21,6 +21,7 @@ import aprsd
from aprsd import (
cli_helper, client, packets, plugin_utils, stats, threads, utils,
)
from aprsd.client import client_factory, kiss
from aprsd.main import cli
from aprsd.threads import aprsd as aprsd_threads
from aprsd.threads import keep_alive, rx, tx
@ -380,8 +381,8 @@ def _get_transport(stats):
"APRS-IS Server: <a href='http://status.aprs2.net' >"
"{}</a>".format(stats["APRSClientStats"]["server_string"])
)
elif client.KISSClient.is_enabled():
transport = client.KISSClient.transport()
elif kiss.KISSClient.is_enabled():
transport = kiss.KISSClient.transport()
if transport == client.TRANSPORT_TCPKISS:
aprs_connection = (
"TCPKISS://{}:{}".format(
@ -637,13 +638,12 @@ def webchat(ctx, flush, port):
# Initialize the client factory and create
# The correct client object ready for use
client.ClientFactory.setup()
# Make sure we have 1 client transport enabled
if not client.factory.is_client_enabled():
if not client_factory.is_client_enabled():
LOG.error("No Clients are enabled in config.")
sys.exit(-1)
if not client.factory.is_client_configured():
if not client_factory.is_client_configured():
LOG.error("APRS client is not properly configured in config file.")
sys.exit(-1)

View File

@ -12,7 +12,22 @@ from aprsd.conf import log as conf_log
CONF = cfg.CONF
# LOG = logging.getLogger("APRSD")
LOG = logger
logging_queue = queue.Queue()
class QueueLatest(queue.Queue):
"""Custom Queue to keep only the latest N items.
This prevents the queue from blowing up in size.
"""
def put(self, *args, **kwargs):
try:
super().put(*args, **kwargs)
except queue.Full:
self.queue.popleft()
super().put(*args, **kwargs)
logging_queue = QueueLatest(maxsize=200)
class InterceptHandler(logging.Handler):
@ -59,6 +74,10 @@ def setup_logging(loglevel=None, quiet=False):
"werkzeug._internal",
"socketio",
"urllib3.connectionpool",
"chardet",
"chardet.charsetgroupprober",
"chardet.eucjpprober",
"chardet.mbcharsetprober",
]
# We don't really want to see the aprslib parsing debug output.

View File

@ -148,7 +148,7 @@ class APRSDWatchListPluginBase(APRSDPluginBase, metaclass=abc.ABCMeta):
watch_list = CONF.watch_list.callsigns
# make sure the timeout is set or this doesn't work
if watch_list:
aprs_client = client.factory.create().client
aprs_client = client.client_factory.create().client
filter_str = "b/{}".format("/".join(watch_list))
aprs_client.set_filter(filter_str)
else:

View File

@ -1,5 +1,5 @@
from aprsd import client as aprs_client
from aprsd import plugin
from aprsd.client import stats as client_stats
from aprsd.packets import packet_list, seen_list, tracker, watch_list
from aprsd.plugins import email
from aprsd.stats import app, collector
@ -16,5 +16,5 @@ stats_collector.register_producer(tracker.PacketTrack)
stats_collector.register_producer(plugin.PluginManager)
stats_collector.register_producer(aprsd.APRSDThreadList)
stats_collector.register_producer(email.EmailStats)
stats_collector.register_producer(aprs_client.APRSClientStats)
stats_collector.register_producer(client_stats.APRSClientStats)
stats_collector.register_producer(seen_list.SeenList)

View File

@ -5,6 +5,7 @@ from oslo_config import cfg
import aprsd
from aprsd import utils
from aprsd.log import log as aprsd_log
CONF = cfg.CONF
@ -32,6 +33,7 @@ class APRSDStats:
def stats(self, serializable=False) -> dict:
current, peak = tracemalloc.get_traced_memory()
uptime = self.uptime()
qsize = aprsd_log.logging_queue.qsize()
if serializable:
uptime = str(uptime)
stats = {
@ -42,5 +44,6 @@ class APRSDStats:
"memory_current_str": utils.human_size(current),
"memory_peak": int(peak),
"memory_peak_str": utils.human_size(peak),
"loging_queue": qsize,
}
return stats

View File

@ -5,7 +5,9 @@ import tracemalloc
from oslo_config import cfg
from aprsd import client, packets, utils
from aprsd import packets, utils
from aprsd.client import client_factory
from aprsd.log import log as aprsd_log
from aprsd.stats import collector
from aprsd.threads import APRSDThread, APRSDThreadList
@ -59,7 +61,7 @@ class KeepAliveThread(APRSDThread):
keepalive = (
"{} - Uptime {} RX:{} TX:{} Tracker:{} Msgs TX:{} RX:{} "
"Last:{} Email: {} - RAM Current:{} Peak:{} Threads:{}"
"Last:{} Email: {} - RAM Current:{} Peak:{} Threads:{} LoggingQueue:{}"
).format(
stats_json["APRSDStats"]["callsign"],
stats_json["APRSDStats"]["uptime"],
@ -73,6 +75,7 @@ class KeepAliveThread(APRSDThread):
stats_json["APRSDStats"]["memory_current_str"],
stats_json["APRSDStats"]["memory_peak_str"],
len(thread_list),
aprsd_log.logging_queue.qsize(),
)
LOG.info(keepalive)
if "APRSDThreadList" in stats_json:
@ -87,7 +90,7 @@ class KeepAliveThread(APRSDThread):
LOG.info(f"{key: <15} Alive? {str(alive): <5} {str(age): <20}")
# check the APRS connection
cl = client.factory.create()
cl = client_factory.create()
# Reset the connection if it's dead and this isn't our
# First time through the loop.
# The first time through the loop can happen at startup where
@ -95,7 +98,7 @@ class KeepAliveThread(APRSDThread):
# to make it's connection the first time.
if not cl.is_alive() and self.cntr > 0:
LOG.error(f"{cl.__class__.__name__} is not alive!!! Resetting")
client.factory.create().reset()
client_factory.create().reset()
# else:
# # See if we should reset the aprs-is client
# # Due to losing a keepalive from them

View File

@ -19,7 +19,6 @@ def send_log_entries(force=False):
if CONF.admin.web_enabled:
if force or LogEntries().is_purge_ready():
entries = LogEntries().get_all_and_purge()
print(f"Sending log entries {len(entries)}")
if entries:
try:
requests.post(
@ -27,9 +26,8 @@ def send_log_entries(force=False):
json=entries,
auth=(CONF.admin.user, CONF.admin.password),
)
except Exception as ex:
LOG.warning(f"Failed to send log entries {len(entries)}")
LOG.warning(ex)
except Exception:
LOG.warning(f"Failed to send log entries. len={len(entries)}")
class LogEntries:

View File

@ -6,7 +6,8 @@ import time
import aprslib
from oslo_config import cfg
from aprsd import client, packets, plugin
from aprsd import packets, plugin
from aprsd.client import client_factory
from aprsd.packets import collector
from aprsd.packets import log as packet_log
from aprsd.threads import APRSDThread, tx
@ -20,7 +21,7 @@ class APRSDRXThread(APRSDThread):
def __init__(self, packet_queue):
super().__init__("RX_PKT")
self.packet_queue = packet_queue
self._client = client.factory.create()
self._client = client_factory.create()
def stop(self):
self.thread_stop = True
@ -29,7 +30,7 @@ class APRSDRXThread(APRSDThread):
def loop(self):
if not self._client:
self._client = client.factory.create()
self._client = client_factory.create()
time.sleep(1)
return True
# setup the consumer of messages and block until a messages

View File

@ -9,9 +9,9 @@ from rush.limiters import periodic
from rush.stores import dictionary
import wrapt
from aprsd import client
from aprsd import conf # noqa
from aprsd import threads as aprsd_threads
from aprsd.client import client_factory
from aprsd.packets import collector, core
from aprsd.packets import log as packet_log
from aprsd.packets import tracker
@ -80,7 +80,7 @@ def _send_direct(packet, aprs_client=None):
if aprs_client:
cl = aprs_client
else:
cl = client.factory.create()
cl = client_factory.create()
packet.update_timestamp()
packet_log.log(packet, tx=True)
@ -247,7 +247,7 @@ class BeaconSendThread(aprsd_threads.APRSDThread):
send(pkt, direct=True)
except Exception as e:
LOG.error(f"Failed to send beacon: {e}")
client.factory.create().reset()
client_factory.create().reset()
time.sleep(5)
self._loop_cnt += 1

View File

@ -1,83 +0,0 @@
#
# This file is autogenerated by pip-compile with Python 3.10
# by the following command:
#
# pip-compile --annotation-style=line dev-requirements.in
#
add-trailing-comma==3.1.0 # via gray
alabaster==0.7.16 # via sphinx
autoflake==1.5.3 # via gray
babel==2.14.0 # via sphinx
black==24.4.0 # via gray
build==1.2.1 # via pip-tools
cachetools==5.3.3 # via tox
certifi==2024.2.2 # via requests
cfgv==3.4.0 # via pre-commit
chardet==5.2.0 # via tox
charset-normalizer==3.3.2 # via requests
click==8.1.7 # via black, fixit, moreorless, pip-tools
colorama==0.4.6 # via tox
commonmark==0.9.1 # via rich
configargparse==1.7 # via gray
coverage[toml]==7.5.0 # via pytest-cov
distlib==0.3.8 # via virtualenv
docutils==0.21.2 # via sphinx
exceptiongroup==1.2.1 # via pytest
filelock==3.13.4 # via tox, virtualenv
fixit==2.1.0 # via gray
flake8==7.0.0 # via -r dev-requirements.in, pep8-naming
gray==0.14.0 # via -r dev-requirements.in
identify==2.5.36 # via pre-commit
idna==3.7 # via requests
imagesize==1.4.1 # via sphinx
iniconfig==2.0.0 # via pytest
isort==5.13.2 # via -r dev-requirements.in, gray
jinja2==3.1.3 # via sphinx
libcst==1.3.1 # via fixit
markupsafe==2.1.5 # via jinja2
mccabe==0.7.0 # via flake8
moreorless==0.4.0 # via fixit
mypy==1.9.0 # via -r dev-requirements.in
mypy-extensions==1.0.0 # via black, mypy
nodeenv==1.8.0 # via pre-commit
packaging==24.0 # via black, build, fixit, pyproject-api, pytest, sphinx, tox
pathspec==0.12.1 # via black, trailrunner
pep8-naming==0.13.3 # via -r dev-requirements.in
pip-tools==7.4.1 # via -r dev-requirements.in
platformdirs==4.2.1 # via black, tox, virtualenv
pluggy==1.5.0 # via pytest, tox
pre-commit==3.7.0 # via -r dev-requirements.in
pycodestyle==2.11.1 # via flake8
pyflakes==3.2.0 # via autoflake, flake8
pygments==2.17.2 # via rich, sphinx
pyproject-api==1.6.1 # via tox
pyproject-hooks==1.0.0 # via build, pip-tools
pytest==8.1.1 # via -r dev-requirements.in, pytest-cov
pytest-cov==5.0.0 # via -r dev-requirements.in
pyupgrade==3.15.2 # via gray
pyyaml==6.0.1 # via libcst, pre-commit
requests==2.31.0 # via sphinx
rich==12.6.0 # via gray
snowballstemmer==2.2.0 # via sphinx
sphinx==7.3.7 # via -r dev-requirements.in
sphinxcontrib-applehelp==1.0.8 # via sphinx
sphinxcontrib-devhelp==1.0.6 # via sphinx
sphinxcontrib-htmlhelp==2.0.5 # via sphinx
sphinxcontrib-jsmath==1.0.1 # via sphinx
sphinxcontrib-qthelp==1.0.7 # via sphinx
sphinxcontrib-serializinghtml==1.1.10 # via sphinx
tokenize-rt==5.2.0 # via add-trailing-comma, pyupgrade
toml==0.10.2 # via autoflake
tomli==2.0.1 # via black, build, coverage, fixit, mypy, pip-tools, pyproject-api, pyproject-hooks, pytest, sphinx, tox
tox==4.14.2 # via -r dev-requirements.in
trailrunner==1.4.0 # via fixit
typing-extensions==4.11.0 # via black, mypy
unify==0.5 # via gray
untokenize==0.1.1 # via unify
urllib3==2.2.1 # via requests
virtualenv==20.26.0 # via pre-commit, tox
wheel==0.43.0 # via pip-tools
# The following packages are considered to be unsafe in a requirements file:
# pip
# setuptools

View File

@ -1,10 +1,18 @@
FROM python:3.11-slim as build
ARG VERSION=3.4.0
# pass this in as 'dev' if you want to install from github repo vs pypi
ARG INSTALL_TYPE=pypi
ARG BRANCH=master
ARG BUILDX_QEMU_ENV
ENV APRSD_BRANCH=${BRANCH:-master}
ENV TZ=${TZ:-US/Eastern}
ENV LC_ALL=C.UTF-8
ENV LANG=C.UTF-8
ENV APRSD_PIP_VERSION=${VERSION}
ENV PATH="${PATH}:/app/.local/bin"
ENV PIP_DEFAULT_TIMEOUT=100 \
# Allow statements and log messages to immediately appear
@ -35,16 +43,22 @@ FROM build as final
WORKDIR /app
RUN pip3 install -U pip
RUN pip3 install aprsd==$APRSD_PIP_VERSION
RUN pip install gevent uwsgi
RUN which aprsd
RUN mkdir /config
RUN chown -R appuser:appgroup /app
RUN chown -R appuser:appgroup /config
USER appuser
RUN echo "PATH=\$PATH:/usr/games" >> /app/.bashrc
RUN if [ "$INSTALL_TYPE" = "pypi" ]; then \
pip3 install aprsd==$APRSD_PIP_VERSION; \
elif [ "$INSTALL_TYPE" = "github" ]; then \
git clone -b $APRSD_BRANCH https://github.com/craigerl/aprsd; \
cd /app/aprsd && pip install -e .; \
ls -al /app/.local/lib/python3.11/site-packages/aprsd*; \
fi
RUN pip install gevent uwsgi
RUN echo "PATH=\$PATH:/usr/games:/app/.local/bin" >> /app/.bashrc
RUN which aprsd
RUN aprsd sample-config > /config/aprsd.conf
RUN aprsd --version
ADD bin/setup.sh /app
ADD bin/admin.sh /app

View File

@ -1,60 +0,0 @@
FROM python:3.11-slim as build
ARG BRANCH=master
ARG BUILDX_QEMU_ENV
ENV APRSD_BRANCH=${BRANCH:-master}
ENV PIP_DEFAULT_TIMEOUT=100 \
# Allow statements and log messages to immediately appear
PYTHONUNBUFFERED=1 \
# disable a pip version check to reduce run-time & log-spam
PIP_DISABLE_PIP_VERSION_CHECK=1 \
# cache is useless in docker image, so disable to reduce image size
PIP_NO_CACHE_DIR=1
RUN set -ex \
# Create a non-root user
&& addgroup --system --gid 1001 appgroup \
&& useradd --uid 1001 --gid 1001 -s /usr/bin/bash -m -d /app appuser \
# Upgrade the package index and install security upgrades
&& apt-get update \
&& apt-get upgrade -y \
&& apt-get install -y git build-essential curl libffi-dev fortune \
python3-dev libssl-dev libxml2-dev libxslt-dev telnet sudo \
# Install dependencies
# Clean up
&& apt-get autoremove -y \
&& apt-get clean -y
### Final stage
FROM build as final
WORKDIR /app
RUN git clone -b $APRSD_BRANCH https://github.com/craigerl/aprsd
RUN pip install -U pip
RUN cd aprsd && pip install --no-cache-dir .
RUN pip install gevent uwsgi==2.0.24
RUN which aprsd
RUN mkdir /config
RUN chown -R appuser:appgroup /app
RUN chown -R appuser:appgroup /config
USER appuser
RUN echo "PATH=\$PATH:/usr/games" >> /app/.bashrc
RUN which aprsd
RUN aprsd sample-config > /config/aprsd.conf
ADD bin/setup.sh /app
ADD bin/admin.sh /app
EXPOSE 8000
EXPOSE 8001
VOLUME ["/config"]
# CMD ["gunicorn", "aprsd.wsgi:app", "--host", "0.0.0.0", "--port", "8000"]
ENTRYPOINT ["/app/setup.sh"]
CMD ["server"]
# Set the user to run the application
USER appuser

View File

@ -90,7 +90,8 @@ then
# Use this script to locally build the docker image
docker buildx build --push --platform $PLATFORMS \
-t hemna6969/aprsd:$TAG \
-f Dockerfile-dev --build-arg branch=$BRANCH \
--build-arg INSTALL_TYPE=github \
--build-arg branch=$BRANCH \
--build-arg BUILDX_QEMU_ENV=true \
--no-cache .
else
@ -101,6 +102,5 @@ else
--build-arg BUILDX_QEMU_ENV=true \
-t hemna6969/aprsd:$VERSION \
-t hemna6969/aprsd:$TAG \
-t hemna6969/aprsd:latest \
-f Dockerfile .
-t hemna6969/aprsd:latest .
fi

View File

@ -1,5 +1,162 @@
[project]
# This is the name of your project. The first time you publish this
# package, this name will be registered for you. It will determine how
# users can install this project, e.g.:
#
# $ pip install sampleproject
#
# And where it will live on PyPI: https://pypi.org/project/sampleproject/
#
# There are some restrictions on what makes a valid project name
# specification here:
# https://packaging.python.org/specifications/core-metadata/#name
name = "aprsd"
description = "APRSd is a APRS-IS server that can be used to connect to APRS-IS and send and receive APRS packets."
# Specify which Python versions you support. In contrast to the
# 'Programming Language' classifiers in this file, 'pip install' will check this
# and refuse to install the project if the version does not match. See
# https://packaging.python.org/guides/distributing-packages-using-setuptools/#python-requires
requires-python = ">=3.8"
dynamic = ["version", "dependencies", "optional-dependencies"]
# This is an optional longer description of your project that represents
# the body of text which users will see when they visit PyPI.
#
# Often, this is the same as your README, so you can just read it in from
# that file directly.
#
# This field corresponds to the "Description" metadata field:
# https://packaging.python.org/specifications/core-metadata/#description-optional
readme = {file = "README.rst", content-type = "text/x-rst"}
# This is either text indicating the license for the distribution, or a file
# that contains the license.
# https://packaging.python.org/en/latest/specifications/core-metadata/#license
license = {file = "LICENSE"}
# This should be your name or the name of the organization who originally
# authored the project, and a valid email address corresponding to the name
# listed.
authors = [
{name = "Craig Lamparter", email = "craig@craiger.org"},
{name = "Walter A. Boring IV", email = "waboring@hemna.com"},
{name = "Emre Saglam", email = "emresaglam@gmail.com"},
{name = "Jason Martin", email= "jhmartin@toger.us"},
{name = "John", email="johng42@users.noreply.github.com"},
{name = "Martiros Shakhzadyan", email="vrzh@vrzh.net"},
{name = "Zoe Moore", email="zoenb@mailbox.org"},
{name = "ranguli", email="hello@joshmurphy.ca"},
]
# This should be your name or the names of the organization who currently
# maintains the project, and a valid email address corresponding to the name
# listed.
maintainers = [
{name = "Craig Lamparter", email = "craig@craiger.org"},
{name = "Walter A. Boring IV", email = "waboring@hemna.com"},
]
# This field adds keywords for your project which will appear on the
# project page. What does your project relate to?
#
# Note that this is a list of additional keywords, separated
# by commas, to be used to assist searching for the distribution in a
# larger catalog.
keywords = [
"aprs",
"aprs-is",
"aprsd",
"aprsd-server",
"aprsd-client",
"aprsd-socket",
"aprsd-socket-server",
"aprsd-socket-client",
]
# Classifiers help users find your project by categorizing it.
#
# For a list of valid classifiers, see https://pypi.org/classifiers/
classifiers = [
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Intended Audience :: Developers",
"Intended Audience :: End Users/Desktop",
"Intended Audience :: Information Technology",
"Topic :: Communications :: Ham Radio",
"Topic :: Communications :: Internet Relay Chat",
"Topic :: Internet",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
]
# This field lists other packages that your project depends on to run.
# Any package you put here will be installed by pip when your project is
# installed, so they must be valid existing projects.
#
# For an analysis of this field vs pip's requirements files see:
# https://packaging.python.org/discussions/install-requires-vs-requirements/
[tool.setuptools.dynamic]
dependencies = {file = ["./requirements.txt"]}
optional-dependencies.dev = {file = ["./requirements-dev.txt"]}
# List additional groups of dependencies here (e.g. development
# dependencies). Users will be able to install these using the "extras"
# syntax, for example:
#
# $ pip install sampleproject[dev]
#
# Optional dependencies the project provides. These are commonly
# referred to as "extras". For a more extensive definition see:
# https://packaging.python.org/en/latest/specifications/dependency-specifiers/#extras
# [project.optional-dependencies]
# List URLs that are relevant to your project
#
# This field corresponds to the "Project-URL" and "Home-Page" metadata fields:
# https://packaging.python.org/specifications/core-metadata/#project-url-multiple-use
# https://packaging.python.org/specifications/core-metadata/#home-page-optional
#
# Examples listed include a pattern for specifying where the package tracks
# issues, where the source is hosted, where to say thanks to the package
# maintainers, and where to support the project financially. The key is
# what's used to render the link text on PyPI.
[project.urls]
"Homepage" = "https://github.com/craigerl/aprsd"
"Bug Reports" = "https://github.com/craigerl/aprsd/issues"
"Source" = "https://github.com/craigerl/aprsd"
# The following would provide a command line executable called `sample`
# which executes the function `main` from this package when invoked.
[project.scripts]
aprsd = "aprsd.main:main"
[project.entry-points."oslo.config.opts"]
"aprsd.conf" = "aprsd.conf.opts:list_opts"
[project.entry-points."oslo.config.opts.defaults"]
"aprsd.conf" = "aprsd.conf:set_lib_defaults"
# If you are using a different build backend, you will need to change this.
[tool.setuptools]
# If there are data files included in your packages that need to be
# installed, specify them here.
py-modules = ["aprsd"]
package-data = {"sample" = ["*.dat"]}
[build-system]
requires = ["setuptools>=46.0", "wheel"]
requires = [
"setuptools>=69.5.0",
"setuptools_scm>=0",
"wheel",
]
build-backend = "setuptools.build_meta"
[tool.isort]
@ -14,3 +171,5 @@ skip_gitignore = true
[tool.coverage.run]
branch = true
[tool.setuptools_scm]

View File

@ -1,16 +1,20 @@
build
check-manifest
flake8
gray
isort
mypy
pep8-naming
pytest
pytest-cov
pip
pip-tools
pre-commit
Sphinx
tox
wheel
# Twine is used for uploading packages to pypi
# but it induces an install of cryptography
# This is sucky for rpi systems.
# twine
pre-commit
pytest
pytest-cov
gray
pip
pip-tools

216
requirements-dev.txt Normal file
View File

@ -0,0 +1,216 @@
#
# This file is autogenerated by pip-compile with Python 3.10
# by the following command:
#
# pip-compile --annotation-style=line requirements-dev.in
#
add-trailing-comma==3.1.0
# via gray
alabaster==0.7.16
# via sphinx
autoflake==1.5.3
# via gray
babel==2.15.0
# via sphinx
black==24.4.2
# via gray
build==1.2.1
# via
# -r requirements-dev.in
# check-manifest
# pip-tools
cachetools==5.3.3
# via tox
certifi==2024.2.2
# via requests
cfgv==3.4.0
# via pre-commit
chardet==5.2.0
# via tox
charset-normalizer==3.3.2
# via requests
check-manifest==0.49
# via -r requirements-dev.in
click==8.1.7
# via
# black
# fixit
# moreorless
# pip-tools
colorama==0.4.6
# via tox
commonmark==0.9.1
# via rich
configargparse==1.7
# via gray
coverage[toml]==7.5.1
# via pytest-cov
distlib==0.3.8
# via virtualenv
docutils==0.21.2
# via sphinx
exceptiongroup==1.2.1
# via pytest
filelock==3.14.0
# via
# tox
# virtualenv
fixit==2.1.0
# via gray
flake8==7.0.0
# via
# -r requirements-dev.in
# pep8-naming
gray==0.15.0
# via -r requirements-dev.in
identify==2.5.36
# via pre-commit
idna==3.7
# via requests
imagesize==1.4.1
# via sphinx
iniconfig==2.0.0
# via pytest
isort==5.13.2
# via
# -r requirements-dev.in
# gray
jinja2==3.1.4
# via sphinx
libcst==1.3.1
# via fixit
markupsafe==2.1.5
# via jinja2
mccabe==0.7.0
# via flake8
moreorless==0.4.0
# via fixit
mypy==1.10.0
# via -r requirements-dev.in
mypy-extensions==1.0.0
# via
# black
# mypy
nodeenv==1.8.0
# via pre-commit
packaging==24.0
# via
# black
# build
# fixit
# pyproject-api
# pytest
# sphinx
# tox
pathspec==0.12.1
# via
# black
# trailrunner
pep8-naming==0.14.1
# via -r requirements-dev.in
pip-tools==7.4.1
# via -r requirements-dev.in
platformdirs==4.2.2
# via
# black
# tox
# virtualenv
pluggy==1.5.0
# via
# pytest
# tox
pre-commit==3.7.1
# via -r requirements-dev.in
pycodestyle==2.11.1
# via flake8
pyflakes==3.2.0
# via
# autoflake
# flake8
pygments==2.18.0
# via
# rich
# sphinx
pyproject-api==1.6.1
# via tox
pyproject-hooks==1.1.0
# via
# build
# pip-tools
pytest==8.2.0
# via
# -r requirements-dev.in
# pytest-cov
pytest-cov==5.0.0
# via -r requirements-dev.in
pyupgrade==3.15.2
# via gray
pyyaml==6.0.1
# via
# libcst
# pre-commit
requests==2.32.0
# via sphinx
rich==12.6.0
# via gray
snowballstemmer==2.2.0
# via sphinx
sphinx==7.3.7
# via -r requirements-dev.in
sphinxcontrib-applehelp==1.0.8
# via sphinx
sphinxcontrib-devhelp==1.0.6
# via sphinx
sphinxcontrib-htmlhelp==2.0.5
# via sphinx
sphinxcontrib-jsmath==1.0.1
# via sphinx
sphinxcontrib-qthelp==1.0.7
# via sphinx
sphinxcontrib-serializinghtml==1.1.10
# via sphinx
tokenize-rt==5.2.0
# via
# add-trailing-comma
# pyupgrade
toml==0.10.2
# via autoflake
tomli==2.0.1
# via
# black
# build
# check-manifest
# coverage
# fixit
# mypy
# pip-tools
# pyproject-api
# pytest
# sphinx
# tox
tox==4.15.0
# via -r requirements-dev.in
trailrunner==1.4.0
# via fixit
typing-extensions==4.11.0
# via
# black
# mypy
unify==0.5
# via gray
untokenize==0.1.1
# via unify
urllib3==2.2.1
# via requests
virtualenv==20.26.2
# via
# pre-commit
# tox
wheel==0.43.0
# via
# -r requirements-dev.in
# pip-tools
# The following packages are considered to be unsafe in a requirements file:
# pip
# setuptools

View File

@ -1,38 +1,32 @@
aprslib>=0.7.0
click
click-params
flask
werkzeug
flask-httpauth
imapclient
pluggy
pbr
pyyaml
requests
pytz
tzlocal
six
thesmuggler
update_checker
flask-socketio
python-socketio
gevent
eventlet
tabulate
# Pinned due to gray needing 12.6.0
rich~=12.6.0
# For the list-plugins pypi.org search scraping
beautifulsoup4
wrapt
# kiss3 uses attrs
kiss3
attrs
click
click-params
dataclasses
oslo.config
# Pin this here so it doesn't require a compile on
# raspi
shellingham
geopy
rush
dataclasses-json
eventlet
flask
flask-httpauth
flask-socketio
geopy
gevent
imapclient
kiss3
loguru
oslo.config
pluggy
python-socketio
pyyaml
pytz
requests
# Pinned due to gray needing 12.6.0
rich~=12.6.0
rush
shellingham
six
tabulate
thesmuggler
tzlocal
update_checker
wrapt

View File

@ -4,79 +4,179 @@
#
# pip-compile --annotation-style=line requirements.in
#
aprslib==0.7.2 # via -r requirements.in
attrs==23.2.0 # via -r requirements.in, ax253, kiss3, rush
ax253==0.1.5.post1 # via kiss3
beautifulsoup4==4.12.3 # via -r requirements.in
bidict==0.23.1 # via python-socketio
bitarray==2.9.2 # via ax253, kiss3
blinker==1.7.0 # via flask
certifi==2024.2.2 # via requests
charset-normalizer==3.3.2 # via requests
click==8.1.7 # via -r requirements.in, click-completion, click-params, flask
click-completion==0.5.2 # via -r requirements.in
click-params==0.5.0 # via -r requirements.in
commonmark==0.9.1 # via rich
dataclasses==0.6 # via -r requirements.in
dataclasses-json==0.6.4 # via -r requirements.in
debtcollector==3.0.0 # via oslo-config
deprecated==1.2.14 # via click-params
dnspython==2.6.1 # via eventlet
eventlet==0.36.1 # via -r requirements.in
flask==3.0.3 # via -r requirements.in, flask-httpauth, flask-socketio
flask-httpauth==4.8.0 # via -r requirements.in
flask-socketio==5.3.6 # via -r requirements.in
geographiclib==2.0 # via geopy
geopy==2.4.1 # via -r requirements.in
gevent==24.2.1 # via -r requirements.in
greenlet==3.0.3 # via eventlet, gevent
h11==0.14.0 # via wsproto
idna==3.7 # via requests
imapclient==3.0.1 # via -r requirements.in
importlib-metadata==7.1.0 # via ax253, kiss3
itsdangerous==2.2.0 # via flask
jinja2==3.1.3 # via click-completion, flask
kiss3==8.0.0 # via -r requirements.in
loguru==0.7.2 # via -r requirements.in
markupsafe==2.1.5 # via jinja2, werkzeug
marshmallow==3.21.1 # via dataclasses-json
mypy-extensions==1.0.0 # via typing-inspect
netaddr==1.2.1 # via oslo-config
oslo-config==9.4.0 # via -r requirements.in
oslo-i18n==6.3.0 # via oslo-config
packaging==24.0 # via marshmallow
pbr==6.0.0 # via -r requirements.in, oslo-i18n, stevedore
pluggy==1.5.0 # via -r requirements.in
pygments==2.17.2 # via rich
pyserial==3.5 # via pyserial-asyncio
pyserial-asyncio==0.6 # via kiss3
python-engineio==4.9.0 # via python-socketio
python-socketio==5.11.2 # via -r requirements.in, flask-socketio
pytz==2024.1 # via -r requirements.in
pyyaml==6.0.1 # via -r requirements.in, oslo-config
requests==2.31.0 # via -r requirements.in, oslo-config, update-checker
rfc3986==2.0.0 # via oslo-config
rich==12.6.0 # via -r requirements.in
rush==2021.4.0 # via -r requirements.in
shellingham==1.5.4 # via -r requirements.in, click-completion
simple-websocket==1.0.0 # via python-engineio
six==1.16.0 # via -r requirements.in, click-completion
soupsieve==2.5 # via beautifulsoup4
stevedore==5.2.0 # via oslo-config
tabulate==0.9.0 # via -r requirements.in
thesmuggler==1.0.1 # via -r requirements.in
typing-extensions==4.11.0 # via typing-inspect
typing-inspect==0.9.0 # via dataclasses-json
tzlocal==5.2 # via -r requirements.in
update-checker==0.18.0 # via -r requirements.in
urllib3==2.2.1 # via requests
validators==0.22.0 # via click-params
werkzeug==3.0.2 # via -r requirements.in, flask
wrapt==1.16.0 # via -r requirements.in, debtcollector, deprecated
wsproto==1.2.0 # via simple-websocket
zipp==3.18.1 # via importlib-metadata
zope-event==5.0 # via gevent
zope-interface==6.3 # via gevent
aprslib==0.7.2
# via -r requirements.in
attrs==23.2.0
# via
# ax253
# kiss3
# rush
ax253==0.1.5.post1
# via kiss3
beautifulsoup4==4.12.3
# via -r requirements.in
bidict==0.23.1
# via python-socketio
bitarray==2.9.2
# via
# ax253
# kiss3
blinker==1.8.2
# via flask
certifi==2024.2.2
# via requests
charset-normalizer==3.3.2
# via requests
click==8.1.7
# via
# -r requirements.in
# click-params
# flask
click-params==0.5.0
# via -r requirements.in
commonmark==0.9.1
# via rich
dataclasses==0.6
# via -r requirements.in
dataclasses-json==0.6.6
# via -r requirements.in
debtcollector==3.0.0
# via oslo-config
deprecated==1.2.14
# via click-params
dnspython==2.6.1
# via eventlet
eventlet==0.36.1
# via -r requirements.in
flask==3.0.3
# via
# -r requirements.in
# flask-httpauth
# flask-socketio
flask-httpauth==4.8.0
# via -r requirements.in
flask-socketio==5.3.6
# via -r requirements.in
geographiclib==2.0
# via geopy
geopy==2.4.1
# via -r requirements.in
gevent==24.2.1
# via -r requirements.in
greenlet==3.0.3
# via
# eventlet
# gevent
h11==0.14.0
# via wsproto
idna==3.7
# via requests
imapclient==3.0.1
# via -r requirements.in
importlib-metadata==7.1.0
# via
# ax253
# kiss3
itsdangerous==2.2.0
# via flask
jinja2==3.1.4
# via flask
kiss3==8.0.0
# via -r requirements.in
loguru==0.7.2
# via -r requirements.in
markupsafe==2.1.5
# via
# jinja2
# werkzeug
marshmallow==3.21.2
# via dataclasses-json
mypy-extensions==1.0.0
# via typing-inspect
netaddr==1.2.1
# via oslo-config
oslo-config==9.4.0
# via -r requirements.in
oslo-i18n==6.3.0
# via oslo-config
packaging==24.0
# via marshmallow
pbr==6.0.0
# via
# oslo-i18n
# stevedore
pluggy==1.5.0
# via -r requirements.in
pygments==2.18.0
# via rich
pyserial==3.5
# via pyserial-asyncio
pyserial-asyncio==0.6
# via kiss3
python-engineio==4.9.0
# via python-socketio
python-socketio==5.11.2
# via
# -r requirements.in
# flask-socketio
pytz==2024.1
# via -r requirements.in
pyyaml==6.0.1
# via
# -r requirements.in
# oslo-config
requests==2.32.0
# via
# -r requirements.in
# oslo-config
# update-checker
rfc3986==2.0.0
# via oslo-config
rich==12.6.0
# via -r requirements.in
rush==2021.4.0
# via -r requirements.in
shellingham==1.5.4
# via -r requirements.in
simple-websocket==1.0.0
# via python-engineio
six==1.16.0
# via -r requirements.in
soupsieve==2.5
# via beautifulsoup4
stevedore==5.2.0
# via oslo-config
tabulate==0.9.0
# via -r requirements.in
thesmuggler==1.0.1
# via -r requirements.in
typing-extensions==4.11.0
# via typing-inspect
typing-inspect==0.9.0
# via dataclasses-json
tzlocal==5.2
# via -r requirements.in
update-checker==0.18.0
# via -r requirements.in
urllib3==2.2.1
# via requests
validators==0.22.0
# via click-params
werkzeug==3.0.3
# via flask
wrapt==1.16.0
# via
# -r requirements.in
# debtcollector
# deprecated
wsproto==1.2.0
# via simple-websocket
zipp==3.18.2
# via importlib-metadata
zope-event==5.0
# via gevent
zope-interface==6.4
# via gevent
# The following packages are considered to be unsafe in a requirements file:
# setuptools

setup.cfg

@ -1,51 +0,0 @@
[metadata]
name = aprsd
long_description = file: README.rst
long_description_content_type = text/x-rst
url = http://aprsd.readthedocs.org
author = Craig Lamparter
author_email = something@somewhere.com
license = Apache
license_file = LICENSE
classifier =
License :: OSI Approved :: Apache Software License
Topic :: Communications :: Ham Radio
Operating System :: POSIX :: Linux
Programming Language :: Python :: 3 :: Only
Programming Language :: Python :: 3
Programming Language :: Python :: 3.7
Programming Language :: Python :: 3.8
Programming Language :: Python :: 3.9
description_file =
README.rst
project_urls =
Source=https://github.com/craigerl/aprsd
Tracker=https://github.com/craigerl/aprsd/issues
summary = Amateur radio APRS daemon which listens for messages and responds
[global]
setup-hooks =
pbr.hooks.setup_hook
[files]
packages =
aprsd
[entry_points]
console_scripts =
aprsd = aprsd.main:main
oslo.config.opts =
aprsd.conf = aprsd.conf.opts:list_opts
oslo.config.opts.defaults =
aprsd.conf = aprsd.conf:set_lib_defaults
[build_sphinx]
source-dir = docs
build-dir = docs/_build
all_files = 1
[upload_sphinx]
upload-dir = docs/_build
[bdist_wheel]
universal = 1

setup.py

@ -17,12 +17,4 @@
import setuptools
# In python < 2.7.4, a lazy loading of package `pbr` will break
# setuptools if some other modules registered functions in `atexit`.
# solution from: http://bugs.python.org/issue15881#msg170215
try:
import multiprocessing # noqa
except ImportError:
pass
setuptools.setup(setup_requires=["pbr"], pbr=True)
setuptools.setup()


@ -62,9 +62,9 @@ class TestAPRSDWatchListPluginBase(TestWatchListPlugin):
expected = packets.NULL_MESSAGE
self.assertEqual(expected, actual)
@mock.patch("aprsd.client.ClientFactory", autospec=True)
@mock.patch("aprsd.client.factory.ClientFactory", autospec=True)
def test_watchlist_not_in_watchlist(self, mock_factory):
client.factory = mock_factory
client.client_factory = mock_factory
self.config_and_init()
plugin = fake.FakeWatchListPlugin()
@ -92,9 +92,9 @@ class TestNotifySeenPlugin(TestWatchListPlugin):
expected = packets.NULL_MESSAGE
self.assertEqual(expected, actual)
@mock.patch("aprsd.client.ClientFactory", autospec=True)
@mock.patch("aprsd.client.factory.ClientFactory", autospec=True)
def test_callsign_not_in_watchlist(self, mock_factory):
client.factory = mock_factory
client.client_factory = mock_factory
self.config_and_init(watchlist_enabled=False)
plugin = notify_plugin.NotifySeenPlugin()
@ -106,10 +106,10 @@ class TestNotifySeenPlugin(TestWatchListPlugin):
expected = packets.NULL_MESSAGE
self.assertEqual(expected, actual)
@mock.patch("aprsd.client.ClientFactory", autospec=True)
@mock.patch("aprsd.client.factory.ClientFactory", autospec=True)
@mock.patch("aprsd.packets.WatchList.is_old")
def test_callsign_in_watchlist_not_old(self, mock_is_old, mock_factory):
client.factory = mock_factory
client.client_factory = mock_factory
mock_is_old.return_value = False
self.config_and_init(
watchlist_enabled=True,
@ -126,10 +126,10 @@ class TestNotifySeenPlugin(TestWatchListPlugin):
expected = packets.NULL_MESSAGE
self.assertEqual(expected, actual)
@mock.patch("aprsd.client.ClientFactory", autospec=True)
@mock.patch("aprsd.client.factory.ClientFactory", autospec=True)
@mock.patch("aprsd.packets.WatchList.is_old")
def test_callsign_in_watchlist_old_same_alert_callsign(self, mock_is_old, mock_factory):
client.factory = mock_factory
client.client_factory = mock_factory
mock_is_old.return_value = True
self.config_and_init(
watchlist_enabled=True,
@ -147,10 +147,10 @@ class TestNotifySeenPlugin(TestWatchListPlugin):
expected = packets.NULL_MESSAGE
self.assertEqual(expected, actual)
@mock.patch("aprsd.client.ClientFactory", autospec=True)
@mock.patch("aprsd.client.factory.ClientFactory", autospec=True)
@mock.patch("aprsd.packets.WatchList.is_old")
def test_callsign_in_watchlist_old_send_alert(self, mock_is_old, mock_factory):
client.factory = mock_factory
client.client_factory = mock_factory
mock_is_old.return_value = True
notify_callsign = fake.FAKE_TO_CALLSIGN
fromcall = "WB4BOR"
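
For context, every updated hunk in this test module makes the same two substitutions: the mock.patch target moves from aprsd.client.ClientFactory to aprsd.client.factory.ClientFactory, and the module-level handle the tests overwrite is renamed from client.factory to client.client_factory. A minimal sketch of that pattern outside its original test class follows; the patch target and attribute name are taken from the hunks above, while the import, the test name, and the trailing steps are illustrative assumptions.

from unittest import mock

from aprsd import client  # assumed import; the hunks above do not show the test module's imports


@mock.patch("aprsd.client.factory.ClientFactory", autospec=True)
def test_plugin_with_mocked_client(mock_factory):
    # Patch the factory at its new module path, then swap in the renamed
    # module-level handle, mirroring what the tests above do before
    # calling config_and_init().
    client.client_factory = mock_factory
    # ... build the plugin under test and assert on the resulting packet ...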

10
tox.ini

@ -23,7 +23,7 @@ install_command = pip install {opts} {packages}
extras = tests
deps = coverage: coverage
-r{toxinidir}/requirements.txt
-r{toxinidir}/dev-requirements.txt
-r{toxinidir}/requirements-dev.txt
pytestmain: git+https://github.com/pytest-dev/pytest.git@main
commands =
pytest -v --cov-report term-missing --cov=aprsd {posargs}
@ -34,7 +34,7 @@ commands =
skip_install = true
deps =
-r{toxinidir}/requirements.txt
-r{toxinidir}/dev-requirements.txt
-r{toxinidir}/requirements-dev.txt
{toxinidir}/.
changedir = {toxinidir}/docs
commands =
@ -57,7 +57,7 @@ passenv = FAST8_NUM_COMMITS
[testenv:lint]
skip_install = true
deps =
-r{toxinidir}/dev-requirements.txt
-r{toxinidir}/requirements-dev.txt
commands =
flake8 aprsd tests
@ -85,14 +85,14 @@ python =
# and standard formatting
skip_install = true
deps =
-r{toxinidir}/dev-requirements.txt
-r{toxinidir}/requirements-dev.txt
commands =
gray aprsd tests
[testenv:type-check]
skip_install = true
deps = -r{toxinidir}/requirements.txt
-r{toxinidir}/dev-requirements.txt
-r{toxinidir}/requirements-dev.txt
commands =
mypy --ignore-missing-imports --install-types aprsd