1
0
mirror of https://github.com/craigerl/aprsd.git synced 2025-07-07 11:25:15 -04:00

Compare commits

..

No commits in common. "master" and "v3.2.0" have entirely different histories.

207 changed files with 14639 additions and 12161 deletions

View File

@ -1,15 +0,0 @@
name: Update Authors
on:
push:
branches:
- master
jobs:
run:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: wow-actions/update-authors@v1
with:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
template: '{{email}} : {{commits}}'
path: 'AUTHORS'

View File

@ -1,84 +0,0 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"
on:
push:
branches: [ "master" ]
pull_request:
branches: [ "master" ]
schedule:
- cron: '36 8 * * 0'
jobs:
analyze:
name: Analyze
# Runner size impacts CodeQL analysis time. To learn more, please see:
# - https://gh.io/recommended-hardware-resources-for-running-codeql
# - https://gh.io/supported-runners-and-hardware-resources
# - https://gh.io/using-larger-runners
# Consider using larger runners for possible analysis time improvements.
runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }}
timeout-minutes: ${{ (matrix.language == 'swift' && 120) || 360 }}
permissions:
# required for all workflows
security-events: write
# only required for workflows in private repositories
actions: read
contents: read
strategy:
fail-fast: false
matrix:
language: [ 'javascript-typescript', 'python' ]
# CodeQL supports [ 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift' ]
# Use only 'java-kotlin' to analyze code written in Java, Kotlin or both
# Use only 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
steps:
- name: Checkout repository
uses: actions/checkout@v4
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
# queries: security-extended,security-and-quality
# Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v3
# Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
# If the Autobuild fails above, remove it and uncomment the following three lines.
# modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
# - run: |
# echo "Run, Build Application using script"
# ./location_of_script_within_repo/buildscript.sh
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3
with:
category: "/language:${{matrix.language}}"

View File

@ -18,9 +18,10 @@ jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Get Branch Name
id: branch-name
uses: tj-actions/branch-names@v8
uses: tj-actions/branch-names@v7
- name: Extract Branch
id: extract_branch
run: |
@ -29,22 +30,21 @@ jobs:
run: |
echo "Selected Branch '${{ steps.extract_branch.outputs.branch }}'"
- name: Setup QEMU
uses: docker/setup-qemu-action@v3
uses: docker/setup-qemu-action@v2
- name: Setup Docker Buildx
uses: docker/setup-buildx-action@v3
uses: docker/setup-buildx-action@v2
- name: Login to Docker HUB
uses: docker/login-action@v3
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build the Docker image
uses: docker/build-push-action@v6
uses: docker/build-push-action@v3
with:
context: "{{defaultContext}}:docker"
platforms: linux/amd64,linux/arm64
file: ./Dockerfile
file: ./Dockerfile-dev
build-args: |
INSTALL_TYPE=github
BRANCH=${{ steps.extract_branch.outputs.branch }}
BUILDX_QEMU_ENV=true
push: true

View File

@ -7,7 +7,7 @@ on:
branches:
- "**"
tags:
- "*.*.*"
- "v*.*.*"
pull_request:
branches:
- "master"
@ -17,7 +17,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ["3.10", "3.11", "3.12"]
python-version: ["3.9", "3.10", "3.11"]
steps:
- uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
@ -35,26 +35,26 @@ jobs:
needs: tox
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Get Branch Name
id: branch-name
uses: tj-actions/branch-names@v8
uses: tj-actions/branch-names@v6
- name: Setup QEMU
uses: docker/setup-qemu-action@v3
uses: docker/setup-qemu-action@v2
- name: Setup Docker Buildx
uses: docker/setup-buildx-action@v3
uses: docker/setup-buildx-action@v2
- name: Login to Docker HUB
uses: docker/login-action@v3
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build the Docker image
uses: docker/build-push-action@v6
uses: docker/build-push-action@v3
with:
context: "{{defaultContext}}:docker"
platforms: linux/amd64,linux/arm64
file: ./Dockerfile
file: ./Dockerfile-dev
build-args: |
INSTALL_TYPE=github
BRANCH=${{ steps.branch-name.outputs.current_branch }}
BUILDX_QEMU_ENV=true
push: true

View File

@ -7,11 +7,11 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ["3.10", "3.11"]
python-version: ["3.9", "3.10", "3.11"]
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies

View File

@ -6,7 +6,7 @@ on:
aprsd_version:
required: true
options:
- 4.0.0
- 3.0.0
logLevel:
description: 'Log level'
required: true
@ -24,7 +24,7 @@ jobs:
- uses: actions/checkout@v3
- name: Get Branch Name
id: branch-name
uses: tj-actions/branch-names@v8
uses: tj-actions/branch-names@v6
- name: Setup QEMU
uses: docker/setup-qemu-action@v2
- name: Setup Docker Buildx
@ -41,10 +41,9 @@ jobs:
platforms: linux/amd64,linux/arm64
file: ./Dockerfile
build-args: |
INSTALL_TYPE=pypi
VERSION=${{ inputs.aprsd_version }}
BUILDX_QEMU_ENV=true
push: true
tags: |
hemna6969/aprsd:${{ inputs.aprsd_version }}
hemna6969/aprsd:v${{ inputs.aprsd_version }}
hemna6969/aprsd:latest

8
.gitignore vendored
View File

@ -58,11 +58,3 @@ AUTHORS
.idea
Makefile.venv
# Copilot
.DS_Store
.python-version
.fleet
.vscode
.envrc
.doit.db

View File

@ -1,4 +0,0 @@
Craig Lamparter <craig@craiger.org> <craiger@hpe.com>
Craig Lamparter <craig@craiger.org> craigerl <craig@craiger.org>
Craig Lamparter <craig@craiger.org> craigerl <craiger@hpe.com>
Walter A. Boring IV <waboring@hemna.com> Hemna <waboring@hemna.com>

View File

@ -1,6 +1,6 @@
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v5.0.0
rev: v3.4.0
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer
@ -10,32 +10,13 @@ repos:
- id: check-case-conflict
- id: check-docstring-first
- id: check-builtin-literals
- id: check-illegal-windows-names
- repo: https://github.com/asottile/setup-cfg-fmt
rev: v2.7.0
rev: v1.16.0
hooks:
- id: setup-cfg-fmt
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.9.0
- repo: https://github.com/dizballanze/gray
rev: v0.10.1
hooks:
- id: ruff
###### Relevant part below ######
- id: ruff
args: ["check", "--select", "I", "--fix"]
###### Relevant part above ######
- id: ruff-format
- repo: https://github.com/astral-sh/uv-pre-commit
# uv version.
rev: 0.5.16
hooks:
# Compile requirements
- id: pip-compile
name: pip-compile requirements.in
args: [--resolver, backtracking, --annotation-style=line, requirements.in, -o, requirements.txt]
- id: pip-compile
name: pip-compile requirements-dev.in
args: [--resolver, backtracking, --annotation-style=line, requirements-dev.in, -o, requirements-dev.txt]
files: ^requirements-dev\.(in|txt)$
- id: gray

View File

@ -1 +0,0 @@
waboring@hemna.com : 1

View File

@ -1,31 +0,0 @@
# CONTRIBUTING
Code contributions are welcomed and appreciated. Just submit a PR!
The current build environment uses `pre-commit`, and `uv`.
### Environment setup:
```console
pip install uv
uv venv
uv pip install pip-tools
git clone git@github.com:craigerl/aprsd.git
cd aprsd
pre-commit install
# Optionally run the pre-commit scripts at any time
pre-commit run --all-files
```
### Running and testing:
From the aprstastic directory:
```console
cd aprsd
uv pip install -e .
# Running
uv run aprsd
```

835
ChangeLog Normal file
View File

@ -0,0 +1,835 @@
CHANGES
=======
v3.2.0
------
* minor cleanup prior to release
* Webchat: fix input maxlength
* WebChat: cleanup some console.logs
* WebChat: flash a dupe message
* Webchat: Fix issue accessing msg.id
* Webchat: Fix chat css on older browsers
* WebChat: new tab should get focus
* Bump gevent from 23.9.0.post1 to 23.9.1
* Webchat: Fix pep8 errors
* Webchat: Added tab notifications and raw packet
* WebChat: Prevent sending message without callsign
* WebChat: fixed content area scrolling
* Webchat: tweaks to UI for expanding chat
* Webchat: Fixed bug deleteing first tab
* Ensure Keepalive doesn't reset client at startup
* Ensure parse\_delta\_str doesn't puke
* WebChat: Send GPS Beacon working
* webchat: got active tab onclick working
* webchat: set to\_call to value of tab when selected
* Center the webchat input form
* Update index.html to use chat.css
* Deleted webchat mobile pages
* Added close X on webchat tabs
* Reworked webchat with new UI
* Updated the webchat UI to look like iMessage
* Restore previous conversations in webchat
* Remove VIM from Dockerfile
* recreate client during reset()
* updated github workflows
* Updated documentation build
* Removed admin\_web.py
* Removed some RPC server log noise
* Fixed admin page packet date
* RPC Server logs the client IP on failed auth
* Start keepalive thread first
* fixed an issue in the mobile webchat
* Added dupe checkig code to webchat mobile
* click on the div after added
* Webchat suppress to display of dupe messages
* Convert webchat internet urls to local static urls
* Make use of webchat gps config options
* Added new webchat config section
* fixed webchat logging.logformat typeoh
v3.1.3
------
* prep for 3.1.3
* Forcefully allow development webchat flask
v3.1.2
------
* Updated Changelog for 3.1.2
* Added support for ThirdParty packet types
* Disable the Send GPS Beacon button
* Removed adhoc ssl support in webchat
v3.1.1
------
* Updated Changelog for v3.1.1
* Fixed pep8 failures
* re-enable USWeatherPlugin to use mapClick
* Fix sending packets over KISS interface
* Use config web\_ip for running admin ui from module
* remove loop log
* Max out the client reconnect backoff to 5
* Update the Dockerfile
v3.1.0
------
* Changelog updates for v3.1.0
* Use CONF.admin.web\_port for single launch web admin
* Fixed sio namespace registration
* Update Dockerfile-dev to include uwsgi
* Fixed pep8
* change port to 8000
* replacement of flask-socketio with python-socketio
* Change how fetch-stats gets it's defaults
* Ensure fetch-stats ip is a string
* Add info logging for rpc server calls
* updated wsgi config default /config/aprsd.conf
* Added timing after each thread loop
* Update docker bin/admin.sh
* Removed flask-classful from webchat
* Remove flask pinning
* removed linux/arm/v8
* Update master build to include linux/arm/v8
* Update Dockerfile-dev to fix plugin permissions
* update manual build github
* Update requirements for upgraded cryptography
* Added more libs for Dockerfile-dev
* Replace Dockerfile-dev with python3 slim
* Moved logging to log for wsgi.py
* Changed weather plugin regex pattern
* Limit the float values to 3 decimal places
* Fixed rain numbers from aprslib
* Fixed rpc client initialization
* Fix in for aprslib issue #80
* Try and fix Dockerfile-dev
* Fixed pep8 errors
* Populate stats object with threads info
* added counts to the fetch-stats table
* Added the fetch-stats command
* Replace ratelimiter with rush
* Added some utilities to Dockerfile-dev
* add arm64 for manual github build
* Added manual master build
* Update master-build.yml
* Add github manual trigger for master build
* Fixed unit tests for Location plugin
* USe new tox and update githubworkflows
* Updated requirements
* force tox to 4.3.5
* Update github workflows
* Fixed pep8 violation
* Added rpc server for listen
* Update location plugin and reworked requirements
* Fixed .readthedocs.yaml format
* Add .readthedocs.yaml
* Example plugin wrong function
* Ensure conf is imported for threads/tx
* Update Dockerfile to help build cryptography
v3.0.3
------
* Update Changelog to 3.0.3
* cleanup some debug messages
* Fixed loading of plugins for server
* Don't load help plugin for listen command
* Added listen args
* Change listen command plugins
* Added listen.sh for docker
* Update Listen command
* Update Dockerfile
* Add ratelimiting for acks and other packets
v3.0.2
------
* Update Changelog for 3.0.2
* Import RejectPacket
v3.0.1
------
* 3.0.1
* Add support to Reject messages
* Update Docker builds for 3.0.0
v3.0.0
------
* Update Changelog for 3.0.0
* Ensure server command main thread doesn't exit
* Fixed save directory default
* Fixed pep8 failure
* Cleaned up KISS interfaces use of old config
* reworked usage of importlib.metadata
* Added new docs files for 3.0.0
* Removed url option from healthcheck in dev
* Updated Healthcheck to use rpc to call aprsd
* Updated docker/bin/run.sh to use new conf
* Added ObjectPacket
* Update regex processing and regex for plugins
* Change ordering of starting up of server command
* Update documentation and README
* Decouple admin web interface from server command
* Dockerfile now produces aprsd.conf
* Fix some unit tests and loading of CONF w/o file
* Added missing conf
* Removed references to old custom config
* Convert config to oslo\_config
* Added rain formatting unit tests to WeatherPacket
* Fix Rain reporting in WeatherPacket send
* Removed Packet.send()
* Removed watchlist plugins
* Fix PluginManager.get\_plugins
* Cleaned up PluginManager
* Cleaned up PluginManager
* Update routing for weatherpacket
* Fix some WeatherPacket formatting
* Fix pep8 violation
* Add packet filtering for aprsd listen
* Added WeatherPacket encoding
* Updated webchat and listen for queue based RX
* reworked collecting and reporting stats
* Removed unused threading code
* Change RX packet processing to enqueu
* Make tracking objectstores work w/o initializing
* Cleaned up packet transmit class attributes
* Fix packets timestamp to int
* More messaging -> packets cleanup
* Cleaned out all references to messaging
* Added contructing a GPSPacket for sending
* cleanup webchat
* Reworked all packet processing
* Updated plugins and plugin interfaces for Packet
* Started using dataclasses to describe packets
v2.6.1
------
* v2.6.1
* Fixed position report for webchat beacon
* Try and fix broken 32bit qemu builds on 64bit system
* Add unit tests for webchat
* remove armv7 build RUST sucks
* Fix for Collections change in 3.10
v2.6.0
------
* Update workflow again
* Update Dockerfile to 22.04
* Update Dockerfile and build.sh
* Update workflow
* Prep for 2.6.0 release
* Update requirements
* Removed Makefile comment
* Update Makefile for dev vs. run environments
* Added pyopenssl for https for webchat
* change from device-detector to user-agents
* Remove twine from dev-requirements
* Update to latest Makefile.venv
* Refactored threads a bit
* Mark packets as acked in MsgTracker
* remove dev setting for template
* Add GPS beacon to mobile page
* Allow werkzeug for admin interface
* Allow werkzeug for admin interface
* Add support for mobile browsers for webchat
* Ignore callsign case while processing packets
* remove linux/arm/v7 for official builds for now
* added workflow for building specific version
* Allow passing in version to the Dockerfile
* Send GPS Beacon from webchat interface
* specify Dockerfile-dev
* Fixed build.sh
* Build on the source not released aprsd
* Remove email validation
* Add support for building linux/arm/v7
* Remove python 3.7 from docker build github
* Fixed failing unit tests
* change github workflow
* Removed TimeOpenCageDataPlugin
* Dump config with aprsd dev test-plugin
* Updated requirements
* Got webchat working with KISS tcp
* Added click auto\_envvar\_prefix
* Update aprsd thread base class to use queue
* Update packets to use wrapt
* Add remving existing requirements
* Try sending raw APRSFrames to aioax25
* Use new aprsd.callsign as the main callsign
* Fixed access to threads refactor
* Added webchat command
* Moved log.py to logging
* Moved trace.py to utils
* Fixed pep8 errors
* Refactored threads.py
* Refactor utils to directory
* remove arm build for now
* Added rustc and cargo to Dockerfile
* remove linux/arm/v6 from docker platform build
* Only tag master build as master
* Remove docker build from test
* create master-build.yml
* Added container build action
* Update docs on using Docker
* Update dev-requirements pip-tools
* Fix typo in docker-compose.yml
* Fix PyPI scraping
* Allow web interface when running in Docker
* Fix typo on exception
* README formatting fixes
* Bump dependencies to fix python 3.10
* Fixed up config option checking for KISS
* Fix logging issue with log messages
* for 2.5.9
v2.5.9
------
* FIX: logging exceptions
* Updated build and run for rich lib
* update build for 2.5.8
v2.5.8
------
* For 2.5.8
* Removed debug code
* Updated list-plugins
* Renamed virtualenv dir to .aprsd-venv
* Added unit tests for dev test-plugin
* Send Message command defaults to config
v2.5.7
------
* Updated Changelog
* Fixed an KISS config disabled issue
* Fixed a bug with multiple notify plugins enabled
* Unify the logging to file and stdout
* Added new feature to list-plugins command
* more README.rst cleanup
* Updated README examples
v2.5.6
------
* Changelog
* Tightened up the packet logging
* Added unit tests for USWeatherPlugin, USMetarPlugin
* Added test\_location to test LocationPlugin
* Updated pytest output
* Added py39 to tox for tests
* Added NotifyPlugin unit tests and more
* Small cleanup on packet logging
* Reduced the APRSIS connection reset to 2 minutes
* Fixed the NotifyPlugin
* Fixed some pep8 errors
* Add tracing for dev command
* Added python rich library based logging
* Added LOG\_LEVEL env variable for the docker
v2.5.5
------
* Update requirements to use aprslib 0.7.0
* fixed the failure during loading for objectstore
* updated docker build
v2.5.4
------
* Updated Changelog
* Fixed dev command missing initialization
v2.5.3
------
* Fix admin logging tab
v2.5.2
------
* Added new list-plugins command
* Don't require check-version command to have a config
* Healthcheck command doesn't need the aprsd.yml config
* Fix test failures
* Removed requirement for aprs.fi key
* Updated Changelog
v2.5.1
------
* Removed stock plugin
* Removed the stock plugin
v2.5.0
------
* Updated for v2.5.0
* Updated Dockerfile's and build script for docker
* Cleaned up some verbose output & colorized output
* Reworked all the common arguments
* Fixed test-plugin
* Ensure common params are honored
* pep8
* Added healthcheck to the cmds
* Removed the need for FROMCALL in dev test-plugin
* Pep8 failures
* Refactor the cli
* Updated Changelog for 4.2.3
* Fixed a problem with send-message command
v2.4.2
------
* Updated Changelog
* Be more careful picking data to/from disk
* Updated Changelog
v2.4.1
------
* Ensure plugins are last to be loaded
* Fixed email connecting to smtp server
v2.4.0
------
* Updated Changelog for 2.4.0 release
* Converted MsgTrack to ObjectStoreMixin
* Fixed unit tests
* Make sure SeenList update has a from in packet
* Ensure PacketList is initialized
* Added SIGTERM to signal\_handler
* Enable configuring where to save the objectstore data
* PEP8 cleanup
* Added objectstore Mixin
* Added -num option to aprsd-dev test-plugin
* Only call stop\_threads if it exists
* Added new SeenList
* Added plugin version to stats reporting
* Added new HelpPlugin
* Updated aprsd-dev to use config for logfile format
* Updated build.sh
* removed usage of config.check\_config\_option
* Fixed send-message after config/client rework
* Fixed issue with flask config
* Added some server startup info logs
* Increase email delay to +10
* Updated dev to use plugin manager
* Fixed notify plugins
* Added new Config object
* Fixed email plugin's use of globals
* Refactored client classes
* Refactor utils usage
* 2.3.1 Changelog
v2.3.1
------
* Fixed issue of aprs-is missing keepalive
* Fixed packet processing issue with aprsd send-message
v2.3.0
------
* Prep 2.3.0
* Enable plugins to return message object
* Added enabled flag for every plugin object
* Ensure plugin threads are valid
* Updated Dockerfile to use v2.3.0
* Removed fixed size on logging queue
* Added Logfile tab in Admin ui
* Updated Makefile clean target
* Added self creating Makefile help target
* Update dev.py
* Allow passing in aprsis\_client
* Fixed a problem with the AVWX plugin not working
* Remove some noisy trace in email plugin
* Fixed issue at startup with notify plugin
* Fixed email validation
* Removed values from forms
* Added send-message to the main admin UI
* Updated requirements
* Cleaned up some pep8 failures
* Upgraded the send-message POC to use websockets
* New Admin ui send message page working
* Send Message via admin Web interface
* Updated Admin UI to show KISS connections
* Got TX/RX working with aioax25+direwolf over TCP
* Rebased from master
* Added the ability to use direwolf KISS socket
* Update Dockerfile to use 2.2.1
v2.2.1
------
* Update Changelog for 2.2.1
* Silence some log noise
v2.2.0
------
* Updated Changelog for v2.2.0
* Updated overview image
* Removed Black code style reference
* Removed TXThread
* Added days to uptime string formatting
* Updated select timeouts
* Rebase from master and run gray
* Added tracking plugin processing
* Added threads functions to APRSDPluginBase
* Refactor Message processing and MORE
* Use Gray instead of Black for code formatting
* Updated tox.ini
* Fixed LOG.debug issue in weather plugin
* Updated slack channel link
* Cleanup of the README.rst
* Fixed aprsd-dev
v2.1.0
------
* Prep for v2.1.0
* Enable multiple replies for plugins
* Put in a fix for aprslib parse exceptions
* Fixed time plugin
* Updated the charts Added the packets chart
* Added showing symbol images to watch list
v2.0.0
------
* Updated docs for 2.0.0
* Reworked the notification threads and admin ui
* Fixed small bug with packets get\_packet\_type
* Updated overview images
* Move version string output to top of log
* Add new watchlist feature
* Fixed the Ack thread not resending acks
* reworked the admin ui to use semenatic ui more
* Added messages count to admin messages list
* Add admin UI tabs for charts, messages, config
* Removed a noisy debug log
* Dump out the config during startup
* Added message counts for each plugin
* Bump urllib3 from 1.26.4 to 1.26.5
* Added aprsd version checking
* Updated INSTALL.txt
* Update my callsign
* Update README.rst
* Update README.rst
* Bump urllib3 from 1.26.3 to 1.26.4
* Prep for v1.6.1 release
v1.6.1
------
* Removed debug log for KeepAlive thread
* ignore Makefile.venv
* Reworked Makefile to use Makefile.venv
* Fixed version unit tests
* Updated stats output for KeepAlive thread
* Update Dockerfile-dev to work with startup
* Force all the graphs to 0 minimum
* Added email messages graphs
* Reworked the stats dict output and healthcheck
* Added callsign to the web index page
* Added log config for flask and lnav config file
* Added showing APRS-IS server to stats
* Provide an initial datapoint on rendering index
* Make the index page behind auth
* Bump pygments from 2.7.3 to 2.7.4
* Added acks with messages graphs
* Updated web stats index to show messages and ram usage
* Added aprsd web index page
* Bump lxml from 4.6.2 to 4.6.3
* Bump jinja2 from 2.11.2 to 2.11.3
* Bump urllib3 from 1.26.2 to 1.26.3
* Added log format and dateformat to config file
* Added Dockerfile-dev and updated build.sh
* Require python 3.7 and >
* Added plugin live reload and StockPlugin
* Updated Dockerfile and build.sh
* Updated Dockerfile for multiplatform builds
* Updated Dockerfile for multiplatform builds
* Dockerfile: Make creation of /config quiet failure
* Updated README docs
v1.6.0
------
* 1.6.0 release prep
* Updated path of run.sh for docker build
* Moved docker related stuffs to docker dir
* Removed some noisy debug log
* Bump cryptography from 3.3.1 to 3.3.2
* Wrap another server call with try except
* Wrap all imap calls with try except blocks
* Bump bleach from 3.2.1 to 3.3.0
* EmailThread was exiting because of IMAP timeout, added exceptions for this
* Added memory tracing in keeplive
* Fixed tox pep8 failure for trace
* Added tracing facility
* Fixed email login issue
* duplicate email messages from RF would generate usage response
* Enable debug logging for smtp and imap
* more debug around email thread
* debug around EmailThread hanging or vanishing
* Fixed resend email after config rework
* Added flask messages web UI and basic auth
* Fixed an issue with LocationPlugin
* Cleaned up the KeepAlive output
* updated .gitignore
* Added healthcheck app
* Add flask and flask\_classful reqs
* Added Flask web thread and stats collection
* First hack at flask
* Allow email to be disabled
* Reworked the config file and options
* Updated documentation and config output
* Fixed extracting lat/lon
* Added openweathermap weather plugin
* Added new time plugins
* Fixed TimePlugin timezone issue
* remove fortune white space
* fix git with install.txt
* change query char from ? to !
* Updated readme to include readthedocs link
* Added aprsd-dev plugin test cli and WxPlugin
v1.5.1
------
* Updated Changelog for v1.5.1
* Updated README to fix pypi page
* Update INSTALL.txt
v1.5.0
------
* Updated Changelog for v1.5.0 release
* Fix tox tests
* fix usage statement
* Enabled some emailthread messages and added timestamp
* Fixed main server client initialization
* test plugin expect responses update to match query output
* Fixed the queryPlugin unit test
* Removed flask code
* Changed default log level to INFO
* fix plugin tests to expect new strings
* fix query command syntax ?, ?3, ?d(elete), ?a(ll)
* Fixed latitude reporting in locationPlugin
* get rid of some debug noise from tracker and email delay
* fixed sample-config double print
* make sample config easier to interpret
* Fixed comments
* Added the ability to add comments to the config file
* Updated docker run.sh script
* Added --raw format for sending messages
* Fixed --quiet option
* Added send-message login checking and --no-ack
* Added new config for aprs.fi API Key
* Added a fix for failed logins to APRS-IS
* Fixed unit test for fortune plugin
* Fixed fortune plugin failures
* getting out of git hell with client.py problems
* Extend APRS.IS object to change login string
* Extend APRS.IS object to change login string
* expect different reply from query plugin
* update query plugin to resend last N messages. syntax: ?rN
* Added unit test for QueryPlugin
* Updated MsgTrack restart\_delayed
* refactor Plugin objects to plugins directory
* Updated README with more workflow details
* change query character syntax, don't reply that we're resending stuff
* Added APRSD system diagram to docs
* Disable MX record validation
* Added some more badges to readme files
* Updated build for docs tox -edocs
* switch command characters for query plugin
* Fix broken test
* undo git disaster
* swap Query command characters a bit
* Added Sphinx based documentation
* refactor Plugin objects to plugins directory
* Updated Makefile
* removed double-quote-string-fixer
* Lots of fixes
* Added more pre-commit hook tests
* Fixed email shortcut lookup
* Added Makefile for easy dev setup
* Added Makefile for easy dev setup
* Cleaned out old ack\_dict
* add null reply for send\_email
* Updated README with more workflow details
* backout my patch that broke tox, trying to push to craiger-test branch
* Fixed failures caused by last commit
* don't tell radio emails were sent, ack is enuf
* Updated README to include development env
* Added pre-commit hooks
* Update Changelog for v1.5.0
* Added QueryPlugin resend all delayed msgs or Flush
* Added QueryPlugin
* Added support to save/load MsgTrack on exit/start
* Creation of MsgTrack object and other stuff
* Added FortunePlugin unit test
* Added some plugin unit tests
* reworked threading
* Reworked messaging lib
v1.1.0
------
* Refactored the main process\_packet method
* Update README with version 1.1.0 related info
* Added fix for an unknown packet type
* Ensure fortune is installed
* Updated docker-compose
* Added Changelog
* Fixed issue when RX ack
* Updated the aprsd-slack-plugin required version
* Updated README.rst
* Fixed send-message with email command and others
* Update .gitignore
* Big patch
* Major refactor
* Updated the Dockerfile to use alpine
v1.0.1
------
* Fix unknown characterset emails
* Updated loggin timestamp to include []
* Updated README with a TOC
* Updates for building containers
* Don't use the dirname for the plugin path search
* Reworked Plugin loading
* Updated README with development information
* Fixed an issue with weather plugin
v1.0.0
------
* Rewrote the README.md to README.rst
* Fixed the usage string after plugins introduced
* Created plugin.py for Command Plugins
* Refactor networking and commands
* get rid of some debug statements
* yet another unicode problem, in resend\_email fixed
* reset default email check delay to 60, fix a few comments
* Update tox environment to fix formatting python errors
* fixed fortune. yet another unicode issue, tested in py3 and py2
* lose some logging statements
* completely off urllib now, tested locate/weather in py2 and py3
* add urllib import back until i replace all calls with requests
* cleaned up weather code after switch to requests ... from urllib. works on py2 and py3
* switch from urlib to requests for weather, tested in py3 and py2. still need to update locate, and all other http calls
* imap tags are unicode in py3. .decode tags
* Update INSTALL.txt
* Initial conversion to click
* Reconnect on socket timeout
* clean up code around closed\_socket and reconnect
* Update INSTALL.txt
* Fixed all pep8 errors and some py3 errors
* fix check\_email\_thread to do proper threading, take delay as arg
* found another .decode that didn't include errors='ignore'
* some failed attempts at getting the first txt or html from a multipart message, currently sends the last
* fix parse\_email unicode probs by using body.decode(errors='ignore').. again
* fix parse\_email unicode probs by using body.decode(errors='ignore')
* clean up code around closed\_socket and reconnect
* socket timeout 5 minutes
* Detect closed socket, reconnect, with a bit more grace
* can detect closed socket and reconnect now
* Update INSTALL.txt
* more debugging messages trying to find rare tight loop in main
* Update INSTALL.txt
* main loop went into tight loop, more debug prints
* main loop went into tight loop, added debug print before every continue
* Update INSTALL.txt
* Update INSTALL.txt
* George Carlin profanity filter
* added decaying email check timer which resets with activity
* Fixed all pep8 errors and some py3 errors
* Fixed all pep8 errors and some py3 errors
* Reconnect on socket timeout
* socket reconnect on timeout testing
* socket timeout of 300 instead of 60
* Reconnect on socket timeout
* socket reconnect on timeout testing
* Fixed all pep8 errors and some py3 errors
* fix check\_email\_thread to do proper threading, take delay as arg
* INSTALL.txt for the average person
* fix bugs after beautification and yaml config additions. Convert to sockets. case insensitive commands
* fix INBOX
* Update README.md
* Added tox support
* Fixed SMTP settings
* Created fake\_aprs.py
* select inbox if gmail server
* removed ASS
* Added a try block around imap login
* Added port and fixed telnet user
* Require ~/.aprsd/config.yml
* updated README for install and usage instructions
* added test to ensure shortcuts in config.yml
* added exit if missing config file
* Added reading of a config file
* update readme
* update readme
* sanitize readme
* readme again again
* readme again again
* readme again
* readme
* readme update
* First stab at migrating this to a pytpi repo
* First stab at migrating this to a pytpi repo
* Added password, callsign and host
* Added argparse for cli options
* comments
* Cleaned up trailing whitespace
* add tweaked fuzzyclock
* make tn a global
* Added standard python main()
* tweaks to readme
* drop virtenv on first line
* sanitize readme a bit more
* sanitize readme a bit more
* sanitize readme
* added weather and location 3
* added weather and location 2
* added weather and location
* mapme
* de-localize
* Update README.md
* Update README.md
* Update README.md
* Update README.md
* de-localize
* Update README.md
* Update README.md
* Update aprsd.py
* Add files via upload
* Update README.md
* Update aprsd.py
* Update README.md
* Update README.md
* Update README.md
* Update README.md
* Update README.md
* Update README.md
* Update README.md
* Update README.md
* Update README.md
* Update README.md
* Update README.md
* Update README.md
* Add files via upload
* Initial commit

File diff suppressed because it is too large Load Diff

View File

@ -27,10 +27,9 @@ pip install -e .
# CONFIGURE
# Now configure aprsd HERE
mkdir -p ~/.config/aprsd
./aprsd sample-config > ~/.config/aprsd/aprsd.conf # generates a config template
./aprsd sample-config # generates a config.yml template
vi ~/.config/aprsd/aprsd.conf # copy/edit config here
vi ~/.config/aprsd/config.yml # copy/edit config here
aprsd server

View File

@ -1,5 +1,5 @@
WORKDIR?=.
VENVDIR ?= $(WORKDIR)/.venv
VENVDIR ?= $(WORKDIR)/.aprsd-venv
.DEFAULT_GOAL := help
@ -17,33 +17,29 @@ Makefile.venv:
help: # Help for the Makefile
@egrep -h '\s##\s' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}'
dev: REQUIREMENTS_TXT = requirements.txt requirements-dev.txt
dev: REQUIREMENTS_TXT = requirements.txt dev-requirements.txt
dev: venv ## Create a python virtual environment for development of aprsd
run: venv ## Create a virtual environment for running aprsd commands
changelog: dev
npm i -g auto-changelog
auto-changelog -l false --sort-commits date -o ChangeLog.md
docs: changelog
m2r --overwrite ChangeLog.md
docs: dev
cp README.rst docs/readme.rst
mv ChangeLog.rst docs/changelog.rst
cp Changelog docs/changelog.rst
tox -edocs
clean: clean-dev clean-test clean-build clean-pyc ## remove all build, test, coverage and Python artifacts
clean: clean-build clean-pyc clean-test clean-dev ## remove all build, test, coverage and Python artifacts
clean-build: ## remove build artifacts
rm -fr build/
rm -fr dist/
rm -fr .eggs/
find . -name '*.egg-info' -exec rm -fr {} +
find . -name '*.egg' -exec rm -fr {} +
find . -name '*.egg' -exec rm -f {} +
clean-pyc: ## remove Python file artifacts
find . -name '*.pyc' -exec rm -fr {} +
find . -name '*.pyo' -exec rm -fr {} +
find . -name '*.pyc' -exec rm -f {} +
find . -name '*.pyo' -exec rm -f {} +
find . -name '*~' -exec rm -f {} +
find . -name '__pycache__' -exec rm -fr {} +
clean-test: ## remove test and coverage artifacts
@ -59,9 +55,9 @@ clean-dev:
test: dev ## Run all the tox tests
tox -p all
build: test changelog ## Make the build artifact prior to doing an upload
build: test ## Make the build artifact prior to doing an upload
$(VENV)/pip install twine
$(VENV)/python3 -m build
$(VENV)/python3 setup.py sdist bdist_wheel
$(VENV)/twine check dist/*
upload: build ## Upload a new version of the plugin
@ -85,8 +81,8 @@ docker-dev: test ## Make a development docker container tagged with hemna6969/a
update-requirements: dev ## Update the requirements.txt and dev-requirements.txt files
rm requirements.txt
rm requirements-dev.txt
rm dev-requirements.txt
touch requirements.txt
touch requirements-dev.txt
$(VENV)/pip-compile --resolver backtracking --annotation-style=line requirements.in
$(VENV)/pip-compile --resolver backtracking --annotation-style=line requirements-dev.in
touch dev-requirements.txt
$(VENV)/pip-compile --resolver backtracking --annotation-style line requirements.in
$(VENV)/pip-compile --resolver backtracking --annotation-style line dev-requirements.in

454
README.md
View File

@ -1,454 +0,0 @@
# APRSD - Ham radio APRS-IS Message platform software
## KM6LYW and WB4BOR
[![pypi](https://badge.fury.io/py/aprsd.svg)](https://badge.fury.io/py/aprsd)
[![versions](https://img.shields.io/pypi/pyversions/aprsd.svg)](https://pypi.org/pypi/aprsd)
[![slack](https://img.shields.io/badge/slack-@hemna/aprsd-blue.svg?logo=slack)](https://hemna.slack.com/app_redirect?channel=C01KQSCP5RP)
![issues](https://img.shields.io/github/issues/craigerl/aprsd)
![commit](https://img.shields.io/github/last-commit/craigerl/aprsd)
[![imports](https://img.shields.io/badge/%20imports-isort-%231674b1?style=flat&labelColor=ef8336)](https://timothycrosley.github.io/isort/)
[![down](https://static.pepy.tech/personalized-badge/aprsd?period=month&units=international_system&left_color=black&right_color=orange&left_text=Downloads)](https://pepy.tech/project/aprsd)
[APRSD](http://github.com/craigerl/aprsd) is a Ham radio
[APRS](http://aprs.org) message platform built with python.
![image](./aprsd_logo.png)
# Table of Contents
1. [APRSD - Ham radio APRS-IS Message platform software](#aprsd---ham-radio-aprs-is-message-platform-software)
2. [What is APRSD](#what-is-aprsd)
3. [APRSD Plugins/Extensions](#aprsd-pluginsextensions)
4. [List of existing plugins - APRS Message processing/responders](#list-of-existing-plugins---aprs-message-processingresponders)
5. [List of existing extensions - Add new capabilities to APRSD](#list-of-existing-extensions---add-new-capabilities-to-aprsd)
6. [APRSD Overview Diagram](#aprsd-overview-diagram)
7. [Typical use case](#typical-use-case)
8. [Installation](#installation)
9. [Example usage](#example-usage)
10. [Help](#help)
11. [Commands](#commands)
12. [Configuration](#configuration)
13. [server](#server)
14. [Current list plugins](#current-list-plugins)
15. [Current list extensions](#current-list-extensions)
16. [send-message](#send-message)
17. [Development](#development)
18. [Release](#release)
19. [Building your own APRSD plugins](#building-your-own-aprsd-plugins)
20. [Overview](#overview)
21. [Docker Container](#docker-container)
22. [Building](#building)
23. [Official Build](#official-build)
24. [Development Build](#development-build)
25. [Running the container](#running-the-container)
26. [Activity](#activity)
27. [Star History](#star-history)
---
> [!WARNING]
> Legal operation of this software requires an amateur radio license and a valid call sign.
> [!NOTE]
> Star this repo to follow our progress! This code is under active development, and contributions are both welcomed and appreciated. See [CONTRIBUTING.md](<https://github.com/craigerl/aprsd/blob/master/CONTRIBUTING.md>) for details.
### What is APRSD
APRSD is a python application for interacting with the APRS network and Ham radios with KISS interfaces and
providing APRS services for HAM radio operators.
APRSD currently has 4 main commands to use.
- server - Connect to APRS and listen/respond to APRS messages
- send-message - Send a message to a callsign via APRS_IS.
- listen - Listen to packets on the APRS-IS Network based on FILTER.
- check-version - check the version of aprsd
- sample-config - generate a sample config file
- dev - helpful for testing new aprsd plugins under development
- dump-stats - output the stats of a running aprsd server command
- list-plugins - list the built in plugins, available plugins on pypi.org and installed plugins
- list-extensions - list the available extensions on pypi.org and installed extensions
Each of those commands can connect to the APRS-IS network if internet
connectivity is available. If internet is not available, then APRS can
be configured to talk to a TCP KISS TNC for radio connectivity directly.
Please [read the docs](https://aprsd.readthedocs.io) to learn more!
### APRSD Plugins/Extensions
APRSD has the ability to add plugins and extensions. Plugins add new message filters that can look for specific messages and respond. For example, the aprsd-email-plugin adds the ability to send/receive email to/from an APRS callsign. Extensions add new unique capabilities to APRSD itself. For example the aprsd-admin-extension adds a web interface command that shows the running status of the aprsd server command. aprsd-webchat-extension is a new web based APRS 'chat' command.
You can see the [available plugins/extensions on pypi here:](https://pypi.org/search/?q=aprsd) [https://pypi.org/search/?q=aprsd](https://pypi.org/search/?q=aprsd)
> [!NOTE]
> aprsd admin and webchat commands have been extracted into separate extensions.
* [See admin extension here](https://github.com/hemna/aprsd-admin-extension) <div id="admin logo" align="left"><img src="https://raw.githubusercontent.com/hemna/aprsd-admin-extension/refs/heads/master/screenshot.png" alt="Web Admin" width="340"/></div>
* [See webchat extension here](https://github.com/hemna/aprsd-webchat-extension) <div id="webchat logo" align="left"><img src="https://raw.githubusercontent.com/hemna/aprsd-webchat-extension/master/screenshot.png" alt="Webchat" width="340"/></div>
### List of existing plugins - APRS Message processing/responders
- [aprsd-email-plugin](https://github.com/hemna/aprsd-email-plugin) - send/receive email!
- [aprsd-location-plugin](https://github.com/hemna/aprsd-location-plugin) - get latest GPS location.
- [aprsd-locationdata-plugin](https://github.com/hemna/aprsd-locationdata-plugin) - get latest GPS location
- [aprsd-digipi-plugin](https://github.com/hemna/aprsd-digipi-plugin) - Look for digipi beacon packets
- [aprsd-w3w-plugin](https://github.com/hemna/aprsd-w3w-plugin) - get your w3w coordinates
- [aprsd-mqtt-plugin](https://github.com/hemna/aprsd-mqtt-plugin) - send aprs packets to an MQTT topic
- [aprsd-telegram-plugin](https://github.com/hemna/aprsd-telegram-plugin) - send/receive messages to telegram
- [aprsd-borat-plugin](https://github.com/hemna/aprsd-borat-plugin) - get Borat quotes
- [aprsd-wxnow-plugin](https://github.com/hemna/aprsd-wxnow-plugin) - get closest N weather station reports
- [aprsd-weewx-plugin](https://github.com/hemna/aprsd-weewx-plugin) - get weather from your weewx weather station
- [aprsd-slack-plugin](https://github.com/hemna/aprsd-slack-plugin) - send/receive messages to a slack channel
- [aprsd-sentry-plugin](https://github.com/hemna/aprsd-sentry-plugin) -
- [aprsd-repeat-plugins](https://github.com/hemna/aprsd-repeat-plugins) - plugins for the REPEAT service. Get nearest Ham radio repeaters!
- [aprsd-twitter-plugin](https://github.com/hemna/aprsd-twitter-plugin) - make tweets from your Ham Radio!
- [aprsd-timeopencage-plugin](https://github.com/hemna/aprsd-timeopencage-plugin) - Get local time for a callsign
- [aprsd-stock-plugin](https://github.com/hemna/aprsd-stock-plugin) - get stock quotes from your Ham radio
### List of existing extensions - Add new capabilities to APRSD
- [aprsd-admin-extension](https://github.com/hemna/aprsd-admin-extension) - Web Administration page for APRSD
- [aprsd-webchat-extension](https://github.com/hemna/aprsd-webchat-extension) - Web page for APRS Messaging
- [aprsd-irc-extension](https://github.com/hemna/aprsd-irc-extension) - an IRC like server command for APRS
### APRSD Overview Diagram
![image](https://raw.githubusercontent.com/craigerl/aprsd/master/docs/_static/aprsd_overview.svg?sanitize=true)
### Typical use case
APRSD\'s typical use case is that of providing an APRS wide service to
all HAM radio operators. For example the callsign \'REPEAT\' on the APRS
network is actually an instance of APRSD that can provide a list of HAM
repeaters in the area of the callsign that sent the message.
Ham radio operator using an APRS enabled HAM radio sends a message to
check the weather. An APRS message is sent, and then picked up by APRSD.
The APRS packet is decoded, and the message is sent through the list of
plugins for processing. For example, the WeatherPlugin picks up the
message, fetches the weather for the area around the user who sent the
request, and then responds with the weather conditions in that area.
Also includes a watch list of HAM callsigns to look out for. The watch
list can notify you when a HAM callsign in the list is seen and now
available to message on the APRS network.
### Installation
To install `aprsd`, use Pip:
`pip install aprsd`
### Example usage
`aprsd -h`
### Help
:
└─> aprsd -h
Usage: aprsd [OPTIONS] COMMAND [ARGS]...
Options:
--version Show the version and exit.
-h, --help Show this message and exit.
Commands:
check-version Check this version against the latest in pypi.org.
completion Show the shell completion code
dev Development type subcommands
fetch-stats Fetch stats from a APRSD admin web interface.
healthcheck Check the health of the running aprsd server.
list-extensions List the built in plugins available to APRSD.
list-plugins List the built in plugins available to APRSD.
listen Listen to packets on the APRS-IS Network based on FILTER.
sample-config Generate a sample Config file from aprsd and all...
send-message Send a message to a callsign via APRS_IS.
server Start the aprsd server gateway process.
version Show the APRSD version.
### Commands
### Configuration
This command outputs a sample config yml formatted block that you can
edit and use to pass in to `aprsd` with `-c`. By default aprsd looks in
`~/.config/aprsd/aprsd.yml`
`aprsd sample-config`
└─> aprsd sample-config
...
### server
This is the main server command that will listen to APRS-IS servers and
look for incoming commands to the callsign configured in the config
file.
└─[$] > aprsd server --help
Usage: aprsd server [OPTIONS]
Start the aprsd server gateway process.
Options:
--loglevel [CRITICAL|ERROR|WARNING|INFO|DEBUG]
The log level to use for aprsd.log
[default: INFO]
-c, --config TEXT The aprsd config file to use for options.
[default:
/Users/i530566/.config/aprsd/aprsd.yml]
--quiet Don't log to stdout
-f, --flush Flush out all old aged messages on disk.
[default: False]
-h, --help Show this message and exit.
└─> aprsd server
Registering LogMonitorThread
2025-01-06 16:27:12.398 | MainThread | INFO | APRSD is up to date | aprsd.cmds.server:server:82
2025-01-06 16:27:12.398 | MainThread | INFO | APRSD Started version: 3.5.1.dev0+g72d068c.d20250102 | aprsd.cmds.server:server:83
2025-01-06 16:27:12.398 | MainThread | INFO | Creating client connection | aprsd.cmds.server:server:101
2025-01-06 16:27:12.398 | MainThread | INFO | Creating aprslib client(noam.aprs2.net:14580) and logging in WB4BOR-1. | aprsd.client.aprsis:setup_connection:136
2025-01-06 16:27:12.398 | MainThread | INFO | Attempting connection to noam.aprs2.net:14580 | aprslib.inet:_connect:226
2025-01-06 16:27:12.473 | MainThread | INFO | Connected to ('44.135.208.225', 14580) | aprslib.inet:_connect:233
2025-01-06 16:27:12.617 | MainThread | INFO | Login successful | aprsd.client.drivers.aprsis:_send_login:154
2025-01-06 16:27:12.618 | MainThread | INFO | Connected to T2BC | aprsd.client.drivers.aprsis:_send_login:156
2025-01-06 16:27:12.618 | MainThread | INFO | <aprsd.client.aprsis.APRSISClient object at 0x103a36480> | aprsd.cmds.server:server:103
2025-01-06 16:27:12.618 | MainThread | INFO | Loading Plugin Manager and registering plugins | aprsd.cmds.server:server:117
2025-01-06 16:27:12.619 | MainThread | INFO | Loading APRSD Plugins | aprsd.plugin:setup_plugins:492
#### Current list plugins
└─> aprsd list-plugins
🐍 APRSD Built-in Plugins 🐍
┏━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓
┃ Plugin Name ┃ Info ┃ Type ┃ Plugin Path ┃
┡━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩
│ AVWXWeatherPlugin │ AVWX weather of GPS Beacon location │ RegexCommand │ aprsd.plugins.weather.AVWXWeatherPlugin │
│ FortunePlugin │ Give me a fortune │ RegexCommand │ aprsd.plugins.fortune.FortunePlugin │
│ NotifySeenPlugin │ Notify me when a CALLSIGN is recently seen on APRS-IS │ WatchList │ aprsd.plugins.notify.NotifySeenPlugin │
│ OWMWeatherPlugin │ OpenWeatherMap weather of GPS Beacon location │ RegexCommand │ aprsd.plugins.weather.OWMWeatherPlugin │
│ PingPlugin │ reply with a Pong! │ RegexCommand │ aprsd.plugins.ping.PingPlugin │
│ TimeOWMPlugin │ Current time of GPS beacon's timezone. Uses OpenWeatherMap │ RegexCommand │ aprsd.plugins.time.TimeOWMPlugin │
│ TimePlugin │ What is the current local time. │ RegexCommand │ aprsd.plugins.time.TimePlugin │
│ USMetarPlugin │ USA only METAR of GPS Beacon location │ RegexCommand │ aprsd.plugins.weather.USMetarPlugin │
│ USWeatherPlugin │ Provide USA only weather of GPS Beacon location │ RegexCommand │ aprsd.plugins.weather.USWeatherPlugin │
│ VersionPlugin │ What is the APRSD Version │ RegexCommand │ aprsd.plugins.version.VersionPlugin │
└───────────────────┴────────────────────────────────────────────────────────────┴──────────────┴─────────────────────────────────────────┘
Pypi.org APRSD Installable Plugin Packages
Install any of the following plugins with
'pip install <Plugin Package Name>'
┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━┓
┃ Plugin Package Name ┃ Description ┃ Version ┃ Released ┃ Installed? ┃
┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━┩
│ 📂 aprsd-assistant-plugin │ APRSd plugin for hosting the APRS Assistant chatbot │ 0.0.3 │ 2024-10-20T02:59:39 │ No │
│ │ (aprs-assistant) │ │ │ │
│ 📂 aprsd-borat-plugin │ Borat quotes for aprsd plugin │ 0.1.1.dev1 │ 2024-01-19T16:04:38 │ No │
│ 📂 aprsd-locationdata-plugin │ Fetch location information from a callsign │ 0.3.0 │ 2024-02-06T17:20:43 │ No │
│ 📂 aprsd-mqtt-plugin │ APRSD MQTT Plugin sends APRS packets to mqtt queue │ 0.2.0 │ 2023-04-17T16:01:50 │ No │
│ 📂 aprsd-repeat-plugins │ APRSD Plugins for the REPEAT service │ 1.2.0 │ 2023-01-10T17:15:36 │ No │
│ 📂 aprsd-sentry-plugin │ Ham radio APRSD plugin that does.... │ 0.1.2 │ 2022-12-02T19:07:33 │ No │
│ 📂 aprsd-slack-plugin │ Amateur radio APRS daemon which listens for messages and │ 1.2.0 │ 2023-01-10T19:21:33 │ No │
│ │ responds │ │ │ │
│ 📂 aprsd-stock-plugin │ Ham Radio APRSD Plugin for fetching stock quotes │ 0.1.3 │ 2022-12-02T18:56:19 │ Yes │
│ 📂 aprsd-telegram-plugin │ Ham Radio APRS APRSD plugin for Telegram IM service │ 0.1.3 │ 2022-12-02T19:07:15 │ No │
│ 📂 aprsd-timeopencage-plugin │ APRSD plugin for fetching time based on GPS location │ 0.2.0 │ 2023-01-10T17:07:11 │ No │
│ 📂 aprsd-twitter-plugin │ Python APRSD plugin to send tweets │ 0.5.0 │ 2023-01-10T16:51:47 │ No │
│ 📂 aprsd-weewx-plugin │ HAM Radio APRSD that reports weather from a weewx weather │ 0.3.2 │ 2023-04-20T20:16:19 │ No │
│ │ station. │ │ │ │
│ 📂 aprsd-wxnow-plugin │ APRSD Plugin for getting the closest wx reports to last │ 0.2.0 │ 2023-10-08T01:27:29 │ Yes │
│ │ beacon │ │ │ │
└──────────────────────────────┴──────────────────────────────────────────────────────────────┴────────────┴─────────────────────┴────────────┘
🐍 APRSD Installed 3rd party Plugins 🐍
┏━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓
┃ Package Name ┃ Plugin Name ┃ Version ┃ Type ┃ Plugin Path ┃
┡━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩
│ aprsd-stock-plugin │ YahooStockQuote │ 0.1.3 │ RegexCommand │ aprsd_stock_plugin.stock.YahooStockQuote │
│ aprsd-wxnow-plugin │ WXNowPlugin │ 0.2.0 │ RegexCommand │ aprsd_wxnow_plugin.conf.opts.WXNowPlugin │
└────────────────────┴─────────────────┴─────────┴──────────────┴──────────────────────────────────────────┘
#### Current list extensions
└─> aprsd list-extensions
Pypi.org APRSD Installable Extension Packages
Install any of the following extensions by running
'pip install <Extension Package Name>'
┏━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━┓
┃ Extension Package Name ┃ Description ┃ Version ┃ Released ┃ Installed? ┃
┡━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━┩
│ 📂 aprsd-admin-extension │ Administration extension for the Ham radio APRSD Server │ 1.0.1 │ 2025-01-06T21:57:24 │ Yes │
│ 📂 aprsd-irc-extension │ An Extension to Ham radio APRSD Daemon to act like an irc server │ 0.0.5 │ 2024-04-09T11:28:47 │ No │
│ │ for APRS │ │ │ │
└──────────────────────────┴─────────────────────────────────────────────────────────────────────┴─────────┴─────────────────────┴────────────┘
### send-message
This command is typically used for development to send another aprsd
instance test messages
└─[$] > aprsd send-message -h
Usage: aprsd send-message [OPTIONS] TOCALLSIGN COMMAND...
Send a message to a callsign via APRS_IS.
Options:
--loglevel [CRITICAL|ERROR|WARNING|INFO|DEBUG]
The log level to use for aprsd.log
[default: INFO]
-c, --config TEXT The aprsd config file to use for options.
[default:
/Users/i530566/.config/aprsd/aprsd.yml]
--quiet Don't log to stdout
--aprs-login TEXT What callsign to send the message from.
[env var: APRS_LOGIN]
--aprs-password TEXT the APRS-IS password for APRS_LOGIN [env
var: APRS_PASSWORD]
-n, --no-ack Don't wait for an ack, just sent it to APRS-
IS and bail. [default: False]
-w, --wait-response Wait for a response to the message?
[default: False]
--raw TEXT Send a raw message. Implies --no-ack
-h, --help Show this message and exit.
### Development
- `git clone git@github.com:craigerl/aprsd.git`
- `cd aprsd`
- `make`
#### Workflow
While working aprsd, The workflow is as follows:
- Checkout a new branch to work on by running
`git checkout -b mybranch`
- Make your changes to the code
- Run Tox with the following options:
- `tox -epep8`
- `tox -efmt`
- `tox -p`
- Commit your changes. This will run the pre-commit hooks which does
checks too
`git commit`
- Once you are done with all of your commits, then push up the branch
to github with:
`git push -u origin mybranch`
- Create a pull request from your branch so github tests can run and
we can do a code review.
#### Release
To do release to pypi:
- Tag release with:
`git tag -a v1.XX -m "New release"`
- Push release tag:
`git push origin master --tags`
- Do a test build and verify build is valid by running:
`make build`
- Once twine is happy, upload release to pypi:
`make upload`
#### Building your own APRSD plugins
APRSD plugins are the mechanism by which APRSD can respond to APRS
Messages. The plugins are loaded at server startup and can also be
loaded at listen startup. When a packet is received by APRSD, it is
passed to each of the plugins in the order they were registered in the
config file. The plugins can then decide what to do with the packet.
When a plugin is called, it is passed a APRSD Packet object. The plugin
can then do something with the packet and return a reply message if
desired. If a plugin does not want to reply to the packet, it can just
return None. When a plugin does return a reply message, APRSD will send
the reply message to the appropriate destination.
For example, when a \'ping\' message is received, the PingPlugin will
return a reply message of \'pong\'. When APRSD receives the \'pong\'
message, it will be sent back to the original caller of the ping
message.
APRSD plugins are simply python packages that can be installed from
pypi.org. They are installed into the aprsd virtualenv and can be
imported by APRSD at runtime. The plugins are registered in the config
file and loaded at startup of the aprsd server command or the aprsd
listen command.
#### Overview
You can build your own plugins by following the instructions in the
[Building your own APRSD plugins](#building-your-own-aprsd-plugins)
section.
Plugins are called by APRSD when packets are received.
### Docker Container
### Building
There are 2 versions of the container Dockerfile that can be used. The
main Dockerfile, which is for building the official release container
based off of the pip install version of aprsd and the Dockerfile-dev,
which is used for building a container based off of a git branch of the
repo.
### Official Build
`docker build -t hemna6969/aprsd:latest .`
### Development Build
`docker build -t hemna6969/aprsd:latest -f Dockerfile-dev .`
### Running the container
There is a `docker-compose.yml` file in the `docker/` directory that can
be used to run your container. To provide the container an `aprsd.conf`
configuration file, change your `docker-compose.yml` as shown below:
volumes:
- $HOME/.config/aprsd:/config
To install plugins at container start time, pass in a list of
comma-separated list of plugins on PyPI using the `APRSD_PLUGINS`
environment variable in the `docker-compose.yml` file. Note that version
constraints may also be provided. For example:
environment:
- APRSD_PLUGINS=aprsd-slack-plugin>=1.0.2,aprsd-twitter-plugin
### Activity
![Alt](https://repobeats.axiom.co/api/embed/8b96657861770a15f0b851a5eebafb34d0e0b3d3.svg "Repobeats analytics image")
## Star History
[![Star History Chart](https://api.star-history.com/svg?repos=craigerl/aprsd&type=Date)](https://star-history.com/#craigerl/aprsd&Date)

437
README.rst Normal file
View File

@ -0,0 +1,437 @@
===============================================
APRSD - Ham radio APRS-IS Message plugin server
===============================================
KM6LYW and WB4BOR
____________________
|pypi| |pytest| |versions| |slack| |issues| |commit| |imports| |down|
`APRSD <http://github.com/craigerl/aprsd>`_ is a Ham radio `APRS <http://aprs.org>`_ message command gateway built on python.
APRSD listens on the amateur radio APRS-IS network for messages and responds to them.
It has a plugin architecture for extensibility. Users of APRSD can write their own
plugins that can respond to APRS-IS messages.
You must have an amateur radio callsign to use this software. APRSD gets
messages for the configured HAM callsign, and sends those messages to a
list of plugins for processing. There are a set of core plugins that
provide responding to messages to check email, get location, ping,
time of day, get weather, and fortune telling as well as version information
of aprsd itself.
Please `read the docs`_ to learn more!
.. contents:: :local:
APRSD Overview Diagram
======================
.. image:: https://raw.githubusercontent.com/craigerl/aprsd/master/docs/_static/aprsd_overview.svg?sanitize=true
Typical use case
================
Ham radio operator using an APRS enabled HAM radio sends a message to check
the weather. An APRS message is sent, and then picked up by APRSD. The
APRS packet is decoded, and the message is sent through the list of plugins
for processing. For example, the WeatherPlugin picks up the message, fetches the weather
for the area around the user who sent the request, and then responds with
the weather conditions in that area. Also includes a watch list of HAM
callsigns to look out for. The watch list can notify you when a HAM callsign
in the list is seen and now available to message on the APRS network.
Current list of built-in plugins
======================================
::
└─> aprsd list-plugins
🐍 APRSD Built-in Plugins 🐍
┏━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓
┃ Plugin Name ┃ Info ┃ Type ┃ Plugin Path ┃
┡━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩
│ AVWXWeatherPlugin │ AVWX weather of GPS Beacon location │ RegexCommand │ aprsd.plugins.weather.AVWXWeatherPlugin │
│ EmailPlugin │ Send and Receive email │ RegexCommand │ aprsd.plugins.email.EmailPlugin │
│ FortunePlugin │ Give me a fortune │ RegexCommand │ aprsd.plugins.fortune.FortunePlugin │
│ LocationPlugin │ Where in the world is a CALLSIGN's last GPS beacon? │ RegexCommand │ aprsd.plugins.location.LocationPlugin │
│ NotifySeenPlugin │ Notify me when a CALLSIGN is recently seen on APRS-IS │ WatchList │ aprsd.plugins.notify.NotifySeenPlugin │
│ OWMWeatherPlugin │ OpenWeatherMap weather of GPS Beacon location │ RegexCommand │ aprsd.plugins.weather.OWMWeatherPlugin │
│ PingPlugin │ reply with a Pong! │ RegexCommand │ aprsd.plugins.ping.PingPlugin │
│ QueryPlugin │ APRSD Owner command to query messages in the MsgTrack │ RegexCommand │ aprsd.plugins.query.QueryPlugin │
│ TimeOWMPlugin │ Current time of GPS beacon's timezone. Uses OpenWeatherMap │ RegexCommand │ aprsd.plugins.time.TimeOWMPlugin │
│ TimePlugin │ What is the current local time. │ RegexCommand │ aprsd.plugins.time.TimePlugin │
│ USMetarPlugin │ USA only METAR of GPS Beacon location │ RegexCommand │ aprsd.plugins.weather.USMetarPlugin │
│ USWeatherPlugin │ Provide USA only weather of GPS Beacon location │ RegexCommand │ aprsd.plugins.weather.USWeatherPlugin │
│ VersionPlugin │ What is the APRSD Version │ RegexCommand │ aprsd.plugins.version.VersionPlugin │
└───────────────────┴────────────────────────────────────────────────────────────┴──────────────┴─────────────────────────────────────────┘
Pypi.org APRSD Installable Plugin Packages
Install any of the following plugins with 'pip install <Plugin Package Name>'
┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━━━━━┳━━━━━━━━━━━━┓
┃ Plugin Package Name ┃ Description ┃ Version ┃ Released ┃ Installed? ┃
┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━━━━━╇━━━━━━━━━━━━┩
│ 📂 aprsd-stock-plugin │ Ham Radio APRSD Plugin for fetching stock quotes │ 0.1.3 │ Dec 2, 2022 │ No │
│ 📂 aprsd-sentry-plugin │ Ham radio APRSD plugin that does.... │ 0.1.2 │ Dec 2, 2022 │ No │
│ 📂 aprsd-timeopencage-plugin │ APRSD plugin for fetching time based on GPS location │ 0.1.0 │ Dec 2, 2022 │ No │
│ 📂 aprsd-weewx-plugin │ HAM Radio APRSD that reports weather from a weewx weather station. │ 0.1.4 │ Dec 7, 2021 │ Yes │
│ 📂 aprsd-repeat-plugins │ APRSD Plugins for the REPEAT service │ 1.0.12 │ Dec 2, 2022 │ No │
│ 📂 aprsd-telegram-plugin │ Ham Radio APRS APRSD plugin for Telegram IM service │ 0.1.3 │ Dec 2, 2022 │ No │
│ 📂 aprsd-twitter-plugin │ Python APRSD plugin to send tweets │ 0.3.0 │ Dec 7, 2021 │ No │
│ 📂 aprsd-slack-plugin │ Amateur radio APRS daemon which listens for messages and responds │ 1.0.5 │ Dec 18, 2022 │ No │
└──────────────────────────────┴────────────────────────────────────────────────────────────────────┴─────────┴──────────────┴────────────┘
🐍 APRSD Installed 3rd party Plugins 🐍
┏━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓
┃ Package Name ┃ Plugin Name ┃ Version ┃ Type ┃ Plugin Path ┃
┡━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩
│ aprsd-weewx-plugin │ WeewxMQTTPlugin │ 1.0 │ RegexCommand │ aprsd_weewx_plugin.weewx.WeewxMQTTPlugin │
└────────────────────┴─────────────────┴─────────┴──────────────┴──────────────────────────────────────────┘
Installation
=============
To install ``aprsd``, use Pip:
``pip install aprsd``
Example usage
==============
``aprsd -h``
Help
====
::
└─> aprsd -h
Usage: aprsd [OPTIONS] COMMAND [ARGS]...
Options:
--version Show the version and exit.
-h, --help Show this message and exit.
Commands:
check-version Check this version against the latest in pypi.org.
completion Click Completion subcommands
dev Development type subcommands
healthcheck Check the health of the running aprsd server.
list-plugins List the built in plugins available to APRSD.
listen Listen to packets on the APRS-IS Network based on FILTER.
sample-config Generate a sample Config file from aprsd and all...
send-message Send a message to a callsign via APRS_IS.
server Start the aprsd server gateway process.
version Show the APRSD version.
webchat Web based HAM Radio chat program!
Commands
========
Configuration
=============
This command outputs a sample config yml formatted block that you can edit
and use to pass in to ``aprsd`` with ``-c``. By default aprsd looks in ``~/.config/aprsd/aprsd.yml``
``aprsd sample-config``
::
└─> aprsd sample-config
...
server
======
This is the main server command that will listen to APRS-IS servers and
look for incoming commands to the callsign configured in the config file
::
└─[$] > aprsd server --help
Usage: aprsd server [OPTIONS]
Start the aprsd server gateway process.
Options:
--loglevel [CRITICAL|ERROR|WARNING|INFO|DEBUG]
The log level to use for aprsd.log
[default: INFO]
-c, --config TEXT The aprsd config file to use for options.
[default:
/Users/i530566/.config/aprsd/aprsd.yml]
--quiet Don't log to stdout
-f, --flush Flush out all old aged messages on disk.
[default: False]
-h, --help Show this message and exit.
└─> aprsd server
Load config
12/07/2021 03:16:17 PM MainThread INFO APRSD is up to date server.py:51
12/07/2021 03:16:17 PM MainThread INFO APRSD Started version: 2.5.6 server.py:52
12/07/2021 03:16:17 PM MainThread INFO Using CONFIG values: server.py:55
12/07/2021 03:16:17 PM MainThread INFO ham.callsign = WB4BOR server.py:60
12/07/2021 03:16:17 PM MainThread INFO aprs.login = WB4BOR-12 server.py:60
12/07/2021 03:16:17 PM MainThread INFO aprs.password = XXXXXXXXXXXXXXXXXXX server.py:58
12/07/2021 03:16:17 PM MainThread INFO aprs.host = noam.aprs2.net server.py:60
12/07/2021 03:16:17 PM MainThread INFO aprs.port = 14580 server.py:60
12/07/2021 03:16:17 PM MainThread INFO aprs.logfile = /tmp/aprsd.log server.py:60
send-message
============
This command is typically used for development to send another aprsd instance
test messages
::
└─[$] > aprsd send-message -h
Usage: aprsd send-message [OPTIONS] TOCALLSIGN COMMAND...
Send a message to a callsign via APRS_IS.
Options:
--loglevel [CRITICAL|ERROR|WARNING|INFO|DEBUG]
The log level to use for aprsd.log
[default: INFO]
-c, --config TEXT The aprsd config file to use for options.
[default:
/Users/i530566/.config/aprsd/aprsd.yml]
--quiet Don't log to stdout
--aprs-login TEXT What callsign to send the message from.
[env var: APRS_LOGIN]
--aprs-password TEXT the APRS-IS password for APRS_LOGIN [env
var: APRS_PASSWORD]
-n, --no-ack Don't wait for an ack, just sent it to APRS-
IS and bail. [default: False]
-w, --wait-response Wait for a response to the message?
[default: False]
--raw TEXT Send a raw message. Implies --no-ack
-h, --help Show this message and exit.
SEND EMAIL (radio to smtp server)
=================================
::
Received message______________
Raw : KM6XXX>APY400,WIDE1-1,qAO,KM6XXX-1::KM6XXX-9 :-user@host.com test new shortcuts global, radio to pc{29
From : KM6XXX
Message : -user@host.com test new shortcuts global, radio to pc
Msg number : 29
Sending Email_________________
To : user@host.com
Subject : KM6XXX
Body : test new shortcuts global, radio to pc
Sending ack __________________ Tx(3)
Raw : KM6XXX-9>APRS::KM6XXX :ack29
To : KM6XXX
Ack number : 29
RECEIVE EMAIL (imap server to radio)
====================================
::
Sending message_______________ 6(Tx3)
Raw : KM6XXX-9>APRS::KM6XXX :-somebody@gmail.com email from internet to radio{6
To : KM6XXX
Message : -somebody@gmail.com email from internet to radio
Received message______________
Raw : KM6XXX>APY400,WIDE1-1,qAO,KM6XXX-1::KM6XXX-9 :ack6
From : KM6XXX
Message : ack6
Msg number : 0
LOCATION
========
::
Received Message _______________
Raw : KM6XXX-6>APRS,TCPIP*,qAC,T2CAEAST::KM6XXX-14:location{2
From : KM6XXX-6
Message : location
Msg number : 2
Received Message _______________ Complete
Sending Message _______________
Raw : KM6XXX-14>APRS::KM6XXX-6 :KM6XXX-6: 8 Miles E Auburn CA 0' 0,-120.93584 1873.7h ago{2
To : KM6XXX-6
Message : KM6XXX-6: 8 Miles E Auburn CA 0' 0,-120.93584 1873.7h ago
Msg number : 2
Sending Message _______________ Complete
Sending ack _______________
Raw : KM6XXX-14>APRS::KM6XXX-6 :ack2
To : KM6XXX-6
Ack : 2
Sending ack _______________ Complete
AND... ping, fortune, time.....
Web Admin Interface
===================
To start the web admin interface, You have to install gunicorn in your virtualenv that already has aprsd installed.
::
source <path to APRSD's virtualenv>/bin/activate
pip install gunicorn
gunicorn --bind 0.0.0.0:8080 "aprsd.wsgi:app"
The web admin interface will be running on port 8080 on the local machine. http://localhost:8080
Development
===========
* ``git clone git@github.com:craigerl/aprsd.git``
* ``cd aprsd``
* ``make``
Workflow
========
While working aprsd, The workflow is as follows:
* Checkout a new branch to work on by running
``git checkout -b mybranch``
* Make your changes to the code
* Run Tox with the following options:
- ``tox -epep8``
- ``tox -efmt``
- ``tox -p``
* Commit your changes. This will run the pre-commit hooks which does checks too
``git commit``
* Once you are done with all of your commits, then push up the branch to
github with:
``git push -u origin mybranch``
* Create a pull request from your branch so github tests can run and we can do
a code review.
Release
=======
To do release to pypi:
* Tag release with:
``git tag -v1.XX -m "New release"``
* Push release tag:
``git push origin master --tags``
* Do a test build and verify build is valid by running:
``make build``
* Once twine is happy, upload release to pypi:
``make upload``
Docker Container
================
Building
========
There are 2 versions of the container Dockerfile that can be used.
The main Dockerfile, which is for building the official release container
based off of the pip install version of aprsd and the Dockerfile-dev,
which is used for building a container based off of a git branch of
the repo.
Official Build
==============
``docker build -t hemna6969/aprsd:latest .``
Development Build
=================
``docker build -t hemna6969/aprsd:latest -f Dockerfile-dev .``
Running the container
=====================
There is a ``docker-compose.yml`` file in the ``docker/`` directory
that can be used to run your container. To provide the container
an ``aprsd.conf`` configuration file, change your
``docker-compose.yml`` as shown below:
::
volumes:
- $HOME/.config/aprsd:/config
To install plugins at container start time, pass in a list of
comma-separated list of plugins on PyPI using the ``APRSD_PLUGINS``
environment variable in the ``docker-compose.yml`` file. Note that
version constraints may also be provided. For example:
::
environment:
- APRSD_PLUGINS=aprsd-slack-plugin>=1.0.2,aprsd-twitter-plugin
.. badges
.. |pypi| image:: https://badge.fury.io/py/aprsd.svg
:target: https://badge.fury.io/py/aprsd
.. |pytest| image:: https://github.com/craigerl/aprsd/workflows/python/badge.svg
:target: https://github.com/craigerl/aprsd/actions
.. |versions| image:: https://img.shields.io/pypi/pyversions/aprsd.svg
:target: https://pypi.org/pypi/aprsd
.. |slack| image:: https://img.shields.io/badge/slack-@hemna/aprsd-blue.svg?logo=slack
:target: https://hemna.slack.com/app_redirect?channel=C01KQSCP5RP
.. |imports| image:: https://img.shields.io/badge/%20imports-isort-%231674b1?style=flat&labelColor=ef8336
:target: https://timothycrosley.github.io/isort/
.. |issues| image:: https://img.shields.io/github/issues/craigerl/aprsd
.. |commit| image:: https://img.shields.io/github/last-commit/craigerl/aprsd
.. |down| image:: https://static.pepy.tech/personalized-badge/aprsd?period=month&units=international_system&left_color=black&right_color=orange&left_text=Downloads
:target: https://pepy.tech/project/aprsd
.. links
.. _read the docs:
https://aprsd.readthedocs.io

View File

@ -10,10 +10,7 @@
# License for the specific language governing permissions and limitations
# under the License.
from importlib.metadata import PackageNotFoundError, version
import pbr.version
try:
__version__ = version("aprsd")
except PackageNotFoundError:
pass
__version__ = pbr.version.VersionInfo("aprsd").version_string()

View File

@ -1,7 +1,7 @@
import logging
import typing as t
from functools import update_wrapper
import logging
from pathlib import Path
import typing as t
import click
from oslo_config import cfg
@ -11,37 +11,38 @@ from aprsd import conf # noqa: F401
from aprsd.log import log
from aprsd.utils import trace
CONF = cfg.CONF
home = str(Path.home())
DEFAULT_CONFIG_DIR = f'{home}/.config/aprsd/'
DEFAULT_SAVE_FILE = f'{home}/.config/aprsd/aprsd.p'
DEFAULT_CONFIG_FILE = f'{home}/.config/aprsd/aprsd.conf'
DEFAULT_CONFIG_DIR = f"{home}/.config/aprsd/"
DEFAULT_SAVE_FILE = f"{home}/.config/aprsd/aprsd.p"
DEFAULT_CONFIG_FILE = f"{home}/.config/aprsd/aprsd.conf"
F = t.TypeVar('F', bound=t.Callable[..., t.Any])
F = t.TypeVar("F", bound=t.Callable[..., t.Any])
common_options = [
click.option(
'--loglevel',
default='INFO',
"--loglevel",
default="INFO",
show_default=True,
type=click.Choice(
['CRITICAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG'],
["CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG"],
case_sensitive=False,
),
show_choices=True,
help='The log level to use for aprsd.log',
help="The log level to use for aprsd.log",
),
click.option(
'-c',
'--config',
'config_file',
"-c",
"--config",
"config_file",
show_default=True,
default=DEFAULT_CONFIG_FILE,
help='The aprsd config file to use for options.',
help="The aprsd config file to use for options.",
),
click.option(
'--quiet',
"--quiet",
is_flag=True,
default=False,
help="Don't log to stdout",
@ -49,50 +50,11 @@ common_options = [
]
class AliasedGroup(click.Group):
def command(self, *args, **kwargs):
"""A shortcut decorator for declaring and attaching a command to
the group. This takes the same arguments as :func:`command` but
immediately registers the created command with this instance by
calling into :meth:`add_command`.
Copied from `click` and extended for `aliases`.
"""
def decorator(f):
aliases = kwargs.pop('aliases', [])
cmd = click.decorators.command(*args, **kwargs)(f)
self.add_command(cmd)
for alias in aliases:
self.add_command(cmd, name=alias)
return cmd
return decorator
def group(self, *args, **kwargs):
"""A shortcut decorator for declaring and attaching a group to
the group. This takes the same arguments as :func:`group` but
immediately registers the created command with this instance by
calling into :meth:`add_command`.
Copied from `click` and extended for `aliases`.
"""
def decorator(f):
aliases = kwargs.pop('aliases', [])
cmd = click.decorators.group(*args, **kwargs)(f)
self.add_command(cmd)
for alias in aliases:
self.add_command(cmd, name=alias)
return cmd
return decorator
def add_options(options):
def _add_options(func):
for option in reversed(options):
func = option(func)
return func
return _add_options
@ -101,37 +63,34 @@ def process_standard_options(f: F) -> F:
ctx = args[0]
ctx.ensure_object(dict)
config_file_found = True
if kwargs['config_file']:
default_config_files = [kwargs['config_file']]
if kwargs["config_file"]:
default_config_files = [kwargs["config_file"]]
else:
default_config_files = None
try:
CONF(
[],
project='aprsd',
version=aprsd.__version__,
[], project="aprsd", version=aprsd.__version__,
default_config_files=default_config_files,
)
except cfg.ConfigFilesNotFoundError:
config_file_found = False
ctx.obj['loglevel'] = kwargs['loglevel']
ctx.obj["loglevel"] = kwargs["loglevel"]
# ctx.obj["config_file"] = kwargs["config_file"]
ctx.obj['quiet'] = kwargs['quiet']
ctx.obj["quiet"] = kwargs["quiet"]
log.setup_logging(
ctx.obj['loglevel'],
ctx.obj['quiet'],
ctx.obj["loglevel"],
ctx.obj["quiet"],
)
if CONF.trace_enabled:
trace.setup_tracing(['method', 'api'])
trace.setup_tracing(["method", "api"])
if not config_file_found:
LOG = logging.getLogger('APRSD') # noqa: N806
LOG = logging.getLogger("APRSD") # noqa: N806
LOG.error("No config file found!! run 'aprsd sample-config'")
del kwargs['loglevel']
del kwargs['config_file']
del kwargs['quiet']
del kwargs["loglevel"]
del kwargs["config_file"]
del kwargs["quiet"]
return f(*args, **kwargs)
return update_wrapper(t.cast(F, new_func), f)
@ -139,21 +98,20 @@ def process_standard_options(f: F) -> F:
def process_standard_options_no_config(f: F) -> F:
"""Use this as a decorator when config isn't needed."""
def new_func(*args, **kwargs):
ctx = args[0]
ctx.ensure_object(dict)
ctx.obj['loglevel'] = kwargs['loglevel']
ctx.obj['config_file'] = kwargs['config_file']
ctx.obj['quiet'] = kwargs['quiet']
log.setup_logging(
ctx.obj['loglevel'],
ctx.obj['quiet'],
ctx.obj["loglevel"] = kwargs["loglevel"]
ctx.obj["config_file"] = kwargs["config_file"]
ctx.obj["quiet"] = kwargs["quiet"]
log.setup_logging_no_config(
ctx.obj["loglevel"],
ctx.obj["quiet"],
)
del kwargs['loglevel']
del kwargs['config_file']
del kwargs['quiet']
del kwargs["loglevel"]
del kwargs["config_file"]
del kwargs["quiet"]
return f(*args, **kwargs)
return update_wrapper(t.cast(F, new_func), f)

314
aprsd/client.py Normal file
View File

@ -0,0 +1,314 @@
import abc
import logging
import time
import aprslib
from aprslib.exceptions import LoginError
from oslo_config import cfg
from aprsd import exception
from aprsd.clients import aprsis, kiss
from aprsd.packets import core, packet_list
from aprsd.utils import trace
CONF = cfg.CONF
LOG = logging.getLogger("APRSD")
TRANSPORT_APRSIS = "aprsis"
TRANSPORT_TCPKISS = "tcpkiss"
TRANSPORT_SERIALKISS = "serialkiss"
# Main must create this from the ClientFactory
# object such that it's populated with the
# Correct config
factory = None
class Client(metaclass=trace.TraceWrapperMetaclass):
    """Singleton client class that constructs the aprslib connection."""

    _instance = None
    # Lazily-built transport object; subclasses create it in setup_connection().
    _client = None

    connected = False
    server_string = None
    filter = None

    def __new__(cls, *args, **kwargs):
        """This magic turns this into a singleton."""
        if cls._instance is None:
            cls._instance = super().__new__(cls)
            # Put any initialization here.
        return cls._instance

    def set_filter(self, filter):
        # Remember the APRS-IS filter and, if a client already exists,
        # push it to the live connection immediately.
        self.filter = filter
        if self._client:
            self._client.set_filter(filter)

    @property
    def client(self):
        # Lazily build the underlying client on first access, applying any
        # previously-requested filter once the connection exists.
        if not self._client:
            LOG.info("Creating APRS client")
            self._client = self.setup_connection()
            if self.filter:
                LOG.info("Creating APRS client filter")
                self._client.set_filter(self.filter)
        return self._client

    def send(self, packet: core.Packet):
        # Record the outgoing packet in the TX packet list, then transmit it.
        packet_list.PacketList().tx(packet)
        self.client.send(packet)

    def reset(self):
        """Call this to force a rebuild/reconnect."""
        if self._client:
            # Deleting the instance attribute exposes the class attribute
            # (None), so the next `self.client` access rebuilds the client.
            del self._client
        else:
            LOG.warning("Client not initialized, nothing to reset.")

        # Recreate the client
        LOG.info(f"Creating new client {self.client}")

    @abc.abstractmethod
    def setup_connection(self):
        # Subclasses build and return the concrete transport object.
        pass

    @staticmethod
    @abc.abstractmethod
    def is_enabled():
        # Subclasses report whether this transport is enabled in config.
        pass

    @staticmethod
    @abc.abstractmethod
    def transport():
        # Subclasses return one of the TRANSPORT_* constants.
        pass

    @abc.abstractmethod
    def decode_packet(self, *args, **kwargs):
        # Subclasses turn raw input into an aprsd packet object.
        pass
class APRSISClient(Client, metaclass=trace.TraceWrapperMetaclass):
    """APRS-IS network client built on the modified aprslib client."""

    _client = None

    @staticmethod
    def is_enabled():
        # Defaults to True if the enabled flag is non existent
        try:
            return CONF.aprs_network.enabled
        except KeyError:
            return False

    @staticmethod
    def is_configured():
        """Validate the aprs_network.* config options.

        Raises MissingConfigOptionException when the transport is enabled
        but login, password, or host is unset.  Returns True otherwise
        (note: also returns True when the transport is disabled).
        """
        if APRSISClient.is_enabled():
            # Ensure that the config vars are correctly set
            if not CONF.aprs_network.login:
                LOG.error("Config aprs_network.login not set.")
                raise exception.MissingConfigOptionException(
                    "aprs_network.login is not set.",
                )
            if not CONF.aprs_network.password:
                LOG.error("Config aprs_network.password not set.")
                raise exception.MissingConfigOptionException(
                    "aprs_network.password is not set.",
                )
            if not CONF.aprs_network.host:
                LOG.error("Config aprs_network.host not set.")
                raise exception.MissingConfigOptionException(
                    "aprs_network.host is not set.",
                )
            return True
        return True

    def is_alive(self):
        # Delegate liveness to the aprslib client when one exists.
        if self._client:
            return self._client.is_alive()
        else:
            return False

    @staticmethod
    def transport():
        return TRANSPORT_APRSIS

    def decode_packet(self, *args, **kwargs):
        """APRS lib already decodes this."""
        return core.Packet.factory(args[0])

    def setup_connection(self):
        """Connect to APRS-IS, retrying until a connection succeeds.

        The delay between failed attempts grows by one second per failure
        and is capped at 5 seconds.
        """
        user = CONF.aprs_network.login
        password = CONF.aprs_network.password
        host = CONF.aprs_network.host
        port = CONF.aprs_network.port
        connected = False
        backoff = 1
        aprs_client = None
        while not connected:
            try:
                LOG.info("Creating aprslib client")
                aprs_client = aprsis.Aprsdis(user, passwd=password, host=host, port=port)
                # Force the log to be the same
                aprs_client.logger = LOG
                aprs_client.connect()
                connected = True
                backoff = 1
            except LoginError as e:
                LOG.error(f"Failed to login to APRS-IS Server '{e}'")
                connected = False
                time.sleep(backoff)
            except Exception as e:
                LOG.error(f"Unable to connect to APRS-IS server. '{e}' ")
                connected = False
                time.sleep(backoff)
            # Don't allow the backoff to go to infinity.
            if backoff > 5:
                backoff = 5
            else:
                backoff += 1
            continue
        LOG.debug(f"Logging in to APRS-IS with user '{user}'")
        self._client = aprs_client
        return aprs_client
class KISSClient(Client, metaclass=trace.TraceWrapperMetaclass):
    """KISS TNC client (serial or TCP transport)."""

    _client = None

    @staticmethod
    def is_enabled():
        """Return if tcp or serial KISS is enabled."""
        if CONF.kiss_serial.enabled:
            return True
        if CONF.kiss_tcp.enabled:
            return True
        return False

    @staticmethod
    def is_configured():
        """Validate the enabled KISS transport's config options.

        Raises MissingConfigOptionException when serial is enabled without a
        device, or TCP is enabled without a host.  Returns False when no
        KISS transport is enabled.
        """
        # Ensure that the config vars are correctly set
        if KISSClient.is_enabled():
            transport = KISSClient.transport()
            if transport == TRANSPORT_SERIALKISS:
                if not CONF.kiss_serial.device:
                    LOG.error("KISS serial enabled, but no device is set.")
                    raise exception.MissingConfigOptionException(
                        "kiss_serial.device is not set.",
                    )
            elif transport == TRANSPORT_TCPKISS:
                if not CONF.kiss_tcp.host:
                    LOG.error("KISS TCP enabled, but no host is set.")
                    raise exception.MissingConfigOptionException(
                        "kiss_tcp.host is not set.",
                    )
            return True
        return False

    def is_alive(self):
        if self._client:
            return self._client.is_alive()
        else:
            return False

    @staticmethod
    def transport():
        # Serial takes precedence when both transports are enabled.
        if CONF.kiss_serial.enabled:
            return TRANSPORT_SERIALKISS
        if CONF.kiss_tcp.enabled:
            return TRANSPORT_TCPKISS
        # NOTE(review): implicitly returns None when neither is enabled.

    def decode_packet(self, *args, **kwargs):
        """We get a frame, which has to be decoded."""
        LOG.debug(f"kwargs {kwargs}")
        frame = kwargs["frame"]
        LOG.debug(f"Got an APRS Frame '{frame}'")
        # try and nuke the * from the fromcall sign.
        # frame.header._source._ch = False
        # payload = str(frame.payload.decode())
        # msg = f"{str(frame.header)}:{payload}"
        # msg = frame.tnc2
        # LOG.debug(f"Decoding {msg}")

        raw = aprslib.parse(str(frame))
        packet = core.Packet.factory(raw)
        if isinstance(packet, core.ThirdParty):
            # Unwrap third-party frames and hand back the inner packet.
            return packet.subpacket
        else:
            return packet

    def setup_connection(self):
        # NOTE(review): KISS3Client takes no arguments here — presumably it
        # reads the kiss_serial/kiss_tcp config itself; confirm in aprsd.clients.kiss.
        self._client = kiss.KISS3Client()
        return self._client
class ClientFactory:
    """Singleton registry mapping transport keys to client classes."""

    _instance = None

    def __new__(cls, *args, **kwargs):
        """Create (or return) the single shared factory instance."""
        instance = cls._instance
        if instance is None:
            instance = super().__new__(cls)
            cls._instance = instance
        return instance

    def __init__(self):
        # Note: every ClientFactory() call re-runs __init__ on the shared
        # instance, which resets the builder registry.
        self._builders = {}

    def register(self, key, builder):
        """Associate a transport key with a client builder/class."""
        self._builders[key] = builder

    def create(self, key=None):
        """Instantiate a client for ``key``.

        When ``key`` is falsy, auto-select APRS-IS first, then a KISS
        transport.  Raises ValueError when no builder is registered.
        """
        if not key:
            if APRSISClient.is_enabled():
                key = TRANSPORT_APRSIS
            elif KISSClient.is_enabled():
                key = KISSClient.transport()
        builder = self._builders.get(key)
        if not builder:
            raise ValueError(key)
        return builder()

    def is_client_enabled(self):
        """Make sure at least one client is enabled."""
        any_enabled = False
        for builder in self._builders.values():
            try:
                any_enabled |= builder.is_enabled()
            except KeyError:
                pass
        return any_enabled

    def is_client_configured(self):
        """Return True when at least one registered client is configured.

        Returns False immediately when any builder reports a missing or
        bogus config option.
        """
        any_configured = False
        for builder in self._builders.values():
            try:
                any_configured |= builder.is_configured()
            except KeyError:
                pass
            except exception.MissingConfigOptionException as ex:
                LOG.error(ex.message)
                return False
            except exception.ConfigOptionBogusDefaultException as ex:
                LOG.error(ex.message)
                return False
        return any_configured

    @staticmethod
    def setup():
        """Create and register all possible client objects."""
        global factory
        factory = ClientFactory()
        factory.register(TRANSPORT_APRSIS, APRSISClient)
        factory.register(TRANSPORT_TCPKISS, KISSClient)
        factory.register(TRANSPORT_SERIALKISS, KISSClient)

View File

@ -1,5 +0,0 @@
# define the client transports here
TRANSPORT_APRSIS = 'aprsis'
TRANSPORT_TCPKISS = 'tcpkiss'
TRANSPORT_SERIALKISS = 'serialkiss'
TRANSPORT_FAKE = 'fake'

View File

@ -1,141 +0,0 @@
import logging
import threading
from typing import Callable
import timeago
import wrapt
from loguru import logger
from oslo_config import cfg
from aprsd.client import drivers # noqa - ensure drivers are registered
from aprsd.client.drivers.registry import DriverRegistry
from aprsd.packets import core
from aprsd.utils import keepalive_collector
CONF = cfg.CONF
LOG = logging.getLogger('APRSD')
LOGU = logger
class APRSDClient:
    """APRSD client class.

    This is a singleton class that provides a single instance of the APRSD client.
    It is responsible for connecting to the appropriate APRSD client driver based on
    the configuration.
    """

    _instance = None
    driver = None
    lock = threading.Lock()
    filter = None
    # Fix: tracks whether keepalive_check() has run at least once so the
    # very first check never resets a freshly-created connection.
    # Previously this attribute was only assigned inside keepalive_check(),
    # so the first `self._checks` read raised AttributeError.
    _checks = False

    def __new__(cls, *args, **kwargs):
        """This magic turns this into a singleton."""
        if cls._instance is None:
            cls._instance = super().__new__(cls)
            # Register so the keepalive collector periodically calls
            # keepalive_check()/keepalive_log() on this client.
            keepalive_collector.KeepAliveCollector().register(cls)
        return cls._instance

    def __init__(self):
        self.connected = False
        self.login_status = {
            'success': False,
            'message': None,
        }
        # Only build the driver once; __init__ re-runs on every
        # APRSDClient() call because of the singleton __new__.
        if not self.driver:
            self.driver = DriverRegistry().get_driver()
            self.driver.setup_connection()

    def stats(self, serializable=False) -> dict:
        """Return the driver's stats dict (empty when no driver exists)."""
        stats = {}
        if self.driver:
            stats = self.driver.stats(serializable=serializable)
        return stats

    @property
    def is_enabled(self):
        # Whether the selected transport is enabled in config.
        if not self.driver:
            return False
        return self.driver.is_enabled()

    @property
    def is_configured(self):
        # Whether the selected transport has all required config options.
        if not self.driver:
            return False
        return self.driver.is_configured()

    @property
    def login_success(self):
        if not self.driver:
            return False
        return self.driver.login_success

    @property
    def login_failure(self):
        if not self.driver:
            return None
        return self.driver.login_failure

    def set_filter(self, filter):
        """Remember the packet filter and push it to the driver, if any."""
        self.filter = filter
        if not self.driver:
            return
        self.driver.set_filter(filter)

    def get_filter(self):
        if not self.driver:
            return None
        return self.driver.filter

    def is_alive(self):
        return self.driver.is_alive()

    def close(self):
        if not self.driver:
            return
        self.driver.close()

    @wrapt.synchronized(lock)
    def reset(self):
        """Call this to force a rebuild/reconnect."""
        LOG.info('Resetting client connection.')
        if self.driver:
            self.driver.close()
            self.driver.setup_connection()
            if self.filter:
                self.driver.set_filter(self.filter)
        else:
            LOG.warning('Client not initialized, nothing to reset.')

    def send(self, packet: core.Packet) -> bool:
        return self.driver.send(packet)

    # For the keepalive collector
    def keepalive_check(self):
        # Don't check the first time through.
        if not self.driver.is_alive and self._checks:
            LOG.warning("Resetting client. It's not alive.")
            self.reset()
        self._checks = True

    # For the keepalive collector
    def keepalive_log(self):
        # Render the driver's last-keepalive timestamp as a relative time.
        if ka := self.driver.keepalive:
            keepalive = timeago.format(ka)
        else:
            keepalive = 'N/A'
        LOGU.opt(colors=True).info(f'<green>Client keepalive {keepalive}</green>')

    def consumer(self, callback: Callable, raw: bool = False):
        # Hand packet consumption straight to the driver.
        return self.driver.consumer(callback=callback, raw=raw)

    def decode_packet(self, *args, **kwargs) -> core.Packet:
        return self.driver.decode_packet(*args, **kwargs)

View File

@ -1,10 +0,0 @@
# All client drivers must be registered here
from aprsd.client.drivers.aprsis import APRSISDriver
from aprsd.client.drivers.fake import APRSDFakeDriver
from aprsd.client.drivers.registry import DriverRegistry
from aprsd.client.drivers.tcpkiss import TCPKISSDriver

# Importing this package registers every built-in driver with the shared
# DriverRegistry so a driver can later be selected from configuration.
driver_registry = DriverRegistry()
driver_registry.register(APRSDFakeDriver)
driver_registry.register(APRSISDriver)
driver_registry.register(TCPKISSDriver)

View File

@ -1,205 +0,0 @@
import datetime
import logging
import time
from typing import Callable
from aprslib.exceptions import LoginError
from loguru import logger
from oslo_config import cfg
from aprsd import client, exception
from aprsd.client.drivers.lib.aprslib import APRSLibClient
from aprsd.packets import core
CONF = cfg.CONF
LOG = logging.getLogger('APRSD')
LOGU = logger
# class APRSISDriver(metaclass=trace.TraceWrapperMetaclass):
class APRSISDriver:
    """This is the APRS-IS driver for the APRSD client.
    This driver uses our modified aprslib.IS class to connect to the APRS-IS server.

    Configuration comes from the ``aprs_network`` oslo.config group
    (login, password, host, port, enabled).
    """

    # Underlying APRSLibClient instance; created in setup_connection().
    _client = None
    # NOTE(review): _checks appears unused within this class — confirm
    # against the rest of the project before removing.
    _checks = False

    def __init__(self):
        # Connection is considered stale after 2 minutes of server silence
        # (see _is_stale_connection()).
        max_timeout = {'hours': 0.0, 'minutes': 2, 'seconds': 0}
        self.max_delta = datetime.timedelta(**max_timeout)
        # Outcome of the most recent login attempt.
        self.login_status = {
            'success': False,
            'message': None,
        }

    @staticmethod
    def is_enabled():
        # Defaults to True if the enabled flag is non existent
        try:
            return CONF.aprs_network.enabled
        except KeyError:
            return False

    @staticmethod
    def is_configured():
        # Validate required options.  Raises MissingConfigOptionException
        # when the driver is enabled but login/password/host is unset.
        if APRSISDriver.is_enabled():
            # Ensure that the config vars are correctly set
            if not CONF.aprs_network.login:
                LOG.error('Config aprs_network.login not set.')
                raise exception.MissingConfigOptionException(
                    'aprs_network.login is not set.',
                )
            if not CONF.aprs_network.password:
                LOG.error('Config aprs_network.password not set.')
                raise exception.MissingConfigOptionException(
                    'aprs_network.password is not set.',
                )
            if not CONF.aprs_network.host:
                LOG.error('Config aprs_network.host not set.')
                raise exception.MissingConfigOptionException(
                    'aprs_network.host is not set.',
                )
            return True
        # Driver disabled: nothing to validate, report configured.
        return True

    @property
    def is_alive(self):
        # Alive means the aprslib client reports alive AND we have heard
        # from the server within max_delta.
        if not self._client:
            LOG.warning(f'APRS_CLIENT {self._client} alive? NO!!!')
            return False
        return self._client.is_alive() and not self._is_stale_connection()

    def close(self):
        # Stop the consumer loop, then close the socket.
        if self._client:
            self._client.stop()
            self._client.close()

    def send(self, packet: core.Packet) -> bool:
        return self._client.send(packet)

    def setup_connection(self):
        """Connect and log in to the configured APRS-IS server.

        Retries with a sleep capped at 5 seconds, updating
        self.connected and self.login_status on each attempt.

        NOTE(review): with retries=3 the loop breaks when retry_count
        reaches 3 BEFORE trying, so at most 2 attempts are made; the
        backoff only increases in the generic-Exception branch, not on
        LoginError.  Confirm whether this is intended.
        """
        user = CONF.aprs_network.login
        password = CONF.aprs_network.password
        host = CONF.aprs_network.host
        port = CONF.aprs_network.port
        self.connected = False
        backoff = 1
        retries = 3
        retry_count = 0
        while not self.connected:
            retry_count += 1
            if retry_count >= retries:
                break
            try:
                LOG.info(
                    f'Creating aprslib client({host}:{port}) and logging in {user}.'
                )
                self._client = APRSLibClient(
                    user, passwd=password, host=host, port=port
                )
                # Force the log to be the same
                self._client.logger = LOG
                self._client.connect()
                self.connected = self.login_status['success'] = True
                self.login_status['message'] = self._client.server_string
                backoff = 1
            except LoginError as e:
                LOG.error(f"Failed to login to APRS-IS Server '{e}'")
                self.connected = self.login_status['success'] = False
                self.login_status['message'] = (
                    e.message if hasattr(e, 'message') else str(e)
                )
                LOG.error(self.login_status['message'])
                time.sleep(backoff)
            except Exception as e:
                LOG.error(f"Unable to connect to APRS-IS server. '{e}' ")
                self.connected = self.login_status['success'] = False
                self.login_status['message'] = getattr(e, 'message', str(e))
                time.sleep(backoff)
                # Don't allow the backoff to go to inifinity.
                if backoff > 5:
                    backoff = 5
                else:
                    backoff += 1
                continue

    def set_filter(self, filter):
        # Pass the APRS-IS server-side filter string through to aprslib.
        self._client.set_filter(filter)

    def login_success(self) -> bool:
        return self.login_status.get('success', False)

    def login_failure(self) -> str:
        # Returns the last login message (server string on success,
        # error text on failure) or None.
        return self.login_status.get('message', None)

    @property
    def filter(self):
        return self._client.filter

    @property
    def server_string(self):
        return self._client.server_string

    @property
    def keepalive(self):
        # datetime of the last activity seen from the server.
        return self._client.aprsd_keepalive

    def _is_stale_connection(self):
        # True when the server has been silent longer than max_delta.
        delta = datetime.datetime.now() - self._client.aprsd_keepalive
        if delta > self.max_delta:
            LOG.error(f'Connection is stale, last heard {delta} ago.')
            return True
        return False

    @staticmethod
    def transport():
        return client.TRANSPORT_APRSIS

    def decode_packet(self, *args, **kwargs):
        """APRS lib already decodes this."""
        return core.factory(args[0])

    def consumer(self, callback: Callable, raw: bool = False):
        """Feed received packets (raw lines when raw=True) to callback.

        Delegates to the aprslib consumer loop in non-blocking,
        non-immortal mode; any exception is logged and re-raised.
        """
        if self._client:
            try:
                self._client.consumer(
                    callback,
                    blocking=False,
                    immortal=False,
                    raw=raw,
                )
            except Exception as e:
                LOG.error(e)
                LOG.info(e.__cause__)
                raise e
        else:
            LOG.warning('client is None, might be resetting.')
            self.connected = False

    def stats(self, serializable=False) -> dict:
        """Return connection statistics as a dict.

        When serializable=True the keepalive datetime is isoformatted so
        the dict can be JSON encoded.
        """
        stats = {}
        if self.is_configured():
            if self._client:
                keepalive = self._client.aprsd_keepalive
                server_string = self._client.server_string
                if serializable:
                    keepalive = keepalive.isoformat()
                filter = self.filter
            else:
                keepalive = 'None'
                server_string = 'None'
                filter = 'None'
            stats = {
                'connected': self.is_alive,
                'filter': filter,
                'login_status': self.login_status,
                'connection_keepalive': keepalive,
                'server_string': server_string,
                'transport': self.transport(),
            }
        return stats

View File

@ -1,121 +0,0 @@
import datetime
import logging
import threading
import time
from typing import Callable
import aprslib
import wrapt
from oslo_config import cfg
from aprsd import conf # noqa
from aprsd.packets import core
from aprsd.utils import trace
CONF = cfg.CONF
LOG = logging.getLogger('APRSD')
class APRSDFakeDriver(metaclass=trace.TraceWrapperMetaclass):
    """Fake client for testing.

    Implements the ClientDriver protocol without any network I/O:
    send() only logs, and consumer() emits one canned packet per call.
    """

    # flag to tell us to stop
    thread_stop = False
    # date for last time we heard from the server
    aprsd_keepalive = datetime.datetime.now()
    lock = threading.Lock()
    path = []

    def __init__(self):
        LOG.info('Starting APRSDFakeDriver driver.')
        # Default digipeater path used when "sending".
        self.path = ['WIDE1-1', 'WIDE2-1']

    @staticmethod
    def is_enabled():
        # Enabled only when fake_client.enabled is truthy in the config.
        if CONF.fake_client.enabled:
            return True
        return False

    @staticmethod
    def is_configured():
        # BUG FIX: previously returned the is_enabled function object
        # itself (always truthy) instead of calling it.
        return APRSDFakeDriver.is_enabled()

    def is_alive(self):
        """If the connection is alive or not."""
        return not self.thread_stop

    def close(self):
        self.thread_stop = True
        LOG.info('Shutdown APRSDFakeDriver driver.')

    def setup_connection(self):
        # It's fake....
        pass

    def set_filter(self, filter: str) -> None:
        # Filtering is meaningless for the fake driver.
        pass

    def login_success(self) -> bool:
        return True

    def login_failure(self) -> str:
        return None

    @wrapt.synchronized(lock)
    def send(self, packet: core.Packet):
        """Send an APRS Message object.

        Nothing is transmitted; the payload is built exactly as a real
        driver would and then logged.
        """
        LOG.info(f'Sending packet: {packet}')
        payload = None
        if isinstance(packet, core.Packet):
            packet.prepare()
            payload = packet.payload.encode('US-ASCII')
        else:
            # Legacy message-style object: build the payload by hand.
            msg_payload = f'{packet.raw}{{{str(packet.msgNo)}'
            payload = (
                ':{:<9}:{}'.format(
                    packet.to_call,
                    msg_payload,
                )
            ).encode('US-ASCII')
        LOG.debug(
            f"FAKE::Send '{payload}' TO '{packet.to_call}' From "
            f'\'{packet.from_call}\' with PATH "{self.path}"',
        )

    def consumer(self, callback: Callable, raw: bool = False):
        """Emit one canned packet (or raw line) to callback, then sleep 1s."""
        LOG.debug('Start non blocking FAKE consumer')
        # Generate packets here?
        # NOTE(review): the addressee field padding may have been lost in
        # transit; APRS message addressees are 9 chars wide — confirm.
        raw_str = 'GTOWN>APDW16,WIDE1-1,WIDE2-1:}KM6LYW-9>APZ100,TCPIP,GTOWN*::KM6LYW :KM6LYW: 19 Miles SW'
        self.aprsd_keepalive = datetime.datetime.now()
        if raw:
            callback(raw=raw_str)
        else:
            pkt_raw = aprslib.parse(raw_str)
            pkt = core.factory(pkt_raw)
            callback(packet=pkt)
        LOG.debug(f'END blocking FAKE consumer {self}')
        # Throttle fake packet generation to roughly one per second.
        time.sleep(1)

    def decode_packet(self, *args, **kwargs):
        """APRS lib already decodes this."""
        if not kwargs:
            return None
        if kwargs.get('packet'):
            return kwargs.get('packet')
        if kwargs.get('raw'):
            pkt_raw = aprslib.parse(kwargs.get('raw'))
            pkt = core.factory(pkt_raw)
            return pkt

    def stats(self, serializable: bool = False) -> dict:
        return {
            'driver': self.__class__.__name__,
            'is_alive': self.is_alive(),
            'transport': 'fake',
        }

View File

@ -1,86 +0,0 @@
from typing import Callable, Protocol, runtime_checkable
from aprsd.packets import core
from aprsd.utils import singleton, trace
@runtime_checkable
class ClientDriver(Protocol):
    """Protocol for APRSD client drivers.

    This protocol defines the methods that must be
    implemented by APRSD client drivers.
    """

    # BUG FIX: staticmethods must not declare a `self` parameter; the
    # concrete drivers implement these as zero-argument static methods.
    @staticmethod
    def is_enabled() -> bool:
        pass

    @staticmethod
    def is_configured() -> bool:
        pass

    def is_alive(self) -> bool:
        pass

    def close(self) -> None:
        pass

    def send(self, packet: core.Packet) -> bool:
        pass

    def setup_connection(self) -> None:
        pass

    def set_filter(self, filter: str) -> None:
        pass

    def login_success(self) -> bool:
        pass

    def login_failure(self) -> str:
        pass

    def consumer(self, callback: Callable, raw: bool = False) -> None:
        pass

    def decode_packet(self, *args, **kwargs) -> core.Packet:
        pass

    def stats(self, serializable: bool = False) -> dict:
        pass
@singleton
class DriverRegistry(metaclass=trace.TraceWrapperMetaclass):
    """Registry for APRSD client drivers.

    Driver classes register themselves here at import time; when aprsd
    needs a client, get_driver() instantiates the first registered
    driver that is both enabled and configured.
    """

    def __init__(self):
        # Registered driver classes, kept in registration order.
        self.drivers = []

    def register(self, driver: Callable):
        # Reject anything that does not satisfy the ClientDriver protocol.
        if not isinstance(driver, ClientDriver):
            raise ValueError('Driver must be of ClientDriver type')
        self.drivers.append(driver)

    def unregister(self, driver: Callable):
        # Removing a driver that was never registered is an error.
        if driver not in self.drivers:
            raise ValueError(f'Driver {driver} not found')
        self.drivers.remove(driver)

    def get_driver(self) -> ClientDriver:
        """Get the first enabled driver."""
        for candidate in self.drivers:
            if candidate.is_enabled() and candidate.is_configured():
                return candidate()
        raise ValueError('No enabled driver found')

View File

@ -1,408 +0,0 @@
"""
APRSD KISS Client Driver using native KISS implementation.
This module provides a KISS client driver for APRSD using the new
non-asyncio KISSInterface implementation.
"""
import datetime
import logging
import select
import socket
import time
from typing import Any, Callable, Dict
import aprslib
from ax253 import frame as ax25frame
from kiss import constants as kiss_constants
from kiss import util as kissutil
from kiss.kiss import Command
from oslo_config import cfg
from aprsd import ( # noqa
client,
conf, # noqa
exception,
)
from aprsd.packets import core
CONF = cfg.CONF
LOG = logging.getLogger('APRSD')
def handle_fend(buffer: bytes, strip_df_start: bool = True) -> bytes:
    """
    Handle FEND (end of frame) encountered in a KISS data stream.

    :param buffer: the buffer containing the frame
    :param strip_df_start: remove leading null byte (DATA_FRAME opcode)
    :return: the bytes of the frame without escape characters or frame
             end markers (FEND)
    """
    frame = kissutil.recover_special_codes(kissutil.strip_nmea(bytes(buffer)))
    if strip_df_start:
        frame = kissutil.strip_df_start(frame)
    # Per-frame hex dump is diagnostic output, not a warning condition.
    LOG.debug(f'handle_fend {" ".join(f"{b:02X}" for b in bytes(frame))}')
    return bytes(frame)
# class TCPKISSDriver(metaclass=trace.TraceWrapperMetaclass):
class TCPKISSDriver:
    """APRSD client driver for TCP KISS connections."""

    # Class level attributes required by Client protocol
    packets_received = 0
    packets_sent = 0
    last_packet_sent = None
    last_packet_received = None
    keepalive = None
    client_name = None
    socket = None
    # select() timeout in seconds for the read loop
    select_timeout = 1
    # Default digipeater path, set from CONF.kiss_tcp.path in setup_connection()
    path = None
    # BUG FIX: read_frame() referenced self.auto_reconnect, which was
    # never defined, so the reconnect paths raised AttributeError.
    # False preserves the "raise/break on drop" control flow.
    auto_reconnect = False

    def __init__(self):
        """Initialize the KISS client state.

        The socket itself is created later in setup_connection()/connect().
        """
        super().__init__()
        self._connected = False
        self.keepalive = datetime.datetime.now()
        self._running = False
        # This is initialized in setup_connection()
        self.socket = None

    @property
    def transport(self) -> str:
        return client.TRANSPORT_TCPKISS

    @classmethod
    def is_enabled(cls) -> bool:
        """Check if KISS is enabled in configuration.

        Returns:
            bool: True if TCP KISS is enabled
        """
        return CONF.kiss_tcp.enabled

    @staticmethod
    def is_configured():
        # Ensure that the config vars are correctly set.
        # Raises MissingConfigOptionException when enabled but host unset.
        if TCPKISSDriver.is_enabled():
            if not CONF.kiss_tcp.host:
                LOG.error('KISS TCP enabled, but no host is set.')
                raise exception.MissingConfigOptionException(
                    'kiss_tcp.host is not set.',
                )
            return True
        return False

    @property
    def is_alive(self) -> bool:
        """Check if the client is connected.

        Returns:
            bool: True if connected to KISS TNC, False otherwise
        """
        return self._connected

    def close(self):
        """Close the connection."""
        self.stop()

    def send(self, packet: core.Packet):
        """Send an APRS packet.

        Args:
            packet: APRS packet to send (Packet or Message object)

        Raises:
            Exception: If not connected or send fails
        """
        if not self.socket:
            raise Exception('KISS interface not initialized')
        payload = None
        path = self.path
        packet.prepare()
        payload = packet.payload.encode('US-ASCII')
        # A packet may carry its own digi path, overriding the default.
        if packet.path:
            path = packet.path
        LOG.debug(
            f"KISS Send '{payload}' TO '{packet.to_call}' From "
            f"'{packet.from_call}' with PATH '{path}'",
        )
        frame = ax25frame.Frame.ui(
            destination='APZ100',
            # destination=packet.to_call,
            source=packet.from_call,
            path=path,
            info=payload,
        )
        # now escape the frame special characters
        frame_escaped = kissutil.escape_special_codes(bytes(frame))
        # and finally wrap the frame in KISS protocol
        command = Command.DATA_FRAME
        frame_kiss = b''.join(
            [kiss_constants.FEND, command.value, frame_escaped, kiss_constants.FEND]
        )
        self.socket.send(frame_kiss)
        # Update last packet sent time
        self.last_packet_sent = datetime.datetime.now()
        # Increment packets sent counter
        self.packets_sent += 1

    def setup_connection(self):
        """Set up the KISS interface."""
        if not self.is_enabled():
            LOG.error('KISS is not enabled in configuration')
            return
        try:
            # Configure for TCP KISS
            if self.is_enabled():
                LOG.info(
                    f'KISS TCP Connection to {CONF.kiss_tcp.host}:{CONF.kiss_tcp.port}'
                )
                self.path = CONF.kiss_tcp.path
                self.connect()
                if self._connected:
                    LOG.info('KISS interface initialized')
                else:
                    LOG.error('Failed to connect to KISS interface')
        except Exception as ex:
            LOG.error('Failed to initialize KISS interface')
            LOG.exception(ex)
            self._connected = False

    def set_filter(self, filter_text: str):
        """Set packet filter (not implemented for KISS).

        Args:
            filter_text: Filter specification (ignored for KISS)
        """
        # KISS doesn't support filtering at the TNC level
        pass

    @property
    def filter(self) -> str:
        """Get packet filter (not implemented for KISS).

        Returns:
            str: Empty string (not implemented for KISS)
        """
        return ''

    def login_success(self) -> bool:
        """There is no login for KISS; success simply means connected."""
        if not self._connected:
            return False
        return True

    def login_failure(self) -> str:
        """There is no login for KISS."""
        return 'Login successful'

    def consumer(self, callback: Callable, raw: bool = False):
        """Consume frames forever, handing each to callback(frame=...).

        Reconnects (with a 1s pause between attempts) whenever the
        connection is down.  Runs until stop() clears self._running.
        """
        self._running = True
        while self._running:
            # Ensure connection
            if not self._connected:
                if not self.connect():
                    time.sleep(1)
                    continue
            # Read frame
            frame = self.read_frame()
            if frame:
                # Demoted from WARNING: this fires for every frame.
                LOG.debug(f'GOT FRAME: {frame} calling {callback}')
                kwargs = {
                    'frame': frame,
                }
                callback(**kwargs)

    def decode_packet(self, *args, **kwargs) -> core.Packet:
        """Decode a packet from an AX.25 frame.

        Args:
            frame: Received AX.25 frame (keyword argument)

        Returns None when no frame was supplied or parsing fails.
        """
        frame = kwargs.get('frame')
        if not frame:
            LOG.warning('No frame received to decode?!?!')
            return None
        # Demoted from WARNING: per-frame diagnostic output.
        LOG.debug(f'FRAME: {str(frame)}')
        try:
            aprslib_frame = aprslib.parse(str(frame))
            return core.factory(aprslib_frame)
        except Exception as e:
            LOG.error(f'Error decoding packet: {e}')
            return None

    def stop(self):
        """Stop the KISS interface."""
        self._running = False
        self._connected = False
        if self.socket:
            try:
                self.socket.close()
            except Exception:
                pass

    def stats(self, serializable: bool = False) -> Dict[str, Any]:
        """Get client statistics.

        Args:
            serializable: isoformat the keepalive timestamp when True.

        Returns:
            Dict containing client statistics
        """
        if serializable:
            keepalive = self.keepalive.isoformat()
        else:
            keepalive = self.keepalive
        stats = {
            'client': self.__class__.__name__,
            'transport': self.transport,
            'connected': self._connected,
            'path': self.path,
            'packets_sent': self.packets_sent,
            'packets_received': self.packets_received,
            'last_packet_sent': self.last_packet_sent,
            'last_packet_received': self.last_packet_received,
            'connection_keepalive': keepalive,
            'host': CONF.kiss_tcp.host,
            'port': CONF.kiss_tcp.port,
        }
        return stats

    def connect(self) -> bool:
        """Establish TCP connection to the KISS host.

        Returns:
            bool: True if connection successful, False otherwise
        """
        try:
            if self.socket:
                try:
                    self.socket.close()
                except Exception:
                    pass
            self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            self.socket.settimeout(5.0)  # 5 second timeout for connection
            self.socket.connect((CONF.kiss_tcp.host, CONF.kiss_tcp.port))
            self.socket.settimeout(0.1)  # Reset to shorter timeout for reads
            self._connected = True
            self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
            # MACOS doesn't have TCP_KEEPIDLE
            if hasattr(socket, 'TCP_KEEPIDLE'):
                self.socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, 1)
            self.socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, 3)
            self.socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 5)
            return True
        except Exception as e:
            # Collapsed the two identical except blocks (ConnectionError
            # and Exception had byte-identical handling).
            LOG.error(
                f'Failed to connect to {CONF.kiss_tcp.host}:{CONF.kiss_tcp.port} - {str(e)}'
            )
            self._connected = False
            return False

    def fix_raw_frame(self, raw_frame: bytes) -> bytes:
        """Fix the raw frame by recalculating the FCS."""
        # Strip the leading FEND + command byte and trailing FEND.
        ax25_data = raw_frame[2:-1]  # Remove KISS markers
        return handle_fend(ax25_data)

    def read_frame(self, blocking=False):
        """
        Read one AX.25 frame from the socket, if available.

        Uses select() with select_timeout so the loop remains
        interruptible.  Returns an ax25frame.Frame or None.
        """
        try:
            self.socket.setblocking(0)
        except OSError as e:
            LOG.error(f'socket error when setblocking(0): {str(e)}')
            raise aprslib.ConnectionDrop('connection dropped') from e
        while self._running:
            short_buf = b''
            try:
                readable, _, _ = select.select(
                    [self.socket],
                    [],
                    [],
                    self.select_timeout,
                )
                if not readable:
                    if not blocking:
                        break
                    else:
                        continue
            except Exception as e:
                LOG.error(f'Error in read loop: {e}')
                self._connected = False
                break
            try:
                # BUG FIX: removed leftover print() debug statements that
                # wrote directly to stdout on every read.
                short_buf = self.socket.recv(1024)
                # sock.recv returns empty if the connection drops
                if not short_buf:
                    if not blocking:
                        # We could just not be blocking, so empty is expected
                        continue
                    else:
                        # BUG FIX: self.logger was never defined; use LOG.
                        LOG.error('socket.recv(): returned empty')
                        raise aprslib.ConnectionDrop('connection dropped')
                raw_frame = self.fix_raw_frame(short_buf)
                return ax25frame.Frame.from_bytes(raw_frame)
            except OSError as e:
                # self.logger.error("socket error on recv(): %s" % str(e))
                if 'Resource temporarily unavailable' in str(e):
                    if not blocking:
                        if len(short_buf) == 0:
                            break
            except socket.timeout:
                continue
            except (KeyboardInterrupt, SystemExit):
                raise
            except ConnectionError:
                self.close()
                if not self.auto_reconnect:
                    raise
                else:
                    self.connect()
                    continue
            except StopIteration:
                break
            except IOError:
                LOG.error('IOError')
                break
            except Exception as e:
                LOG.error(f'Error in read loop: {e}')
                self._connected = False
                if not self.auto_reconnect:
                    break

View File

@ -1,18 +0,0 @@
import threading
import wrapt
from oslo_config import cfg
from aprsd.client.client import APRSDClient
from aprsd.utils import singleton
CONF = cfg.CONF
@singleton
class APRSClientStats:
    """Singleton facade exposing stats from the active APRSD client."""

    # Guards concurrent stats() calls across threads.
    lock = threading.Lock()

    @wrapt.synchronized(lock)
    def stats(self, serializable=False):
        # Delegate to the APRSDClient singleton; serializable=True asks
        # for JSON-friendly values.
        return APRSDClient().stats(serializable=serializable)

View File

@ -1,56 +1,36 @@
import datetime
import logging
import select
import socket
import threading
import aprslib
import wrapt
from aprslib import is_py3
from aprslib.exceptions import (
ConnectionDrop,
ConnectionError,
GenericError,
LoginError,
ParseError,
ConnectionDrop, ConnectionError, GenericError, LoginError, ParseError,
UnknownFormat,
)
import wrapt
import aprsd
from aprsd import stats
from aprsd.packets import core
LOG = logging.getLogger('APRSD')
LOG = logging.getLogger("APRSD")
class APRSLibClient(aprslib.IS):
"""Extend the aprslib class so we can exit properly.
This is a modified version of the aprslib.IS class that adds a stop method to
allow the client to exit cleanly.
The aprsis driver uses this class to connect to the APRS-IS server.
"""
class Aprsdis(aprslib.IS):
"""Extend the aprslib class so we can exit properly."""
# flag to tell us to stop
thread_stop = False
# date for last time we heard from the server
aprsd_keepalive = datetime.datetime.now()
# Which server we are connected to?
server_string = 'None'
# timeout in seconds
select_timeout = 1
lock = threading.Lock()
def stop(self):
self.thread_stop = True
LOG.warning('Shutdown Aprsdis client.')
def close(self):
LOG.warning('Closing Aprsdis client.')
super().close()
LOG.info("Shutdown Aprsdis client.")
@wrapt.synchronized(lock)
def send(self, packet: core.Packet):
@ -61,57 +41,6 @@ class APRSLibClient(aprslib.IS):
"""If the connection is alive or not."""
return self._connected
def _connect(self):
"""
Attemps connection to the server
"""
self.logger.info(
'Attempting connection to %s:%s', self.server[0], self.server[1]
)
try:
self._open_socket()
peer = self.sock.getpeername()
self.logger.info('Connected to %s', str(peer))
# 5 second timeout to receive server banner
self.sock.setblocking(1)
self.sock.settimeout(5)
self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
# MACOS doesn't have TCP_KEEPIDLE
if hasattr(socket, 'TCP_KEEPIDLE'):
self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, 1)
self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, 3)
self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 5)
banner = self.sock.recv(512)
if is_py3:
banner = banner.decode('latin-1')
if banner[0] == '#':
self.logger.debug('Banner: %s', banner.rstrip())
else:
raise ConnectionError('invalid banner from server')
except ConnectionError as e:
self.logger.error(str(e))
self.close()
raise
except (socket.error, socket.timeout) as e:
self.close()
self.logger.error('Socket error: %s' % str(e))
if str(e) == 'timed out':
raise ConnectionError('no banner from server') from e
else:
raise ConnectionError(e) from e
self._connected = True
def _socket_readlines(self, blocking=False):
"""
Generator for complete lines, received from the server
@ -119,12 +48,12 @@ class APRSLibClient(aprslib.IS):
try:
self.sock.setblocking(0)
except OSError as e:
self.logger.error(f'socket error when setblocking(0): {str(e)}')
raise aprslib.ConnectionDrop('connection dropped') from e
self.logger.error(f"socket error when setblocking(0): {str(e)}")
raise aprslib.ConnectionDrop("connection dropped")
while not self.thread_stop:
short_buf = b''
newline = b'\r\n'
short_buf = b""
newline = b"\r\n"
# set a select timeout, so we get a chance to exit
# when user hits CTRL-C
@ -149,11 +78,11 @@ class APRSLibClient(aprslib.IS):
# We could just not be blocking, so empty is expected
continue
else:
self.logger.error('socket.recv(): returned empty')
raise aprslib.ConnectionDrop('connection dropped')
self.logger.error("socket.recv(): returned empty")
raise aprslib.ConnectionDrop("connection dropped")
except OSError as e:
# self.logger.error("socket error on recv(): %s" % str(e))
if 'Resource temporarily unavailable' in str(e):
if "Resource temporarily unavailable" in str(e):
if not blocking:
if len(self.buf) == 0:
break
@ -169,22 +98,23 @@ class APRSLibClient(aprslib.IS):
"""
Sends login string to server
"""
login_str = 'user {0} pass {1} vers Python-APRSD {3}{2}\r\n'
login_str = "user {0} pass {1} vers github.com/craigerl/aprsd {3}{2}\r\n"
login_str = login_str.format(
self.callsign,
self.passwd,
(' filter ' + self.filter) if self.filter != '' else '',
(" filter " + self.filter) if self.filter != "" else "",
aprsd.__version__,
)
self.logger.debug('Sending login information')
self.logger.info("Sending login information")
try:
self._sendall(login_str)
self.sock.settimeout(5)
test = self.sock.recv(len(login_str) + 100)
self.logger.debug("Server: '%s'", test)
if is_py3:
test = test.decode('latin-1')
test = test.decode("latin-1")
test = test.rstrip()
self.logger.debug("Server: '%s'", test)
@ -192,27 +122,28 @@ class APRSLibClient(aprslib.IS):
if not test:
raise LoginError(f"Server Response Empty: '{test}'")
_, _, callsign, status, e = test.split(' ', 4)
s = e.split(',')
_, _, callsign, status, e = test.split(" ", 4)
s = e.split(",")
if len(s):
server_string = s[0].replace('server ', '')
server_string = s[0].replace("server ", "")
else:
server_string = e.replace('server ', '')
server_string = e.replace("server ", "")
if callsign == '':
raise LoginError('Server responded with empty callsign???')
if callsign == "":
raise LoginError("Server responded with empty callsign???")
if callsign != self.callsign:
raise LoginError(f'Server: {test}')
if status != 'verified,' and self.passwd != '-1':
raise LoginError('Password is incorrect')
raise LoginError(f"Server: {test}")
if status != "verified," and self.passwd != "-1":
raise LoginError("Password is incorrect")
if self.passwd == '-1':
self.logger.info('Login successful (receive only)')
if self.passwd == "-1":
self.logger.info("Login successful (receive only)")
else:
self.logger.info('Login successful')
self.logger.info("Login successful")
self.logger.info(f'Connected to {server_string}')
self.logger.info(f"Connected to {server_string}")
self.server_string = server_string
stats.APRSDStats().set_aprsis_server(server_string)
except LoginError as e:
self.logger.error(str(e))
@ -222,7 +153,7 @@ class APRSLibClient(aprslib.IS):
self.close()
self.logger.error(f"Failed to login '{e}'")
self.logger.exception(e)
raise LoginError('Failed to login') from e
raise LoginError("Failed to login")
def consumer(self, callback, blocking=True, immortal=False, raw=False):
"""
@ -238,38 +169,37 @@ class APRSLibClient(aprslib.IS):
"""
if not self._connected:
raise ConnectionError('not connected to a server')
raise ConnectionError("not connected to a server")
line = b''
line = b""
while not self.thread_stop:
while True and not self.thread_stop:
try:
for line in self._socket_readlines(blocking):
if line[0:1] != b'#':
self.aprsd_keepalive = datetime.datetime.now()
if line[0:1] != b"#":
if raw:
callback(line)
else:
callback(self._parse(line))
else:
self.logger.debug('Server: %s', line.decode('utf8'))
self.aprsd_keepalive = datetime.datetime.now()
self.logger.debug("Server: %s", line.decode("utf8"))
stats.APRSDStats().set_aprsis_keepalive()
except ParseError as exp:
self.logger.log(
11,
"%s Packet: '%s'",
"%s\n Packet: %s",
exp,
exp.packet,
)
except UnknownFormat as exp:
self.logger.log(
9,
"%s Packet: '%s'",
"%s\n Packet: %s",
exp,
exp.packet,
)
except LoginError as exp:
self.logger.error('%s: %s', exp.__class__.__name__, exp)
self.logger.error("%s: %s", exp.__class__.__name__, exp)
except (KeyboardInterrupt, SystemExit):
raise
except (ConnectionDrop, ConnectionError):
@ -284,12 +214,8 @@ class APRSLibClient(aprslib.IS):
pass
except StopIteration:
break
except IOError:
if not self.thread_stop:
self.logger.error('IOError')
break
except Exception:
self.logger.error('APRS Packet: %s', line)
self.logger.error("APRS Packet: %s", line)
raise
if not blocking:

113
aprsd/clients/kiss.py Normal file
View File

@ -0,0 +1,113 @@
import logging
from ax253 import Frame
import kiss
from oslo_config import cfg
from aprsd import conf # noqa
from aprsd.packets import core
from aprsd.utils import trace
CONF = cfg.CONF
LOG = logging.getLogger("APRSD")
class KISS3Client:
    """KISS client wrapping the kiss library (serial or TCP transport)."""

    def __init__(self):
        self.setup()

    def is_alive(self):
        # The kiss library exposes no liveness signal; assume alive.
        return True

    def setup(self):
        """Create and start the serial or TCP KISS interface.

        Raises:
            RuntimeError: if neither kiss_serial nor kiss_tcp is enabled.
        """
        # we can be TCP kiss or Serial kiss
        self.kiss = None
        if CONF.kiss_serial.enabled:
            LOG.debug(
                "KISS({}) Serial connection to {}".format(
                    kiss.__version__,
                    CONF.kiss_serial.device,
                ),
            )
            self.kiss = kiss.SerialKISS(
                port=CONF.kiss_serial.device,
                speed=CONF.kiss_serial.baudrate,
                strip_df_start=True,
            )
        elif CONF.kiss_tcp.enabled:
            LOG.debug(
                "KISS({}) TCP Connection to {}:{}".format(
                    kiss.__version__,
                    CONF.kiss_tcp.host,
                    CONF.kiss_tcp.port,
                ),
            )
            self.kiss = kiss.TCPKISS(
                host=CONF.kiss_tcp.host,
                port=CONF.kiss_tcp.port,
                strip_df_start=True,
            )
        if not self.kiss:
            # BUG FIX: previously fell through with self.kiss unset and
            # crashed with AttributeError on self.kiss.start().
            raise RuntimeError(
                'KISS is not enabled; set kiss_serial.enabled '
                'or kiss_tcp.enabled in the config.'
            )
        LOG.debug("Starting KISS interface connection")
        self.kiss.start()

    @trace.trace
    def stop(self):
        # Best-effort shutdown of the kiss interface and its transport.
        try:
            self.kiss.stop()
            self.kiss.loop.call_soon_threadsafe(
                self.kiss.protocol.transport.close,
            )
        except Exception as ex:
            LOG.exception(ex)

    def set_filter(self, filter):
        # This does nothing right now.
        pass

    def parse_frame(self, frame_bytes):
        """Decode raw KISS bytes into an AX.25 Frame and pass it to the
        callback registered via consumer()."""
        try:
            frame = Frame.from_bytes(frame_bytes)
            # Now parse it with aprslib
            kwargs = {
                "frame": frame,
            }
            self._parse_callback(**kwargs)
        except Exception as ex:
            LOG.error("Failed to parse bytes received from KISS interface.")
            LOG.exception(ex)

    def consumer(self, callback, blocking=False, immortal=False, raw=False):
        """Block reading frames; each frame is routed through parse_frame."""
        LOG.debug("Start blocking KISS consumer")
        self._parse_callback = callback
        self.kiss.read(callback=self.parse_frame, min_frames=None)
        LOG.debug(f"END blocking KISS consumer {self.kiss}")

    def send(self, packet):
        """Send an APRS Message object."""
        payload = None
        path = ["WIDE1-1", "WIDE2-1"]
        if isinstance(packet, core.Packet):
            packet.prepare()
            payload = packet.payload.encode("US-ASCII")
        else:
            # Legacy message-style object: build the payload by hand.
            msg_payload = f"{packet.raw}{{{str(packet.msgNo)}"
            payload = (
                ":{:<9}:{}".format(
                    packet.to_call,
                    msg_payload,
                )
            ).encode("US-ASCII")
        LOG.debug(
            f"KISS Send '{payload}' TO '{packet.to_call}' From "
            f"'{packet.from_call}' with PATH '{path}'",
        )
        frame = Frame.ui(
            destination="APZ100",
            source=packet.from_call,
            path=path,
            info=payload,
        )
        self.kiss.write(frame)

View File

@ -1,25 +1,36 @@
import click
import click.shell_completion
import click_completion
from aprsd.main import cli
CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"])
@cli.command()
@click.argument(
"shell", type=click.Choice(list(click.shell_completion._available_shells))
)
def completion(shell):
"""Show the shell completion code"""
from click.utils import _detect_program_name
@cli.group(help="Click Completion subcommands", context_settings=CONTEXT_SETTINGS)
@click.pass_context
def completion(ctx):
pass
cls = click.shell_completion.get_completion_class(shell)
prog_name = _detect_program_name()
complete_var = f"_{prog_name}_COMPLETE".replace("-", "_").upper()
print(cls(cli, {}, prog_name, complete_var).source())
print(
"# Add the following line to your shell configuration file to have aprsd command line completion"
)
print("# but remove the leading '#' character.")
print(f'# eval "$(aprsd completion {shell})"')
# show dumps out the completion code for a particular shell
@completion.command(help="Show completion code for shell", name="show")
@click.option("-i", "--case-insensitive/--no-case-insensitive", help="Case insensitive completion")
@click.argument("shell", required=False, type=click_completion.DocumentedChoice(click_completion.core.shells))
def show(shell, case_insensitive):
"""Show the click-completion-command completion code"""
extra_env = {"_CLICK_COMPLETION_COMMAND_CASE_INSENSITIVE_COMPLETE": "ON"} if case_insensitive else {}
click.echo(click_completion.core.get_code(shell, extra_env=extra_env))
# install will install the completion code for a particular shell
@completion.command(help="Install completion code for a shell", name="install")
@click.option("--append/--overwrite", help="Append the completion code to the file", default=None)
@click.option("-i", "--case-insensitive/--no-case-insensitive", help="Case insensitive completion")
@click.argument("shell", required=False, type=click_completion.DocumentedChoice(click_completion.core.shells))
@click.argument("path", required=False)
def install(append, case_insensitive, shell, path):
"""Install the click-completion-command completion"""
extra_env = {"_CLICK_COMPLETION_COMMAND_CASE_INSENSITIVE_COMPLETE": "ON"} if case_insensitive else {}
shell, path = click_completion.core.install(shell=shell, path=path, append=append, extra_env=extra_env)
click.echo(f"{shell} completion installed in {path}")

View File

@ -8,18 +8,18 @@ import logging
import click
from oslo_config import cfg
from aprsd import cli_helper, conf, packets, plugin
# local imports here
from aprsd import cli_helper, client, conf, packets, plugin
from aprsd.main import cli
from aprsd.utils import trace
CONF = cfg.CONF
LOG = logging.getLogger('APRSD')
CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])
LOG = logging.getLogger("APRSD")
CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"])
@cli.group(help='Development type subcommands', context_settings=CONTEXT_SETTINGS)
@cli.group(help="Development type subcommands", context_settings=CONTEXT_SETTINGS)
@click.pass_context
def dev(ctx):
pass
@ -28,37 +28,37 @@ def dev(ctx):
@dev.command()
@cli_helper.add_options(cli_helper.common_options)
@click.option(
'--aprs-login',
envvar='APRS_LOGIN',
"--aprs-login",
envvar="APRS_LOGIN",
show_envvar=True,
help='What callsign to send the message from.',
help="What callsign to send the message from.",
)
@click.option(
'-p',
'--plugin',
'plugin_path',
"-p",
"--plugin",
"plugin_path",
show_default=True,
default=None,
help='The plugin to run. Ex: aprsd.plugins.ping.PingPlugin',
help="The plugin to run. Ex: aprsd.plugins.ping.PingPlugin",
)
@click.option(
'-a',
'--all',
'load_all',
"-a",
"--all",
"load_all",
show_default=True,
is_flag=True,
default=False,
help='Load all the plugins in config?',
help="Load all the plugins in config?",
)
@click.option(
'-n',
'--num',
'number',
"-n",
"--num",
"number",
show_default=True,
default=1,
help='Number of times to call the plugin',
help="Number of times to call the plugin",
)
@click.argument('message', nargs=-1, required=True)
@click.argument("message", nargs=-1, required=True)
@click.pass_context
@cli_helper.process_standard_options
def test_plugin(
@ -75,7 +75,7 @@ def test_plugin(
if not aprs_login:
if CONF.aprs_network.login == conf.client.DEFAULT_LOGIN:
click.echo('Must set --aprs_login or APRS_LOGIN')
click.echo("Must set --aprs_login or APRS_LOGIN")
ctx.exit(-1)
return
else:
@ -85,32 +85,33 @@ def test_plugin(
if not plugin_path:
click.echo(ctx.get_help())
click.echo('')
click.echo('Failed to provide -p option to test a plugin')
click.echo("")
click.echo("Failed to provide -p option to test a plugin")
ctx.exit(-1)
return
if type(message) is tuple:
message = ' '.join(message)
message = " ".join(message)
if CONF.trace_enabled:
trace.setup_tracing(['method', 'api'])
trace.setup_tracing(["method", "api"])
client.Client()
pm = plugin.PluginManager()
if load_all:
pm.setup_plugins(load_help_plugin=CONF.load_help_plugin)
pm.setup_plugins()
obj = pm._create_class(plugin_path, plugin.APRSDPluginBase)
if not obj:
click.echo(ctx.get_help())
click.echo('')
click.echo("")
ctx.fail(f"Failed to create object from plugin path '{plugin_path}'")
ctx.exit()
# Register the plugin they wanted tested.
LOG.info(
'Testing plugin {} Version {}'.format(
obj.__class__,
obj.version,
"Testing plugin {} Version {}".format(
obj.__class__, obj.version,
),
)
pm.register_msg(obj)
@ -123,33 +124,9 @@ def test_plugin(
)
LOG.info(f"P'{plugin_path}' F'{fromcall}' C'{message}'")
for _ in range(number):
replies = pm.run(packet)
for x in range(number):
reply = pm.run(packet)
# Plugin might have threads, so lets stop them so we can exit.
# obj.stop_threads()
for reply in replies:
if isinstance(reply, list):
# one of the plugins wants to send multiple messages
for subreply in reply:
if isinstance(subreply, packets.Packet):
LOG.info(subreply)
else:
LOG.info(
packets.MessagePacket(
from_call=CONF.callsign,
to_call=fromcall,
message_text=subreply,
),
)
elif isinstance(reply, packets.Packet):
# We have a message based object.
LOG.info(reply)
elif reply is not packets.NULL_MESSAGE:
LOG.info(
packets.MessagePacket(
from_call=CONF.callsign,
to_call=fromcall,
message_text=reply,
),
)
LOG.info(f"Result{x} = '{reply}'")
pm.stop()

View File

@ -1,8 +1,9 @@
# Fetch active stats from a remote running instance of aprsd admin web interface.
# Fetch active stats from a remote running instance of aprsd server
# This uses the RPC server to fetch the stats from the remote server.
import logging
import click
import requests
from oslo_config import cfg
from rich.console import Console
from rich.table import Table
@ -11,298 +12,145 @@ from rich.table import Table
import aprsd
from aprsd import cli_helper
from aprsd.main import cli
from aprsd.threads.stats import StatsStore
from aprsd.rpc import client as rpc_client
# setup the global logger
# log.basicConfig(level=log.DEBUG) # level=10
LOG = logging.getLogger('APRSD')
LOG = logging.getLogger("APRSD")
CONF = cfg.CONF
@cli.command()
@cli_helper.add_options(cli_helper.common_options)
@click.option(
'--host',
type=str,
"--host", type=str,
default=None,
help='IP address of the remote aprsd admin web ui fetch stats from.',
help="IP address of the remote aprsd server to fetch stats from.",
)
@click.option(
'--port',
type=int,
"--port", type=int,
default=None,
help='Port of the remote aprsd web admin interface to fetch stats from.',
help="Port of the remote aprsd server rpc port to fetch stats from.",
)
@click.option(
"--magic-word", type=str,
default=None,
help="Magic word of the remote aprsd server rpc port to fetch stats from.",
)
@click.pass_context
@cli_helper.process_standard_options
def fetch_stats(ctx, host, port):
"""Fetch stats from a APRSD admin web interface."""
console = Console()
console.print(f'APRSD Fetch-Stats started version: {aprsd.__version__}')
def fetch_stats(ctx, host, port, magic_word):
"""Fetch stats from a remote running instance of aprsd server."""
LOG.info(f"APRSD Fetch-Stats started version: {aprsd.__version__}")
CONF.log_opt_values(LOG, logging.DEBUG)
if not host:
host = CONF.admin.web_ip
host = CONF.rpc_settings.ip
if not port:
port = CONF.admin.web_port
port = CONF.rpc_settings.port
if not magic_word:
magic_word = CONF.rpc_settings.magic_word
msg = f'Fetching stats from {host}:{port}'
msg = f"Fetching stats from {host}:{port} with magic word '{magic_word}'"
console = Console()
console.print(msg)
with console.status(msg):
response = requests.get(f'http://{host}:{port}/stats', timeout=120)
if not response:
console.print(
f'Failed to fetch stats from {host}:{port}?',
style='bold red',
)
return
stats = response.json()
if not stats:
console.print(
f'Failed to fetch stats from aprsd admin ui at {host}:{port}',
style='bold red',
)
return
client = rpc_client.RPCClient(host, port, magic_word)
stats = client.get_stats_dict()
console.print_json(data=stats)
aprsd_title = (
'APRSD '
f'[bold cyan]v{stats["APRSDStats"]["version"]}[/] '
f'Callsign [bold green]{stats["APRSDStats"]["callsign"]}[/] '
f'Uptime [bold yellow]{stats["APRSDStats"]["uptime"]}[/]'
"APRSD "
f"[bold cyan]v{stats['aprsd']['version']}[/] "
f"Callsign [bold green]{stats['aprsd']['callsign']}[/] "
f"Uptime [bold yellow]{stats['aprsd']['uptime']}[/]"
)
console.rule(f'Stats from {host}:{port}')
console.print('\n\n')
console.rule(f"Stats from {host}:{port} with magic word '{magic_word}'")
console.print("\n\n")
console.rule(aprsd_title)
# Show the connection to APRS
# It can be a connection to an APRS-IS server or a local TNC via KISS or KISSTCP
if 'aprs-is' in stats:
title = f'APRS-IS Connection {stats["APRSClientStats"]["server_string"]}'
if "aprs-is" in stats:
title = f"APRS-IS Connection {stats['aprs-is']['server']}"
table = Table(title=title)
table.add_column('Key')
table.add_column('Value')
for key, value in stats['APRSClientStats'].items():
table.add_column("Key")
table.add_column("Value")
for key, value in stats["aprs-is"].items():
table.add_row(key, value)
console.print(table)
threads_table = Table(title='Threads')
threads_table.add_column('Name')
threads_table.add_column('Alive?')
for name, alive in stats['APRSDThreadList'].items():
threads_table = Table(title="Threads")
threads_table.add_column("Name")
threads_table.add_column("Alive?")
for name, alive in stats["aprsd"]["threads"].items():
threads_table.add_row(name, str(alive))
console.print(threads_table)
packet_totals = Table(title='Packet Totals')
packet_totals.add_column('Key')
packet_totals.add_column('Value')
packet_totals.add_row('Total Received', str(stats['PacketList']['rx']))
packet_totals.add_row('Total Sent', str(stats['PacketList']['tx']))
msgs_table = Table(title="Messages")
msgs_table.add_column("Key")
msgs_table.add_column("Value")
for key, value in stats["messages"].items():
msgs_table.add_row(key, str(value))
console.print(msgs_table)
packet_totals = Table(title="Packet Totals")
packet_totals.add_column("Key")
packet_totals.add_column("Value")
packet_totals.add_row("Total Received", str(stats["packets"]["total_received"]))
packet_totals.add_row("Total Sent", str(stats["packets"]["total_sent"]))
packet_totals.add_row("Total Tracked", str(stats["packets"]["total_tracked"]))
console.print(packet_totals)
# Show each of the packet types
packets_table = Table(title='Packets By Type')
packets_table.add_column('Packet Type')
packets_table.add_column('TX')
packets_table.add_column('RX')
for key, value in stats['PacketList']['packets'].items():
packets_table.add_row(key, str(value['tx']), str(value['rx']))
packets_table = Table(title="Packets By Type")
packets_table.add_column("Packet Type")
packets_table.add_column("TX")
packets_table.add_column("RX")
for key, value in stats["packets"]["by_type"].items():
packets_table.add_row(key, str(value["tx"]), str(value["rx"]))
console.print(packets_table)
if 'plugins' in stats:
count = len(stats['PluginManager'])
plugins_table = Table(title=f'Plugins ({count})')
plugins_table.add_column('Plugin')
plugins_table.add_column('Enabled')
plugins_table.add_column('Version')
plugins_table.add_column('TX')
plugins_table.add_column('RX')
plugins = stats['PluginManager']
for key, _ in plugins.items():
if "plugins" in stats:
count = len(stats["plugins"])
plugins_table = Table(title=f"Plugins ({count})")
plugins_table.add_column("Plugin")
plugins_table.add_column("Enabled")
plugins_table.add_column("Version")
plugins_table.add_column("TX")
plugins_table.add_column("RX")
for key, value in stats["plugins"].items():
plugins_table.add_row(
key,
str(plugins[key]['enabled']),
plugins[key]['version'],
str(plugins[key]['tx']),
str(plugins[key]['rx']),
str(stats["plugins"][key]["enabled"]),
stats["plugins"][key]["version"],
str(stats["plugins"][key]["tx"]),
str(stats["plugins"][key]["rx"]),
)
console.print(plugins_table)
if seen_list := stats.get('SeenList'):
count = len(seen_list)
seen_table = Table(title=f'Seen List ({count})')
seen_table.add_column('Callsign')
seen_table.add_column('Message Count')
seen_table.add_column('Last Heard')
for key, value in seen_list.items():
seen_table.add_row(key, str(value['count']), value['last'])
if "seen_list" in stats["aprsd"]:
count = len(stats["aprsd"]["seen_list"])
seen_table = Table(title=f"Seen List ({count})")
seen_table.add_column("Callsign")
seen_table.add_column("Message Count")
seen_table.add_column("Last Heard")
for key, value in stats["aprsd"]["seen_list"].items():
seen_table.add_row(key, str(value["count"]), value["last"])
console.print(seen_table)
if watch_list := stats.get('WatchList'):
count = len(watch_list)
watch_table = Table(title=f'Watch List ({count})')
watch_table.add_column('Callsign')
watch_table.add_column('Last Heard')
for key, value in watch_list.items():
watch_table.add_row(key, value['last'])
if "watch_list" in stats["aprsd"]:
count = len(stats["aprsd"]["watch_list"])
watch_table = Table(title=f"Watch List ({count})")
watch_table.add_column("Callsign")
watch_table.add_column("Last Heard")
for key, value in stats["aprsd"]["watch_list"].items():
watch_table.add_row(key, value["last"])
console.print(watch_table)
@cli.command()
@cli_helper.add_options(cli_helper.common_options)
@click.option(
'--raw',
is_flag=True,
default=False,
help='Dump raw stats instead of formatted output.',
)
@click.option(
'--show-section',
default=['All'],
help='Show specific sections of the stats. '
' Choices: All, APRSDStats, APRSDThreadList, APRSClientStats,'
' PacketList, SeenList, WatchList',
multiple=True,
type=click.Choice(
[
'All',
'APRSDStats',
'APRSDThreadList',
'APRSClientStats',
'PacketList',
'SeenList',
'WatchList',
],
case_sensitive=False,
),
)
@click.pass_context
@cli_helper.process_standard_options
def dump_stats(ctx, raw, show_section):
"""Dump the current stats from the running APRSD instance."""
console = Console()
console.print(f'APRSD Dump-Stats started version: {aprsd.__version__}')
with console.status('Dumping stats'):
ss = StatsStore()
ss.load()
stats = ss.data
if raw:
if 'All' in show_section:
console.print(stats)
return
else:
for section in show_section:
console.print(f'Dumping {section} section:')
console.print(stats[section])
return
t = Table(title='APRSD Stats')
t.add_column('Key')
t.add_column('Value')
for key, value in stats['APRSDStats'].items():
t.add_row(key, str(value))
if 'All' in show_section or 'APRSDStats' in show_section:
console.print(t)
# Show the thread list
t = Table(title='Thread List')
t.add_column('Name')
t.add_column('Class')
t.add_column('Alive?')
t.add_column('Loop Count')
t.add_column('Age')
for name, value in stats['APRSDThreadList'].items():
t.add_row(
name,
value['class'],
str(value['alive']),
str(value['loop_count']),
str(value['age']),
)
if 'All' in show_section or 'APRSDThreadList' in show_section:
console.print(t)
# Show the plugins
t = Table(title='Plugin List')
t.add_column('Name')
t.add_column('Enabled')
t.add_column('Version')
t.add_column('TX')
t.add_column('RX')
for name, value in stats['PluginManager'].items():
t.add_row(
name,
str(value['enabled']),
value['version'],
str(value['tx']),
str(value['rx']),
)
if 'All' in show_section or 'PluginManager' in show_section:
console.print(t)
# Now show the client stats
t = Table(title='Client Stats')
t.add_column('Key')
t.add_column('Value')
for key, value in stats['APRSClientStats'].items():
t.add_row(key, str(value))
if 'All' in show_section or 'APRSClientStats' in show_section:
console.print(t)
# now show the packet list
packet_list = stats.get('PacketList')
t = Table(title='Packet List')
t.add_column('Key')
t.add_column('Value')
t.add_row('Total Received', str(packet_list['rx']))
t.add_row('Total Sent', str(packet_list['tx']))
if 'All' in show_section or 'PacketList' in show_section:
console.print(t)
# now show the seen list
seen_list = stats.get('SeenList')
sorted_seen_list = sorted(
seen_list.items(),
)
t = Table(title='Seen List')
t.add_column('Callsign')
t.add_column('Message Count')
t.add_column('Last Heard')
for key, value in sorted_seen_list:
t.add_row(
key,
str(value['count']),
str(value['last']),
)
if 'All' in show_section or 'SeenList' in show_section:
console.print(t)
# now show the watch list
watch_list = stats.get('WatchList')
sorted_watch_list = sorted(
watch_list.items(),
)
t = Table(title='Watch List')
t.add_column('Callsign')
t.add_column('Last Heard')
for key, value in sorted_watch_list:
t.add_row(
key,
str(value['last']),
)
if 'All' in show_section or 'WatchList' in show_section:
console.print(t)

View File

@ -13,14 +13,12 @@ from oslo_config import cfg
from rich.console import Console
import aprsd
from aprsd import ( # noqa: F401
cli_helper,
conf,
)
from aprsd import cli_helper, utils
from aprsd import conf # noqa
# local imports here
from aprsd.main import cli
from aprsd.threads import stats as stats_threads
from aprsd.rpc import client as aprsd_rpc_client
# setup the global logger
# log.basicConfig(level=log.DEBUG) # level=10
@ -41,48 +39,46 @@ console = Console()
@cli_helper.process_standard_options
def healthcheck(ctx, timeout):
"""Check the health of the running aprsd server."""
ver_str = f"APRSD HealthCheck version: {aprsd.__version__}"
console.log(ver_str)
console.log(f"APRSD HealthCheck version: {aprsd.__version__}")
if not CONF.rpc_settings.enabled:
LOG.error("Must enable rpc_settings.enabled to use healthcheck")
sys.exit(-1)
if not CONF.rpc_settings.ip:
LOG.error("Must enable rpc_settings.ip to use healthcheck")
sys.exit(-1)
if not CONF.rpc_settings.magic_word:
LOG.error("Must enable rpc_settings.magic_word to use healthcheck")
sys.exit(-1)
with console.status(ver_str):
with console.status(f"APRSD HealthCheck version: {aprsd.__version__}") as status:
try:
stats_obj = stats_threads.StatsStore()
stats_obj.load()
stats = stats_obj.data
# console.print(stats)
status.update(f"Contacting APRSD via RPC {CONF.rpc_settings.ip}")
stats = aprsd_rpc_client.RPCClient().get_stats_dict()
except Exception as ex:
console.log(f"Failed to load stats: '{ex}'")
console.log(f"Failed to fetch healthcheck : '{ex}'")
sys.exit(-1)
else:
now = datetime.datetime.now()
if not stats:
console.log("No stats from aprsd")
sys.exit(-1)
email_thread_last_update = stats["email"]["thread_last_update"]
email_stats = stats.get("EmailStats")
if email_stats:
email_thread_last_update = email_stats["last_check_time"]
if email_thread_last_update != "never":
d = now - email_thread_last_update
max_timeout = {"hours": 0.0, "minutes": 5, "seconds": 30}
max_delta = datetime.timedelta(**max_timeout)
if d > max_delta:
console.log(f"Email thread is very old! {d}")
sys.exit(-1)
client_stats = stats.get("APRSClientStats")
if not client_stats:
console.log("No APRSClientStats")
sys.exit(-1)
else:
aprsis_last_update = client_stats["connection_keepalive"]
d = now - aprsis_last_update
if email_thread_last_update != "never":
delta = utils.parse_delta_str(email_thread_last_update)
d = datetime.timedelta(**delta)
max_timeout = {"hours": 0.0, "minutes": 5, "seconds": 0}
max_delta = datetime.timedelta(**max_timeout)
if d > max_delta:
LOG.error(f"APRS-IS last update is very old! {d}")
console.log(f"Email thread is very old! {d}")
sys.exit(-1)
console.log("OK")
aprsis_last_update = stats["aprs-is"]["last_update"]
delta = utils.parse_delta_str(aprsis_last_update)
d = datetime.timedelta(**delta)
max_timeout = {"hours": 0.0, "minutes": 5, "seconds": 0}
max_delta = datetime.timedelta(**max_timeout)
if d > max_delta:
LOG.error(f"APRS-IS last update is very old! {d}")
sys.exit(-1)
sys.exit(0)

View File

@ -4,9 +4,12 @@ import inspect
import logging
import os
import pkgutil
import re
import sys
from traceback import print_tb
from urllib.parse import urljoin
from bs4 import BeautifulSoup
import click
import requests
from rich.console import Console
@ -17,14 +20,16 @@ from thesmuggler import smuggle
from aprsd import cli_helper
from aprsd import plugin as aprsd_plugin
from aprsd.main import cli
from aprsd.plugins import fortune, notify, ping, time, version, weather
from aprsd.plugins import (
email, fortune, location, notify, ping, query, time, version, weather,
)
LOG = logging.getLogger('APRSD')
PYPI_URL = 'https://pypi.org/search/'
LOG = logging.getLogger("APRSD")
def onerror(name):
print(f'Error importing module {name}')
print(f"Error importing module {name}")
type, value, traceback = sys.exc_info()
print_tb(traceback)
@ -40,19 +45,19 @@ def is_plugin(obj):
def plugin_type(obj):
for c in inspect.getmro(obj):
if issubclass(c, aprsd_plugin.APRSDRegexCommandPluginBase):
return 'RegexCommand'
return "RegexCommand"
if issubclass(c, aprsd_plugin.APRSDWatchListPluginBase):
return 'WatchList'
return "WatchList"
if issubclass(c, aprsd_plugin.APRSDPluginBase):
return 'APRSDPluginBase'
return "APRSDPluginBase"
return 'Unknown'
return "Unknown"
def walk_package(package):
return pkgutil.walk_packages(
package.__path__,
package.__name__ + '.',
package.__name__ + ".",
onerror=onerror,
)
@ -62,62 +67,44 @@ def get_module_info(package_name, module_name, module_path):
return None
dir_path = os.path.realpath(module_path)
pattern = '*.py'
pattern = "*.py"
obj_list = []
for path, _subdirs, files in os.walk(dir_path):
for name in files:
if fnmatch.fnmatch(name, pattern):
module = smuggle(f'{path}/{name}')
module = smuggle(f"{path}/{name}")
for mem_name, obj in inspect.getmembers(module):
if inspect.isclass(obj) and is_plugin(obj):
obj_list.append(
{
'package': package_name,
'name': mem_name,
'obj': obj,
'version': obj.version,
'path': f'{".".join([module_name, obj.__name__])}',
"package": package_name,
"name": mem_name, "obj": obj,
"version": obj.version,
"path": f"{'.'.join([module_name, obj.__name__])}",
},
)
return obj_list
def _get_installed_aprsd_items():
# installed plugins
plugins = {}
extensions = {}
for _finder, name, ispkg in pkgutil.iter_modules():
if ispkg and name.startswith('aprsd_'):
module = importlib.import_module(name)
pkgs = walk_package(module)
for pkg in pkgs:
pkg_info = get_module_info(
module.__name__, pkg.name, module.__path__[0]
)
if 'plugin' in name:
plugins[name] = pkg_info
elif 'extension' in name:
extensions[name] = pkg_info
return plugins, extensions
def get_installed_plugins():
# installed plugins
plugins, extensions = _get_installed_aprsd_items()
return plugins
def get_installed_extensions():
# installed plugins
plugins, extensions = _get_installed_aprsd_items()
return extensions
ip = {}
for finder, name, ispkg in pkgutil.iter_modules():
if name.startswith("aprsd_"):
if ispkg:
module = importlib.import_module(name)
pkgs = walk_package(module)
for pkg in pkgs:
pkg_info = get_module_info(module.__name__, pkg.name, module.__path__[0])
ip[name] = pkg_info
return ip
def show_built_in_plugins(console):
modules = [fortune, notify, ping, time, version, weather]
modules = [email, fortune, location, notify, ping, query, time, version, weather]
plugins = []
for module in modules:
@ -126,142 +113,88 @@ def show_built_in_plugins(console):
cls = entry[1]
if issubclass(cls, aprsd_plugin.APRSDPluginBase):
info = {
'name': cls.__qualname__,
'path': f'{cls.__module__}.{cls.__qualname__}',
'version': cls.version,
'docstring': cls.__doc__,
'short_desc': cls.short_description,
"name": cls.__qualname__,
"path": f"{cls.__module__}.{cls.__qualname__}",
"version": cls.version,
"docstring": cls.__doc__,
"short_desc": cls.short_description,
}
if issubclass(cls, aprsd_plugin.APRSDRegexCommandPluginBase):
info['command_regex'] = cls.command_regex
info['type'] = 'RegexCommand'
info["command_regex"] = cls.command_regex
info["type"] = "RegexCommand"
if issubclass(cls, aprsd_plugin.APRSDWatchListPluginBase):
info['type'] = 'WatchList'
info["type"] = "WatchList"
plugins.append(info)
plugins = sorted(plugins, key=lambda i: i['name'])
plugins = sorted(plugins, key=lambda i: i["name"])
table = Table(
title='[not italic]:snake:[/] [bold][magenta]APRSD Built-in Plugins [not italic]:snake:[/]',
title="[not italic]:snake:[/] [bold][magenta]APRSD Built-in Plugins [not italic]:snake:[/]",
)
table.add_column('Plugin Name', style='cyan', no_wrap=True)
table.add_column('Info', style='bold yellow')
table.add_column('Type', style='bold green')
table.add_column('Plugin Path', style='bold blue')
table.add_column("Plugin Name", style="cyan", no_wrap=True)
table.add_column("Info", style="bold yellow")
table.add_column("Type", style="bold green")
table.add_column("Plugin Path", style="bold blue")
for entry in plugins:
table.add_row(entry['name'], entry['short_desc'], entry['type'], entry['path'])
table.add_row(entry["name"], entry["short_desc"], entry["type"], entry["path"])
console.print(table)
def _get_pypi_packages():
if simple_r := requests.get(
'https://pypi.org/simple',
headers={'Accept': 'application/vnd.pypi.simple.v1+json'},
):
simple_response = simple_r.json()
else:
simple_response = {}
key = 'aprsd'
matches = [
p['name'] for p in simple_response['projects'] if p['name'].startswith(key)
]
packages = []
for pkg in matches:
# Get info for first match
if r := requests.get(
f'https://pypi.org/pypi/{pkg}/json',
headers={'Accept': 'application/json'},
):
packages.append(r.json())
return packages
def show_pypi_plugins(installed_plugins, console):
packages = _get_pypi_packages()
query = "aprsd"
api_url = "https://pypi.org/search/"
snippets = []
s = requests.Session()
for page in range(1, 3):
params = {"q": query, "page": page}
r = s.get(api_url, params=params)
soup = BeautifulSoup(r.text, "html.parser")
snippets += soup.select('a[class*="snippet"]')
if not hasattr(s, "start_url"):
s.start_url = r.url.rsplit("&page", maxsplit=1).pop(0)
title = Text.assemble(
('Pypi.org APRSD Installable Plugin Packages\n\n', 'bold magenta'),
('Install any of the following plugins with\n', 'bold yellow'),
("'pip install ", 'bold white'),
("<Plugin Package Name>'", 'cyan'),
("Pypi.org APRSD Installable Plugin Packages\n\n", "bold magenta"),
("Install any of the following plugins with ", "bold yellow"),
("'pip install ", "bold white"),
("<Plugin Package Name>'", "cyan"),
)
table = Table(title=title)
table.add_column('Plugin Package Name', style='cyan', no_wrap=True)
table.add_column('Description', style='yellow')
table.add_column('Version', style='yellow', justify='center')
table.add_column('Released', style='bold green', justify='center')
table.add_column('Installed?', style='red', justify='center')
emoji = ':open_file_folder:'
for package in packages:
link = package['info']['package_url']
version = package['info']['version']
package_name = package['info']['name']
description = package['info']['summary']
created = package['releases'][version][0]['upload_time']
table.add_column("Plugin Package Name", style="cyan", no_wrap=True)
table.add_column("Description", style="yellow")
table.add_column("Version", style="yellow", justify="center")
table.add_column("Released", style="bold green", justify="center")
table.add_column("Installed?", style="red", justify="center")
for snippet in snippets:
link = urljoin(api_url, snippet.get("href"))
package = re.sub(r"\s+", " ", snippet.select_one('span[class*="name"]').text.strip())
version = re.sub(r"\s+", " ", snippet.select_one('span[class*="version"]').text.strip())
created = re.sub(r"\s+", " ", snippet.select_one('span[class*="created"]').text.strip())
description = re.sub(r"\s+", " ", snippet.select_one('p[class*="description"]').text.strip())
emoji = ":open_file_folder:"
if 'aprsd-' not in package_name or '-plugin' not in package_name:
if "aprsd-" not in package or "-plugin" not in package:
continue
under = package_name.replace('-', '_')
installed = 'Yes' if under in installed_plugins else 'No'
under = package.replace("-", "_")
if under in installed_plugins:
installed = "Yes"
else:
installed = "No"
table.add_row(
f'[link={link}]{emoji}[/link] {package_name}',
description,
version,
created,
installed,
f"[link={link}]{emoji}[/link] {package}",
description, version, created, installed,
)
console.print('\n')
console.print(table)
def show_pypi_extensions(installed_extensions, console):
packages = _get_pypi_packages()
title = Text.assemble(
('Pypi.org APRSD Installable Extension Packages\n\n', 'bold magenta'),
('Install any of the following extensions by running\n', 'bold yellow'),
("'pip install ", 'bold white'),
("<Plugin Package Name>'", 'cyan'),
)
table = Table(title=title)
table.add_column('Extension Package Name', style='cyan', no_wrap=True)
table.add_column('Description', style='yellow')
table.add_column('Version', style='yellow', justify='center')
table.add_column('Released', style='bold green', justify='center')
table.add_column('Installed?', style='red', justify='center')
emoji = ':open_file_folder:'
for package in packages:
link = package['info']['package_url']
version = package['info']['version']
package_name = package['info']['name']
description = package['info']['summary']
created = package['releases'][version][0]['upload_time']
if 'aprsd-' not in package_name or '-extension' not in package_name:
continue
under = package_name.replace('-', '_')
installed = 'Yes' if under in installed_extensions else 'No'
table.add_row(
f'[link={link}]{emoji}[/link] {package_name}',
description,
version,
created,
installed,
)
console.print('\n')
console.print("\n")
console.print(table)
return
def show_installed_plugins(installed_plugins, console):
@ -269,24 +202,24 @@ def show_installed_plugins(installed_plugins, console):
return
table = Table(
title='[not italic]:snake:[/] [bold][magenta]APRSD Installed 3rd party Plugins [not italic]:snake:[/]',
title="[not italic]:snake:[/] [bold][magenta]APRSD Installed 3rd party Plugins [not italic]:snake:[/]",
)
table.add_column('Package Name', style=' bold white', no_wrap=True)
table.add_column('Plugin Name', style='cyan', no_wrap=True)
table.add_column('Version', style='yellow', justify='center')
table.add_column('Type', style='bold green')
table.add_column('Plugin Path', style='bold blue')
table.add_column("Package Name", style=" bold white", no_wrap=True)
table.add_column("Plugin Name", style="cyan", no_wrap=True)
table.add_column("Version", style="yellow", justify="center")
table.add_column("Type", style="bold green")
table.add_column("Plugin Path", style="bold blue")
for name in installed_plugins:
for plugin in installed_plugins[name]:
table.add_row(
name.replace('_', '-'),
plugin['name'],
plugin['version'],
plugin_type(plugin['obj']),
plugin['path'],
name.replace("_", "-"),
plugin["name"],
plugin["version"],
plugin_type(plugin["obj"]),
plugin["path"],
)
console.print('\n')
console.print("\n")
console.print(table)
@ -298,28 +231,12 @@ def list_plugins(ctx):
"""List the built in plugins available to APRSD."""
console = Console()
with console.status('Show Built-in Plugins') as status:
with console.status("Show Built-in Plugins") as status:
show_built_in_plugins(console)
status.update('Fetching pypi.org plugins')
status.update("Fetching pypi.org plugins")
installed_plugins = get_installed_plugins()
show_pypi_plugins(installed_plugins, console)
status.update('Looking for installed APRSD plugins')
status.update("Looking for installed APRSD plugins")
show_installed_plugins(installed_plugins, console)
@cli.command()
@cli_helper.add_options(cli_helper.common_options)
@click.pass_context
@cli_helper.process_standard_options_no_config
def list_extensions(ctx):
"""List the built in plugins available to APRSD."""
console = Console()
with console.status('Show APRSD Extensions') as status:
status.update('Fetching pypi.org APRSD Extensions')
status.update('Looking for installed APRSD Extensions')
installed_extensions = get_installed_extensions()
show_pypi_extensions(installed_extensions, console)

View File

@ -10,176 +10,114 @@ import sys
import time
import click
from loguru import logger
from oslo_config import cfg
from rich.console import Console
# local imports here
import aprsd
from aprsd import cli_helper, packets, plugin, threads, utils
from aprsd.client.client import APRSDClient
from aprsd import cli_helper, client, packets, plugin, stats, threads
from aprsd.main import cli
from aprsd.packets import collector as packet_collector
from aprsd.packets import core, seen_list
from aprsd.packets import log as packet_log
from aprsd.packets.filter import PacketFilter
from aprsd.packets.filters import dupe_filter, packet_type
from aprsd.stats import collector
from aprsd.threads import keepalive, rx
from aprsd.threads import stats as stats_thread
from aprsd.threads.aprsd import APRSDThread
from aprsd.rpc import server as rpc_server
from aprsd.threads import rx
# setup the global logger
# log.basicConfig(level=log.DEBUG) # level=10
LOG = logging.getLogger('APRSD')
LOG = logging.getLogger("APRSD")
CONF = cfg.CONF
LOGU = logger
console = Console()
def signal_handler(sig, frame):
threads.APRSDThreadList().stop_all()
if 'subprocess' not in str(frame):
if "subprocess" not in str(frame):
LOG.info(
'Ctrl+C, Sending all threads exit! Can take up to 10 seconds {}'.format(
"Ctrl+C, Sending all threads exit! Can take up to 10 seconds {}".format(
datetime.datetime.now(),
),
)
time.sleep(5)
# Last save to disk
collector.Collector().collect()
LOG.info(stats.APRSDStats())
class APRSDListenProcessThread(rx.APRSDFilterThread):
def __init__(
self,
packet_queue,
packet_filter=None,
plugin_manager=None,
enabled_plugins=None,
log_packets=False,
):
super().__init__('ListenProcThread', packet_queue)
class APRSDListenThread(rx.APRSDRXThread):
def __init__(self, packet_queue, packet_filter=None, plugin_manager=None):
super().__init__(packet_queue)
self.packet_filter = packet_filter
self.plugin_manager = plugin_manager
if self.plugin_manager:
LOG.info(f'Plugins {self.plugin_manager.get_message_plugins()}')
self.log_packets = log_packets
LOG.info(f"Plugins {self.plugin_manager.get_message_plugins()}")
def print_packet(self, packet):
if self.log_packets:
packet_log.log(packet)
def process_packet(self, *args, **kwargs):
packet = self._client.decode_packet(*args, **kwargs)
filters = {
packets.Packet.__name__: packets.Packet,
packets.AckPacket.__name__: packets.AckPacket,
packets.GPSPacket.__name__: packets.GPSPacket,
packets.MessagePacket.__name__: packets.MessagePacket,
packets.MicEPacket.__name__: packets.MicEPacket,
packets.WeatherPacket.__name__: packets.WeatherPacket,
}
def process_packet(self, packet: type[core.Packet]):
if self.plugin_manager:
# Don't do anything with the reply.
# This is the listen only command.
self.plugin_manager.run(packet)
if self.packet_filter:
filter_class = filters[self.packet_filter]
if isinstance(packet, filter_class):
packet.log(header="RX")
if self.plugin_manager:
# Don't do anything with the reply
# This is the listen only command.
self.plugin_manager.run(packet)
else:
if self.plugin_manager:
# Don't do anything with the reply.
# This is the listen only command.
self.plugin_manager.run(packet)
else:
packet.log(header="RX")
class ListenStatsThread(APRSDThread):
"""Log the stats from the PacketList."""
def __init__(self):
super().__init__('PacketStatsLog')
self._last_total_rx = 0
self.period = 31
def loop(self):
if self.loop_count % self.period == 0:
# log the stats every 10 seconds
stats_json = collector.Collector().collect()
stats = stats_json['PacketList']
total_rx = stats['rx']
packet_count = len(stats['packets'])
rx_delta = total_rx - self._last_total_rx
rate = rx_delta / self.period
# Log summary stats
LOGU.opt(colors=True).info(
f'<green>RX Rate: {rate:.2f} pps</green> '
f'<yellow>Total RX: {total_rx}</yellow> '
f'<red>RX Last {self.period} secs: {rx_delta}</red> '
f'<white>Packets in PacketListStats: {packet_count}</white>',
)
self._last_total_rx = total_rx
# Log individual type stats
for k, v in stats['types'].items():
thread_hex = f'fg {utils.hex_from_name(k)}'
LOGU.opt(colors=True).info(
f'<{thread_hex}>{k:<15}</{thread_hex}> '
f'<blue>RX: {v["rx"]}</blue> <red>TX: {v["tx"]}</red>',
)
time.sleep(1)
return True
packets.PacketList().rx(packet)
@cli.command()
@cli_helper.add_options(cli_helper.common_options)
@click.option(
'--aprs-login',
envvar='APRS_LOGIN',
"--aprs-login",
envvar="APRS_LOGIN",
show_envvar=True,
help='What callsign to send the message from.',
help="What callsign to send the message from.",
)
@click.option(
'--aprs-password',
envvar='APRS_PASSWORD',
"--aprs-password",
envvar="APRS_PASSWORD",
show_envvar=True,
help='the APRS-IS password for APRS_LOGIN',
help="the APRS-IS password for APRS_LOGIN",
)
@click.option(
'--packet-filter',
"--packet-filter",
type=click.Choice(
[
packets.Packet.__name__,
packets.AckPacket.__name__,
packets.BeaconPacket.__name__,
packets.GPSPacket.__name__,
packets.MicEPacket.__name__,
packets.MessagePacket.__name__,
packets.ObjectPacket.__name__,
packets.RejectPacket.__name__,
packets.StatusPacket.__name__,
packets.ThirdPartyPacket.__name__,
packets.UnknownPacket.__name__,
packets.WeatherPacket.__name__,
],
case_sensitive=False,
),
multiple=True,
default=[],
help='Filter by packet type',
help="Filter by packet type",
)
@click.option(
'--enable-plugin',
multiple=True,
help='Enable a plugin. This is the name of the file in the plugins directory.',
)
@click.option(
'--load-plugins',
"--load-plugins",
default=False,
is_flag=True,
help='Load plugins as enabled in aprsd.conf ?',
help="Load plugins as enabled in aprsd.conf ?",
)
@click.argument(
'filter',
"filter",
nargs=-1,
required=True,
)
@click.option(
'--log-packets',
default=False,
is_flag=True,
help='Log incoming packets.',
)
@click.option(
'--enable-packet-stats',
default=False,
is_flag=True,
help='Enable packet stats periodic logging.',
)
@click.pass_context
@cli_helper.process_standard_options
def listen(
@ -187,11 +125,8 @@ def listen(
aprs_login,
aprs_password,
packet_filter,
enable_plugin,
load_plugins,
filter,
log_packets,
enable_packet_stats,
):
"""Listen to packets on the APRS-IS Network based on FILTER.
@ -208,108 +143,72 @@ def listen(
if not aprs_login:
click.echo(ctx.get_help())
click.echo('')
ctx.fail('Must set --aprs-login or APRS_LOGIN')
click.echo("")
ctx.fail("Must set --aprs_login or APRS_LOGIN")
ctx.exit()
if not aprs_password:
click.echo(ctx.get_help())
click.echo('')
ctx.fail('Must set --aprs-password or APRS_PASSWORD')
click.echo("")
ctx.fail("Must set --aprs-password or APRS_PASSWORD")
ctx.exit()
# CONF.aprs_network.login = aprs_login
# config["aprs"]["password"] = aprs_password
LOG.info(f'APRSD Listen Started version: {aprsd.__version__}')
LOG.info(f"APRSD Listen Started version: {aprsd.__version__}")
CONF.log_opt_values(LOG, logging.DEBUG)
collector.Collector()
# Try and load saved MsgTrack list
LOG.debug('Loading saved MsgTrack object.')
LOG.debug("Loading saved MsgTrack object.")
# Initialize the client factory and create
# The correct client object ready for use
client.ClientFactory.setup()
# Make sure we have 1 client transport enabled
if not APRSDClient().is_enabled:
LOG.error('No Clients are enabled in config.')
if not client.factory.is_client_enabled():
LOG.error("No Clients are enabled in config.")
sys.exit(-1)
# Creates the client object
LOG.info('Creating client connection')
aprs_client = APRSDClient()
LOG.info("Creating client connection")
aprs_client = client.factory.create()
LOG.info(aprs_client)
if not aprs_client.login_success:
# We failed to login, will just quit!
msg = f'Login Failure: {aprs_client.login_failure}'
LOG.error(msg)
print(msg)
sys.exit(-1)
LOG.debug(f"Filter messages on aprsis server by '{filter}'")
LOG.debug(f"Filter by '{filter}'")
aprs_client.set_filter(filter)
keepalive_thread = keepalive.KeepAliveThread()
keepalive = threads.KeepAliveThread()
keepalive.start()
if not CONF.enable_seen_list:
# just deregister the class from the packet collector
packet_collector.PacketCollector().unregister(seen_list.SeenList)
# we don't want the dupe filter to run here.
PacketFilter().unregister(dupe_filter.DupePacketFilter)
if packet_filter:
LOG.info('Enabling packet filtering for {packet_filter}')
packet_type.PacketTypeFilter().set_allow_list(packet_filter)
PacketFilter().register(packet_type.PacketTypeFilter)
else:
LOG.info('No packet filtering enabled.')
if CONF.rpc_settings.enabled:
rpc = rpc_server.APRSDRPCThread()
rpc.start()
pm = None
pm = plugin.PluginManager()
if load_plugins:
pm = plugin.PluginManager()
LOG.info('Loading plugins')
LOG.info("Loading plugins")
pm.setup_plugins(load_help_plugin=False)
elif enable_plugin:
pm = plugin.PluginManager()
pm.setup_plugins(
load_help_plugin=False,
plugin_list=enable_plugin,
)
else:
LOG.warning(
"Not Loading any plugins use --load-plugins to load what's "
'defined in the config file.',
"defined in the config file.",
)
if pm:
for p in pm.get_plugins():
LOG.info('Loaded plugin %s', p.__class__.__name__)
stats = stats_thread.APRSDStatsStoreThread()
stats.start()
LOG.debug('Start APRSDRxThread')
rx_thread = rx.APRSDRXThread(packet_queue=threads.packet_queue)
rx_thread.start()
LOG.debug('Create APRSDListenProcessThread')
listen_thread = APRSDListenProcessThread(
LOG.debug("Create APRSDListenThread")
listen_thread = APRSDListenThread(
packet_queue=threads.packet_queue,
packet_filter=packet_filter,
plugin_manager=pm,
enabled_plugins=enable_plugin,
log_packets=log_packets,
)
LOG.debug('Start APRSDListenProcessThread')
LOG.debug("Start APRSDListenThread")
listen_thread.start()
if enable_packet_stats:
listen_stats = ListenStatsThread()
listen_stats.start()
keepalive_thread.start()
LOG.debug('keepalive Join')
keepalive_thread.join()
rx_thread.join()
LOG.debug("keepalive Join")
keepalive.join()
LOG.debug("listen_thread Join")
listen_thread.join()
stats.join()
if CONF.rpc_settings.enabled:
rpc.join()

View File

@ -3,60 +3,54 @@ import sys
import time
import aprslib
import click
from aprslib.exceptions import LoginError
import click
from oslo_config import cfg
import aprsd
import aprsd.packets # noqa : F401
from aprsd import (
cli_helper,
conf, # noqa : F401
packets,
)
from aprsd.client.client import APRSDClient
from aprsd import cli_helper, client, packets
from aprsd import conf # noqa : F401
from aprsd.main import cli
from aprsd.packets import collector
from aprsd.packets import log as packet_log
from aprsd.threads import tx
CONF = cfg.CONF
LOG = logging.getLogger('APRSD')
LOG = logging.getLogger("APRSD")
@cli.command()
@cli_helper.add_options(cli_helper.common_options)
@click.option(
'--aprs-login',
envvar='APRS_LOGIN',
"--aprs-login",
envvar="APRS_LOGIN",
show_envvar=True,
help='What callsign to send the message from. Defaults to config entry.',
help="What callsign to send the message from. Defaults to config entry.",
)
@click.option(
'--aprs-password',
envvar='APRS_PASSWORD',
"--aprs-password",
envvar="APRS_PASSWORD",
show_envvar=True,
help='the APRS-IS password for APRS_LOGIN. Defaults to config entry.',
help="the APRS-IS password for APRS_LOGIN. Defaults to config entry.",
)
@click.option(
'--no-ack',
'-n',
"--no-ack",
"-n",
is_flag=True,
show_default=True,
default=False,
help="Don't wait for an ack, just sent it to APRS-IS and bail.",
)
@click.option(
'--wait-response',
'-w',
"--wait-response",
"-w",
is_flag=True,
show_default=True,
default=False,
help='Wait for a response to the message?',
help="Wait for a response to the message?",
)
@click.option('--raw', default=None, help='Send a raw message. Implies --no-ack')
@click.argument('tocallsign', required=True)
@click.argument('command', nargs=-1, required=True)
@click.option("--raw", default=None, help="Send a raw message. Implies --no-ack")
@click.argument("tocallsign", required=True)
@click.argument("command", nargs=-1, required=True)
@click.pass_context
@cli_helper.process_standard_options
def send_message(
@ -71,42 +65,47 @@ def send_message(
):
"""Send a message to a callsign via APRS_IS."""
global got_ack, got_response
quiet = ctx.obj['quiet']
quiet = ctx.obj["quiet"]
if not aprs_login:
if CONF.aprs_network.login == conf.client.DEFAULT_LOGIN:
click.echo('Must set --aprs_login or APRS_LOGIN')
click.echo("Must set --aprs_login or APRS_LOGIN")
ctx.exit(-1)
return
else:
aprs_login = CONF.aprs_network.login
if not aprs_password:
LOG.warning(CONF.aprs_network.password)
if not CONF.aprs_network.password:
click.echo('Must set --aprs-password or APRS_PASSWORD')
click.echo("Must set --aprs-password or APRS_PASSWORD")
ctx.exit(-1)
return
else:
aprs_password = CONF.aprs_network.password
LOG.info(f'APRSD LISTEN Started version: {aprsd.__version__}')
LOG.info(f"APRSD LISTEN Started version: {aprsd.__version__}")
if type(command) is tuple:
command = ' '.join(command)
command = " ".join(command)
if not quiet:
if raw:
LOG.info(f"L'{aprs_login}' R'{raw}'")
else:
LOG.info(f"L'{aprs_login}' To'{tocallsign}' C'{command}'")
packets.PacketList()
packets.WatchList()
packets.SeenList()
got_ack = False
got_response = False
def rx_packet(packet):
global got_ack, got_response
cl = APRSDClient()
cl = client.factory.create()
packet = cl.decode_packet(packet)
collector.PacketCollector().rx(packet)
packet_log.log(packet, tx=False)
packets.PacketList().rx(packet)
packet.log("RX")
# LOG.debug("Got packet back {}".format(packet))
if isinstance(packet, packets.AckPacket):
got_ack = True
@ -131,7 +130,8 @@ def send_message(
sys.exit(0)
try:
APRSDClient().client # noqa: B018
client.ClientFactory.setup()
client.factory.create().client
except LoginError:
sys.exit(-1)
@ -142,7 +142,7 @@ def send_message(
# message
if raw:
tx.send(
packets.Packet(from_call='', to_call='', raw=raw),
packets.Packet(from_call="", to_call="", raw=raw),
direct=True,
)
sys.exit(0)
@ -163,10 +163,10 @@ def send_message(
# This will register a packet consumer with aprslib
# When new packets come in the consumer will process
# the packet
aprs_client = APRSDClient()
aprs_client = client.factory.create().client
aprs_client.consumer(rx_packet, raw=False)
except aprslib.exceptions.ConnectionDrop:
LOG.error('Connection dropped, reconnecting')
LOG.error("Connection dropped, reconnecting")
time.sleep(5)
# Force the deletion of the client object connected to aprs
# This will cause a reconnect, next time client.get_client()

View File

@ -6,30 +6,29 @@ import click
from oslo_config import cfg
import aprsd
from aprsd import cli_helper, plugin, threads, utils
from aprsd import cli_helper, client
from aprsd import main as aprsd_main
from aprsd.client.client import APRSDClient
from aprsd import packets, plugin, threads, utils
from aprsd.main import cli
from aprsd.packets import collector as packet_collector
from aprsd.packets import seen_list
from aprsd.threads import keepalive, registry, rx, service, tx
from aprsd.threads import stats as stats_thread
from aprsd.rpc import server as rpc_server
from aprsd.threads import rx
CONF = cfg.CONF
LOG = logging.getLogger('APRSD')
LOG = logging.getLogger("APRSD")
# main() ###
@cli.command()
@cli_helper.add_options(cli_helper.common_options)
@click.option(
'-f',
'--flush',
'flush',
"-f",
"--flush",
"flush",
is_flag=True,
show_default=True,
default=False,
help='Flush out all old aged messages on disk.',
help="Flush out all old aged messages on disk.",
)
@click.pass_context
@cli_helper.process_standard_options
@ -38,93 +37,84 @@ def server(ctx, flush):
signal.signal(signal.SIGINT, aprsd_main.signal_handler)
signal.signal(signal.SIGTERM, aprsd_main.signal_handler)
service_threads = service.ServiceThreads()
level, msg = utils._check_version()
if level:
LOG.warning(msg)
else:
LOG.info(msg)
LOG.info(f'APRSD Started version: {aprsd.__version__}')
LOG.info(f"APRSD Started version: {aprsd.__version__}")
# Make sure we have 1 client transport enabled
if not APRSDClient().is_enabled:
LOG.error('No Clients are enabled in config.')
sys.exit(-1)
if not APRSDClient().is_configured:
LOG.error('APRS client is not properly configured in config file.')
sys.exit(-1)
# Creates the client object
LOG.info('Creating client connection')
aprs_client = APRSDClient()
LOG.info(aprs_client)
if not aprs_client.login_success:
# We failed to login, will just quit!
msg = f'Login Failure: {aprs_client.login_failure}'
LOG.error(msg)
print(msg)
sys.exit(-1)
# Check to make sure the login worked.
# Initialize the client factory and create
# The correct client object ready for use
client.ClientFactory.setup()
# Create the initial PM singleton and Register plugins
# We register plugins first here so we can register each
# plugins config options, so we can dump them all in the
# log file output.
LOG.info('Loading Plugin Manager and registering plugins')
LOG.info("Loading Plugin Manager and registering plugins")
plugin_manager = plugin.PluginManager()
plugin_manager.setup_plugins(load_help_plugin=CONF.load_help_plugin)
plugin_manager.setup_plugins()
# Dump all the config options now.
CONF.log_opt_values(LOG, logging.DEBUG)
message_plugins = plugin_manager.get_message_plugins()
watchlist_plugins = plugin_manager.get_watchlist_plugins()
LOG.info('Message Plugins enabled and running:')
LOG.info("Message Plugins enabled and running:")
for p in message_plugins:
LOG.info(p)
LOG.info('Watchlist Plugins enabled and running:')
LOG.info("Watchlist Plugins enabled and running:")
for p in watchlist_plugins:
LOG.info(p)
if not CONF.enable_seen_list:
# just deregister the class from the packet collector
packet_collector.PacketCollector().unregister(seen_list.SeenList)
# Make sure we have 1 client transport enabled
if not client.factory.is_client_enabled():
LOG.error("No Clients are enabled in config.")
sys.exit(-1)
if not client.factory.is_client_configured():
LOG.error("APRS client is not properly configured in config file.")
sys.exit(-1)
# Creates the client object
# LOG.info("Creating client connection")
# client.factory.create().client
# Now load the msgTrack from disk if any
packets.PacketList()
if flush:
LOG.debug('Flushing All packet tracking objects.')
packet_collector.PacketCollector().flush()
LOG.debug("Deleting saved MsgTrack.")
packets.PacketTrack().flush()
packets.WatchList().flush()
packets.SeenList().flush()
else:
# Try and load saved MsgTrack list
LOG.debug('Loading saved packet tracking data.')
packet_collector.PacketCollector().load()
LOG.debug("Loading saved MsgTrack object.")
packets.PacketTrack().load()
packets.WatchList().load()
packets.SeenList().load()
# Now start all the main processing threads.
keepalive = threads.KeepAliveThread()
keepalive.start()
service_threads.register(keepalive.KeepAliveThread())
service_threads.register(stats_thread.APRSDStatsStoreThread())
service_threads.register(
rx.APRSDRXThread(
packet_queue=threads.packet_queue,
),
rx_thread = rx.APRSDPluginRXThread(
packet_queue=threads.packet_queue,
)
service_threads.register(
rx.APRSDPluginProcessPacketThread(
packet_queue=threads.packet_queue,
),
process_thread = rx.APRSDPluginProcessPacketThread(
packet_queue=threads.packet_queue,
)
rx_thread.start()
process_thread.start()
if CONF.enable_beacon:
LOG.info('Beacon Enabled. Starting Beacon thread.')
service_threads.register(tx.BeaconSendThread())
packets.PacketTrack().restart()
if CONF.aprs_registry.enabled:
LOG.info('Registry Enabled. Starting Registry thread.')
service_threads.register(registry.APRSRegistryThread())
if CONF.rpc_settings.enabled:
rpc = rpc_server.APRSDRPCThread()
rpc.start()
log_monitor = threads.log_monitor.LogMonitorThread()
log_monitor.start()
service_threads.start()
service_threads.join()
rx_thread.join()
process_thread.join()
return 0

465
aprsd/cmds/webchat.py Normal file
View File

@ -0,0 +1,465 @@
import datetime
import json
import logging
from logging.handlers import RotatingFileHandler
import signal
import sys
import threading
import time
from aprslib import util as aprslib_util
import click
import flask
from flask import request
from flask.logging import default_handler
from flask_httpauth import HTTPBasicAuth
from flask_socketio import Namespace, SocketIO
from oslo_config import cfg
from werkzeug.security import check_password_hash, generate_password_hash
import wrapt
import aprsd
from aprsd import cli_helper, client, conf, packets, stats, threads, utils
from aprsd.log import rich as aprsd_logging
from aprsd.main import cli
from aprsd.threads import rx, tx
from aprsd.utils import trace
CONF = cfg.CONF
LOG = logging.getLogger("APRSD")
auth = HTTPBasicAuth()
users = {}
socketio = None
flask_app = flask.Flask(
"aprsd",
static_url_path="/static",
static_folder="web/chat/static",
template_folder="web/chat/templates",
)
def signal_handler(sig, frame):
click.echo("signal_handler: called")
LOG.info(
f"Ctrl+C, Sending all threads({len(threads.APRSDThreadList())}) exit! "
f"Can take up to 10 seconds {datetime.datetime.now()}",
)
threads.APRSDThreadList().stop_all()
if "subprocess" not in str(frame):
time.sleep(1.5)
# packets.WatchList().save()
# packets.SeenList().save()
LOG.info(stats.APRSDStats())
LOG.info("Telling flask to bail.")
signal.signal(signal.SIGTERM, sys.exit(0))
class SentMessages:
_instance = None
lock = threading.Lock()
data = {}
def __new__(cls, *args, **kwargs):
"""This magic turns this into a singleton."""
if cls._instance is None:
cls._instance = super().__new__(cls)
return cls._instance
def is_initialized(self):
return True
@wrapt.synchronized(lock)
def add(self, msg):
self.data[msg.msgNo] = msg.__dict__
@wrapt.synchronized(lock)
def __len__(self):
return len(self.data.keys())
@wrapt.synchronized(lock)
def get(self, id):
if id in self.data:
return self.data[id]
@wrapt.synchronized(lock)
def get_all(self):
return self.data
@wrapt.synchronized(lock)
def set_status(self, id, status):
if id in self.data:
self.data[id]["last_update"] = str(datetime.datetime.now())
self.data[id]["status"] = status
@wrapt.synchronized(lock)
def ack(self, id):
"""The message got an ack!"""
if id in self.data:
self.data[id]["last_update"] = str(datetime.datetime.now())
self.data[id]["ack"] = True
@wrapt.synchronized(lock)
def reply(self, id, packet):
"""We got a packet back from the sent message."""
if id in self.data:
self.data[id]["reply"] = packet
# HTTPBasicAuth doesn't work on a class method.
# This has to be out here. Rely on the APRSDFlask
# class to initialize the users from the config
@auth.verify_password
def verify_password(username, password):
global users
if username in users and check_password_hash(users[username], password):
return username
class WebChatProcessPacketThread(rx.APRSDProcessPacketThread):
"""Class that handles packets being sent to us."""
def __init__(self, packet_queue, socketio):
self.socketio = socketio
self.connected = False
super().__init__(packet_queue)
def process_ack_packet(self, packet: packets.AckPacket):
super().process_ack_packet(packet)
ack_num = packet.get("msgNo")
SentMessages().ack(int(ack_num))
self.socketio.emit(
"ack", SentMessages().get(int(ack_num)),
namespace="/sendmsg",
)
self.got_ack = True
def process_our_message_packet(self, packet: packets.MessagePacket):
LOG.info(f"process MessagePacket {repr(packet)}")
self.socketio.emit(
"new", packet.__dict__,
namespace="/sendmsg",
)
def set_config():
global users
def _get_transport(stats):
if CONF.aprs_network.enabled:
transport = "aprs-is"
aprs_connection = (
"APRS-IS Server: <a href='http://status.aprs2.net' >"
"{}</a>".format(stats["stats"]["aprs-is"]["server"])
)
else:
# We might be connected to a KISS socket?
if client.KISSClient.is_enabled():
transport = client.KISSClient.transport()
if transport == client.TRANSPORT_TCPKISS:
aprs_connection = (
"TCPKISS://{}:{}".format(
CONF.kiss_tcp.host,
CONF.kiss_tcp.port,
)
)
elif transport == client.TRANSPORT_SERIALKISS:
# for pep8 violation
aprs_connection = (
"SerialKISS://{}@{} baud".format(
CONF.kiss_serial.device,
CONF.kiss_serial.baudrate,
),
)
return transport, aprs_connection
@auth.login_required
@flask_app.route("/")
def index():
stats = _stats()
# For development
html_template = "index.html"
LOG.debug(f"Template {html_template}")
transport, aprs_connection = _get_transport(stats)
LOG.debug(f"transport {transport} aprs_connection {aprs_connection}")
stats["transport"] = transport
stats["aprs_connection"] = aprs_connection
LOG.debug(f"initial stats = {stats}")
latitude = CONF.webchat.latitude
if latitude:
latitude = float(CONF.webchat.latitude)
longitude = CONF.webchat.longitude
if longitude:
longitude = float(longitude)
return flask.render_template(
html_template,
initial_stats=stats,
aprs_connection=aprs_connection,
callsign=CONF.callsign,
version=aprsd.__version__,
latitude=latitude,
longitude=longitude,
)
@auth.login_required
@flask_app.route("//send-message-status")
def send_message_status():
LOG.debug(request)
msgs = SentMessages()
info = msgs.get_all()
return json.dumps(info)
def _stats():
stats_obj = stats.APRSDStats()
now = datetime.datetime.now()
time_format = "%m-%d-%Y %H:%M:%S"
stats_dict = stats_obj.stats()
# Webchat doesnt need these
if "watch_list" in stats_dict["aprsd"]:
del stats_dict["aprsd"]["watch_list"]
if "seen_list" in stats_dict["aprsd"]:
del stats_dict["aprsd"]["seen_list"]
if "threads" in stats_dict["aprsd"]:
del stats_dict["aprsd"]["threads"]
# del stats_dict["email"]
# del stats_dict["plugins"]
# del stats_dict["messages"]
result = {
"time": now.strftime(time_format),
"stats": stats_dict,
}
return result
@flask_app.route("/stats")
def get_stats():
return json.dumps(_stats())
class SendMessageNamespace(Namespace):
"""Class to handle the socketio interactions."""
got_ack = False
reply_sent = False
msg = None
request = None
def __init__(self, namespace=None, config=None):
super().__init__(namespace)
def on_connect(self):
global socketio
LOG.debug("Web socket connected")
socketio.emit(
"connected", {"data": "/sendmsg Connected"},
namespace="/sendmsg",
)
def on_disconnect(self):
LOG.debug("WS Disconnected")
def on_send(self, data):
global socketio
LOG.debug(f"WS: on_send {data}")
self.request = data
data["from"] = CONF.callsign
pkt = packets.MessagePacket(
from_call=data["from"],
to_call=data["to"].upper(),
message_text=data["message"],
)
pkt.prepare()
self.msg = pkt
msgs = SentMessages()
msgs.add(pkt)
tx.send(pkt)
msgs.set_status(pkt.msgNo, "Sending")
obj = msgs.get(pkt.msgNo)
socketio.emit(
"sent", obj,
namespace="/sendmsg",
)
def on_gps(self, data):
LOG.debug(f"WS on_GPS: {data}")
lat = aprslib_util.latitude_to_ddm(data["latitude"])
long = aprslib_util.longitude_to_ddm(data["longitude"])
LOG.debug(f"Lat DDM {lat}")
LOG.debug(f"Long DDM {long}")
tx.send(
packets.GPSPacket(
from_call=CONF.callsign,
to_call="APDW16",
latitude=lat,
longitude=long,
comment="APRSD WebChat Beacon",
),
direct=True,
)
def handle_message(self, data):
LOG.debug(f"WS Data {data}")
def handle_json(self, data):
LOG.debug(f"WS json {data}")
def setup_logging(flask_app, loglevel, quiet):
flask_log = logging.getLogger("werkzeug")
flask_app.logger.removeHandler(default_handler)
flask_log.removeHandler(default_handler)
log_level = conf.log.LOG_LEVELS[loglevel]
flask_log.setLevel(log_level)
date_format = CONF.logging.date_format
if CONF.logging.rich_logging and not quiet:
log_format = "%(message)s"
log_formatter = logging.Formatter(fmt=log_format, datefmt=date_format)
rh = aprsd_logging.APRSDRichHandler(
show_thread=True, thread_width=15,
rich_tracebacks=True, omit_repeated_times=False,
)
rh.setFormatter(log_formatter)
flask_log.addHandler(rh)
log_file = CONF.logging.logfile
if log_file:
log_format = CONF.logging.logformat
log_formatter = logging.Formatter(fmt=log_format, datefmt=date_format)
fh = RotatingFileHandler(
log_file, maxBytes=(10248576 * 5),
backupCount=4,
)
fh.setFormatter(log_formatter)
flask_log.addHandler(fh)
@trace.trace
def init_flask(loglevel, quiet):
global socketio, flask_app
setup_logging(flask_app, loglevel, quiet)
socketio = SocketIO(
flask_app, logger=False, engineio_logger=False,
async_mode="threading",
)
# async_mode="gevent",
# async_mode="eventlet",
# import eventlet
# eventlet.monkey_patch()
socketio.on_namespace(
SendMessageNamespace(
"/sendmsg",
),
)
return socketio
# main() ###
@cli.command()
@cli_helper.add_options(cli_helper.common_options)
@click.option(
"-f",
"--flush",
"flush",
is_flag=True,
show_default=True,
default=False,
help="Flush out all old aged messages on disk.",
)
@click.option(
"-p",
"--port",
"port",
show_default=True,
default=None,
help="Port to listen to web requests. This overrides the config.webchat.web_port setting.",
)
@click.pass_context
@cli_helper.process_standard_options
def webchat(ctx, flush, port):
"""Web based HAM Radio chat program!"""
loglevel = ctx.obj["loglevel"]
quiet = ctx.obj["quiet"]
signal.signal(signal.SIGINT, signal_handler)
signal.signal(signal.SIGTERM, signal_handler)
level, msg = utils._check_version()
if level:
LOG.warning(msg)
else:
LOG.info(msg)
LOG.info(f"APRSD Started version: {aprsd.__version__}")
CONF.log_opt_values(LOG, logging.DEBUG)
user = CONF.admin.user
users[user] = generate_password_hash(CONF.admin.password)
if not port:
port = CONF.webchat.web_port
# Initialize the client factory and create
# The correct client object ready for use
client.ClientFactory.setup()
# Make sure we have 1 client transport enabled
if not client.factory.is_client_enabled():
LOG.error("No Clients are enabled in config.")
sys.exit(-1)
if not client.factory.is_client_configured():
LOG.error("APRS client is not properly configured in config file.")
sys.exit(-1)
packets.PacketList()
packets.PacketTrack()
packets.WatchList()
packets.SeenList()
keepalive = threads.KeepAliveThread()
LOG.info("Start KeepAliveThread")
keepalive.start()
socketio = init_flask(loglevel, quiet)
rx_thread = rx.APRSDPluginRXThread(
packet_queue=threads.packet_queue,
)
rx_thread.start()
process_thread = WebChatProcessPacketThread(
packet_queue=threads.packet_queue,
socketio=socketio,
)
process_thread.start()
LOG.info("Start socketio.run()")
socketio.run(
flask_app,
# This is broken for now after removing cryptography
# and pyopenssl
# ssl_context="adhoc",
host=CONF.webchat.web_ip,
port=port,
allow_unsafe_werkzeug=True,
)
LOG.info("WebChat exiting!!!! Bye.")

View File

@ -1,6 +1,6 @@
from oslo_config import cfg
from aprsd.conf import client, common, log, plugin_common
from aprsd.conf import client, common, log, plugin_common, plugin_email
CONF = cfg.CONF
@ -11,6 +11,7 @@ client.register_opts(CONF)
# plugins
plugin_common.register_opts(CONF)
plugin_email.register_opts(CONF)
def set_lib_defaults():

View File

@ -4,33 +4,27 @@ The options for log setup
from oslo_config import cfg
DEFAULT_LOGIN = "NOCALL"
aprs_group = cfg.OptGroup(
name="aprs_network",
title="APRS-IS Network settings",
)
kiss_serial_group = cfg.OptGroup(
name="kiss_serial",
title="KISS Serial device connection",
)
kiss_tcp_group = cfg.OptGroup(
name="kiss_tcp",
title="KISS TCP/IP Device connection",
)
fake_client_group = cfg.OptGroup(
name="fake_client",
title="Fake Client settings",
)
aprs_opts = [
cfg.BoolOpt(
"enabled",
default=True,
help="Set enabled to False if there is no internet connectivity."
"This is useful for a direwolf KISS aprs connection only.",
"This is useful for a direwolf KISS aprs connection only.",
),
cfg.StrOpt(
"login",
@ -41,8 +35,8 @@ aprs_opts = [
"password",
secret=True,
help="APRS Password "
"Get the passcode for your callsign here: "
"https://apps.magicbug.co.uk/passcode",
"Get the passcode for your callsign here: "
"https://apps.magicbug.co.uk/passcode",
),
cfg.HostAddressOpt(
"host",
@ -71,11 +65,6 @@ kiss_serial_opts = [
default=9600,
help="The Serial device baud rate for communication",
),
cfg.ListOpt(
"path",
default=["WIDE1-1", "WIDE2-1"],
help="The APRS path to use for wide area coverage.",
),
]
kiss_tcp_opts = [
@ -93,19 +82,6 @@ kiss_tcp_opts = [
default=8001,
help="The KISS TCP/IP network port",
),
cfg.ListOpt(
"path",
default=["WIDE1-1", "WIDE2-1"],
help="The APRS path to use for wide area coverage.",
),
]
fake_client_opts = [
cfg.BoolOpt(
"enabled",
default=False,
help="Enable fake client connection.",
),
]
@ -117,14 +93,10 @@ def register_opts(config):
config.register_opts(kiss_serial_opts, group=kiss_serial_group)
config.register_opts(kiss_tcp_opts, group=kiss_tcp_group)
config.register_group(fake_client_group)
config.register_opts(fake_client_opts, group=fake_client_group)
def list_opts():
return {
aprs_group.name: aprs_opts,
kiss_serial_group.name: kiss_serial_opts,
kiss_tcp_group.name: kiss_tcp_opts,
fake_client_group.name: fake_client_opts,
}

View File

@ -2,220 +2,191 @@ from pathlib import Path
from oslo_config import cfg
home = str(Path.home())
DEFAULT_CONFIG_DIR = f'{home}/.config/aprsd/'
APRSD_DEFAULT_MAGIC_WORD = 'CHANGEME!!!'
DEFAULT_CONFIG_DIR = f"{home}/.config/aprsd/"
APRSD_DEFAULT_MAGIC_WORD = "CHANGEME!!!"
admin_group = cfg.OptGroup(
name="admin",
title="Admin web interface settings",
)
watch_list_group = cfg.OptGroup(
name='watch_list',
title='Watch List settings',
name="watch_list",
title="Watch List settings",
)
rpc_group = cfg.OptGroup(
name="rpc_settings",
title="RPC Settings for admin <--> web",
)
webchat_group = cfg.OptGroup(
name="webchat",
title="Settings specific to the webchat command",
)
registry_group = cfg.OptGroup(
name='aprs_registry',
title='APRS Registry settings',
)
aprsd_opts = [
cfg.StrOpt(
'callsign',
"callsign",
required=True,
help='Callsign to use for messages sent by APRSD',
help="Callsign to use for messages sent by APRSD",
),
cfg.BoolOpt(
'enable_save',
"enable_save",
default=True,
help='Enable saving of watch list, packet tracker between restarts.',
help="Enable saving of watch list, packet tracker between restarts.",
),
cfg.StrOpt(
'save_location',
"save_location",
default=DEFAULT_CONFIG_DIR,
help='Save location for packet tracking files.',
help="Save location for packet tracking files.",
),
cfg.BoolOpt(
'trace_enabled',
"trace_enabled",
default=False,
help='Enable code tracing',
help="Enable code tracing",
),
cfg.StrOpt(
'units',
default='imperial',
help='Units for display, imperial or metric',
"units",
default="imperial",
help="Units for display, imperial or metric",
),
cfg.IntOpt(
'ack_rate_limit_period',
"ack_rate_limit_period",
default=1,
help='The wait period in seconds per Ack packet being sent.'
'1 means 1 ack packet per second allowed.'
'2 means 1 pack packet every 2 seconds allowed',
help="The wait period in seconds per Ack packet being sent."
"1 means 1 ack packet per second allowed."
"2 means 1 pack packet every 2 seconds allowed",
),
cfg.IntOpt(
'msg_rate_limit_period',
"msg_rate_limit_period",
default=2,
help='Wait period in seconds per non AckPacket being sent.'
'2 means 1 packet every 2 seconds allowed.'
'5 means 1 pack packet every 5 seconds allowed',
),
cfg.IntOpt(
'packet_dupe_timeout',
default=300,
help='The number of seconds before a packet is not considered a duplicate.',
),
cfg.BoolOpt(
'enable_beacon',
default=False,
help='Enable sending of a GPS Beacon packet to locate this service. '
'Requires latitude and longitude to be set.',
),
cfg.IntOpt(
'beacon_interval',
default=1800,
help='The number of seconds between beacon packets.',
),
cfg.StrOpt(
'beacon_symbol',
default='/',
help='The symbol to use for the GPS Beacon packet. See: http://www.aprs.net/vm/DOS/SYMBOLS.HTM',
),
cfg.StrOpt(
'latitude',
default=None,
help='Latitude for the GPS Beacon button. If not set, the button will not be enabled.',
),
cfg.StrOpt(
'longitude',
default=None,
help='Longitude for the GPS Beacon button. If not set, the button will not be enabled.',
),
cfg.StrOpt(
'log_packet_format',
choices=['compact', 'multiline', 'both'],
default='compact',
help="When logging packets 'compact' will use a single line formatted for each packet."
"'multiline' will use multiple lines for each packet and is the traditional format."
'both will log both compact and multiline.',
),
cfg.IntOpt(
'default_packet_send_count',
default=3,
help='The number of times to send a non ack packet before giving up.',
),
cfg.IntOpt(
'default_ack_send_count',
default=3,
help='The number of times to send an ack packet in response to recieving a packet.',
),
cfg.IntOpt(
'packet_list_maxlen',
default=100,
help='The maximum number of packets to store in the packet list.',
),
cfg.IntOpt(
'packet_list_stats_maxlen',
default=20,
help='The maximum number of packets to send in the stats dict for admin ui. -1 means no max.',
),
cfg.BoolOpt(
'enable_seen_list',
default=True,
help='Enable the Callsign seen list tracking feature. This allows aprsd to keep track of '
'callsigns that have been seen and when they were last seen.',
),
cfg.BoolOpt(
'enable_packet_logging',
default=True,
help='Set this to False, to disable logging of packets to the log file.',
),
cfg.BoolOpt(
'load_help_plugin',
default=True,
help='Set this to False to disable the help plugin.',
),
cfg.BoolOpt(
'enable_sending_ack_packets',
default=True,
help='Set this to False, to disable sending of ack packets. This will entirely stop'
'APRSD from sending ack packets.',
help="Wait period in seconds per non AckPacket being sent."
"2 means 1 packet every 2 seconds allowed."
"5 means 1 pack packet every 5 seconds allowed",
),
]
watch_list_opts = [
cfg.BoolOpt(
'enabled',
"enabled",
default=False,
help='Enable the watch list feature. Still have to enable '
'the correct plugin. Built-in plugin to use is '
'aprsd.plugins.notify.NotifyPlugin',
help="Enable the watch list feature. Still have to enable "
"the correct plugin. Built-in plugin to use is "
"aprsd.plugins.notify.NotifyPlugin",
),
cfg.ListOpt(
'callsigns',
help='Callsigns to watch for messsages',
"callsigns",
help="Callsigns to watch for messsages",
),
cfg.StrOpt(
'alert_callsign',
help='The Ham Callsign to send messages to for watch list alerts.',
"alert_callsign",
help="The Ham Callsign to send messages to for watch list alerts.",
),
cfg.IntOpt(
'packet_keep_count',
"packet_keep_count",
default=10,
help='The number of packets to store.',
help="The number of packets to store.",
),
cfg.IntOpt(
'alert_time_seconds',
"alert_time_seconds",
default=3600,
help='Time to wait before alert is sent on new message for users in callsigns.',
help="Time to wait before alert is sent on new message for "
"users in callsigns.",
),
]
admin_opts = [
cfg.BoolOpt(
"web_enabled",
default=False,
help="Enable the Admin Web Interface",
),
cfg.IPOpt(
"web_ip",
default="0.0.0.0",
help="The ip address to listen on",
),
cfg.PortOpt(
"web_port",
default=8001,
help="The port to listen on",
),
cfg.StrOpt(
"user",
default="admin",
help="The admin user for the admin web interface",
),
cfg.StrOpt(
"password",
secret=True,
help="Admin interface password",
),
]
rpc_opts = [
cfg.BoolOpt(
"enabled",
default=True,
help="Enable RPC calls",
),
cfg.StrOpt(
"ip",
default="localhost",
help="The ip address to listen on",
),
cfg.PortOpt(
"port",
default=18861,
help="The port to listen on",
),
cfg.StrOpt(
"magic_word",
default=APRSD_DEFAULT_MAGIC_WORD,
help="Magic word to authenticate requests between client/server",
),
]
enabled_plugins_opts = [
cfg.ListOpt(
'enabled_plugins',
"enabled_plugins",
default=[
'aprsd.plugins.fortune.FortunePlugin',
'aprsd.plugins.location.LocationPlugin',
'aprsd.plugins.ping.PingPlugin',
'aprsd.plugins.time.TimePlugin',
'aprsd.plugins.weather.OWMWeatherPlugin',
'aprsd.plugins.version.VersionPlugin',
'aprsd.plugins.notify.NotifySeenPlugin',
"aprsd.plugins.email.EmailPlugin",
"aprsd.plugins.fortune.FortunePlugin",
"aprsd.plugins.location.LocationPlugin",
"aprsd.plugins.ping.PingPlugin",
"aprsd.plugins.query.QueryPlugin",
"aprsd.plugins.time.TimePlugin",
"aprsd.plugins.weather.OWMWeatherPlugin",
"aprsd.plugins.version.VersionPlugin",
"aprsd.plugins.notify.NotifySeenPlugin",
],
help='Comma separated list of enabled plugins for APRSD.'
'To enable installed external plugins add them here.'
'The full python path to the class name must be used',
help="Comma separated list of enabled plugins for APRSD."
"To enable installed external plugins add them here."
"The full python path to the class name must be used",
),
]
registry_opts = [
cfg.BoolOpt(
'enabled',
default=False,
help='Enable sending aprs registry information. This will let the '
"APRS registry know about your service and it's uptime. "
'No personal information is sent, just the callsign, uptime and description. '
'The service callsign is the callsign set in [DEFAULT] section.',
webchat_opts = [
cfg.IPOpt(
"web_ip",
default="0.0.0.0",
help="The ip address to listen on",
),
cfg.PortOpt(
"web_port",
default=8001,
help="The port to listen on",
),
cfg.StrOpt(
'description',
"latitude",
default=None,
help='Description of the service to send to the APRS registry. '
'This is what will show up in the APRS registry.'
'If not set, the description will be the same as the callsign.',
help="Latitude for the GPS Beacon button. If not set, the button will not be enabled.",
),
cfg.StrOpt(
'registry_url',
default='https://aprs.hemna.com/api/v1/registry',
help='The APRS registry domain name to send the information to.',
),
cfg.StrOpt(
'service_website',
"longitude",
default=None,
help='The website for your APRS service to send to the APRS registry.',
),
cfg.IntOpt(
'frequency_seconds',
default=3600,
help='The frequency in seconds to send the APRS registry information.',
help="Longitude for the GPS Beacon button. If not set, the button will not be enabled.",
),
]
@ -223,15 +194,21 @@ registry_opts = [
def register_opts(config):
config.register_opts(aprsd_opts)
config.register_opts(enabled_plugins_opts)
config.register_group(admin_group)
config.register_opts(admin_opts, group=admin_group)
config.register_group(watch_list_group)
config.register_opts(watch_list_opts, group=watch_list_group)
config.register_group(registry_group)
config.register_opts(registry_opts, group=registry_group)
config.register_group(rpc_group)
config.register_opts(rpc_opts, group=rpc_group)
config.register_group(webchat_group)
config.register_opts(webchat_opts, group=webchat_group)
def list_opts():
return {
'DEFAULT': (aprsd_opts + enabled_plugins_opts),
"DEFAULT": (aprsd_opts + enabled_plugins_opts),
admin_group.name: admin_opts,
watch_list_group.name: watch_list_opts,
registry_group.name: registry_opts,
rpc_group.name: rpc_opts,
webchat_group.name: webchat_opts,
}

View File

@ -1,63 +1,55 @@
"""
The options for log setup
"""
import logging
from oslo_config import cfg
LOG_LEVELS = {
'CRITICAL': logging.CRITICAL,
'ERROR': logging.ERROR,
'WARNING': logging.WARNING,
'INFO': logging.INFO,
'DEBUG': logging.DEBUG,
"CRITICAL": logging.CRITICAL,
"ERROR": logging.ERROR,
"WARNING": logging.WARNING,
"INFO": logging.INFO,
"DEBUG": logging.DEBUG,
}
DEFAULT_DATE_FORMAT = '%m/%d/%Y %I:%M:%S %p'
DEFAULT_DATE_FORMAT = "%m/%d/%Y %I:%M:%S %p"
DEFAULT_LOG_FORMAT = (
'[%(asctime)s] [%(threadName)-20.20s] [%(levelname)-5.5s]'
' %(message)s - [%(pathname)s:%(lineno)d]'
)
DEFAULT_LOG_FORMAT = (
'<green>{time:YYYY-MM-DD HH:mm:ss.SSS}</green> | '
'<yellow>{thread.name: <18}</yellow> | '
'<level>{level: <8}</level> | '
'<level>{message}</level> | '
'<cyan>{name}</cyan>:<cyan>{function:}</cyan>:<magenta>{line:}</magenta>'
"[%(asctime)s] [%(threadName)-20.20s] [%(levelname)-5.5s]"
" %(message)s - [%(pathname)s:%(lineno)d]"
)
logging_group = cfg.OptGroup(
name='logging',
title='Logging options',
name="logging",
title="Logging options",
)
logging_opts = [
cfg.StrOpt(
'logfile',
"date_format",
default=DEFAULT_DATE_FORMAT,
help="Date format for log entries",
),
cfg.BoolOpt(
"rich_logging",
default=True,
help="Enable Rich log",
),
cfg.StrOpt(
"logfile",
default=None,
help='File to log to',
help="File to log to",
),
cfg.StrOpt(
'logformat',
"logformat",
default=DEFAULT_LOG_FORMAT,
help='Log file format, unless rich_logging enabled.',
help="Log file format, unless rich_logging enabled.",
),
cfg.StrOpt(
'log_level',
default='INFO',
"log_level",
default="INFO",
choices=LOG_LEVELS.keys(),
help='Log level for logging of events.',
),
cfg.BoolOpt(
'enable_color',
default=True,
help='Enable ANSI color codes in logging',
),
cfg.BoolOpt(
'enable_console_stdout',
default=True,
help='Enable logging to the console/stdout.',
help="Log level for logging of events.",
),
]
@ -69,5 +61,7 @@ def register_opts(config):
def list_opts():
return {
logging_group.name: (logging_opts),
logging_group.name: (
logging_opts
),
}

View File

@ -31,6 +31,7 @@ import importlib
import os
import pkgutil
LIST_OPTS_FUNC_NAME = "list_opts"
@ -63,11 +64,9 @@ def _import_modules(module_names):
for modname in module_names:
mod = importlib.import_module("aprsd.conf." + modname)
if not hasattr(mod, LIST_OPTS_FUNC_NAME):
msg = (
"The module 'aprsd.conf.%s' should have a '%s' "
"function which returns the config options."
% (modname, LIST_OPTS_FUNC_NAME)
)
msg = "The module 'aprsd.conf.%s' should have a '%s' "\
"function which returns the config options." % \
(modname, LIST_OPTS_FUNC_NAME)
raise Exception(msg)
else:
imported_modules.append(mod)

View File

@ -1,5 +1,6 @@
from oslo_config import cfg
aprsfi_group = cfg.OptGroup(
name="aprs_fi",
title="APRS.FI website settings",
@ -20,7 +21,15 @@ owm_wx_group = cfg.OptGroup(
aprsfi_opts = [
cfg.StrOpt(
"apiKey",
help="Get the apiKey from your aprs.fi account here:" "http://aprs.fi/account",
help="Get the apiKey from your aprs.fi account here:"
"http://aprs.fi/account",
),
]
query_plugin_opts = [
cfg.StrOpt(
"callsign",
help="The Ham callsign to allow access to the query plugin from RF.",
),
]
@ -28,11 +37,11 @@ owm_wx_opts = [
cfg.StrOpt(
"apiKey",
help="OWMWeatherPlugin api key to OpenWeatherMap's API."
"This plugin uses the openweathermap API to fetch"
"location and weather information."
"To use this plugin you need to get an openweathermap"
"account and apikey."
"https://home.openweathermap.org/api_keys",
"This plugin uses the openweathermap API to fetch"
"location and weather information."
"To use this plugin you need to get an openweathermap"
"account and apikey."
"https://home.openweathermap.org/api_keys",
),
]
@ -40,16 +49,16 @@ avwx_opts = [
cfg.StrOpt(
"apiKey",
help="avwx-api is an opensource project that has"
"a hosted service here: https://avwx.rest/"
"You can launch your own avwx-api in a container"
"by cloning the githug repo here:"
"https://github.com/avwx-rest/AVWX-API",
"a hosted service here: https://avwx.rest/"
"You can launch your own avwx-api in a container"
"by cloning the githug repo here:"
"https://github.com/avwx-rest/AVWX-API",
),
cfg.StrOpt(
"base_url",
default="https://avwx.rest",
help="The base url for the avwx API. If you are hosting your own"
"Here is where you change the url to point to yours.",
"Here is where you change the url to point to yours.",
),
]
@ -58,6 +67,7 @@ def register_opts(config):
config.register_group(aprsfi_group)
config.register_opts(aprsfi_opts, group=aprsfi_group)
config.register_group(query_group)
config.register_opts(query_plugin_opts, group=query_group)
config.register_group(owm_wx_group)
config.register_opts(owm_wx_opts, group=owm_wx_group)
config.register_group(avwx_group)
@ -67,6 +77,7 @@ def register_opts(config):
def list_opts():
return {
aprsfi_group.name: aprsfi_opts,
query_group.name: query_plugin_opts,
owm_wx_group.name: owm_wx_opts,
avwx_group.name: avwx_opts,
}

105
aprsd/conf/plugin_email.py Normal file
View File

@ -0,0 +1,105 @@
from oslo_config import cfg
email_group = cfg.OptGroup(
name="email_plugin",
title="Options for the APRSD Email plugin",
)
email_opts = [
cfg.StrOpt(
"callsign",
help="(Required) Callsign to validate for doing email commands."
"Only this callsign can check email. This is also where the "
"email notifications for new emails will be sent.",
),
cfg.BoolOpt(
"enabled",
default=False,
help="Enable the Email plugin?",
),
cfg.BoolOpt(
"debug",
default=False,
help="Enable the Email plugin Debugging?",
),
]
email_imap_opts = [
cfg.StrOpt(
"imap_login",
help="Login username/email for IMAP server",
),
cfg.StrOpt(
"imap_password",
secret=True,
help="Login password for IMAP server",
),
cfg.HostnameOpt(
"imap_host",
help="Hostname/IP of the IMAP server",
),
cfg.PortOpt(
"imap_port",
default=993,
help="Port to use for IMAP server",
),
cfg.BoolOpt(
"imap_use_ssl",
default=True,
help="Use SSL for connection to IMAP Server",
),
]
email_smtp_opts = [
cfg.StrOpt(
"smtp_login",
help="Login username/email for SMTP server",
),
cfg.StrOpt(
"smtp_password",
secret=True,
help="Login password for SMTP server",
),
cfg.HostnameOpt(
"smtp_host",
help="Hostname/IP of the SMTP server",
),
cfg.PortOpt(
"smtp_port",
default=465,
help="Port to use for SMTP server",
),
cfg.BoolOpt(
"smtp_use_ssl",
default=True,
help="Use SSL for connection to SMTP Server",
),
]
email_shortcuts_opts = [
    cfg.ListOpt(
        "email_shortcuts",
        # Each entry maps a short alias to a full address, since APRS
        # message bodies are too short for long email addresses.
        help="List of email shortcuts for checking/sending email "
        "For Example: wb=walt@walt.com,cl=cl@cl.com\n"
        "Means use 'wb' to send an email to walt@walt.com",
    ),
]
ALL_OPTS = (
email_opts
+ email_imap_opts
+ email_smtp_opts
+ email_shortcuts_opts
)
def register_opts(config):
    """Register the email plugin option group and all of its options.

    :param config: oslo_config ConfigOpts object to register against.
    """
    group = email_group
    config.register_group(group)
    config.register_opts(ALL_OPTS, group=group)
def list_opts():
    """Expose this plugin's options for the sample-config generator.

    :returns: mapping of option-group name to the options in that group.
    """
    return {email_group.name: ALL_OPTS}

View File

@ -1,13 +1,11 @@
class MissingConfigOptionException(Exception):
    """Raised when a required option is absent from the config file.

    :param config_option: name of the missing config option.
    """

    def __init__(self, config_option):
        self.message = f"Option '{config_option}' was not in config file"
        # Pass the message to Exception so str(exc) and tracebacks show
        # it; without this, str(exc) is an empty string.
        super().__init__(self.message)
class ConfigOptionBogusDefaultException(Exception):
"""Missing a config option."""
def __init__(self, config_option, default_fail):
self.message = (
f"Config file option '{config_option}' needs to be "

View File

@ -1,115 +1,89 @@
import logging
from logging import NullHandler
from logging.handlers import RotatingFileHandler
import queue
import sys
from loguru import logger
from oslo_config import cfg
from aprsd.conf import log as conf_log
from aprsd import conf
from aprsd.log import rich as aprsd_logging
CONF = cfg.CONF
# LOG = logging.getLogger("APRSD")
LOG = logger
class QueueLatest(queue.Queue):
    """Bounded queue that favors the newest items.

    When an insert raises ``queue.Full``, the oldest queued entry is
    evicted and the insert retried once, so the queue never grows past
    its maxsize while still keeping the most recent data.
    """

    def put(self, *args, **kwargs):
        for may_retry in (True, False):
            try:
                super().put(*args, **kwargs)
                return
            except queue.Full:
                if not may_retry:
                    raise
                # Drop the oldest entry to make room, then retry once.
                self.queue.popleft()
logging_queue = QueueLatest(maxsize=200)
class InterceptHandler(logging.Handler):
    """stdlib logging handler that forwards every record to loguru.

    Installed on the root logger so third-party libraries using the
    standard ``logging`` module end up in the loguru sinks.
    """

    def emit(self, record):
        """Re-emit a stdlib LogRecord through the loguru logger.

        :param record: the ``logging.LogRecord`` to forward.
        """
        # get corresponding Loguru level if it exists; custom numeric
        # levels with no loguru name fall back to the raw level number
        try:
            level = logger.level(record.levelname).name
        except ValueError:
            level = record.levelno
        # find caller from where originated the logged message: walk up
        # past all frames inside the stdlib logging module so loguru
        # reports the real call site, not this handler
        frame, depth = sys._getframe(6), 6
        while frame and frame.f_code.co_filename == logging.__file__:
            frame = frame.f_back
            depth += 1
        logger.opt(depth=depth, exception=record.exc_info).log(
            level, record.getMessage()
        )
LOG = logging.getLogger("APRSD")
logging_queue = queue.Queue()
# Setup the log facility
# to disable log to stdout, but still log to file
# use the --quiet option on the cmdln
def setup_logging(loglevel=None, quiet=False, custom_handler=None):
if not loglevel:
log_level = CONF.logging.log_level
else:
log_level = conf_log.LOG_LEVELS[loglevel]
def setup_logging(loglevel, quiet):
log_level = conf.log.LOG_LEVELS[loglevel]
LOG.setLevel(log_level)
date_format = CONF.logging.date_format
rh = None
fh = None
# intercept everything at the root logger
logging.root.handlers = [InterceptHandler()]
logging.root.setLevel(log_level)
# We don't really want to see the aprslib parsing debug output.
disable_list = [
'aprslib',
'aprslib.parsing',
'aprslib.exceptions',
]
chardet_list = [
'chardet',
'chardet.charsetprober',
'chardet.eucjpprober',
]
for name in chardet_list:
disable = logging.getLogger(name)
disable.setLevel(logging.ERROR)
# remove every other logger's handlers
# and propagate to root logger
for name in logging.root.manager.loggerDict.keys():
logging.getLogger(name).handlers = []
logging.getLogger(name).propagate = name not in disable_list
handlers = []
if CONF.logging.enable_console_stdout and not quiet:
handlers.append(
{
'sink': sys.stdout,
'serialize': False,
'format': CONF.logging.logformat,
'colorize': CONF.logging.enable_color,
'level': log_level,
},
rich_logging = False
if CONF.logging.get("rich_logging", False) and not quiet:
log_format = "%(message)s"
log_formatter = logging.Formatter(fmt=log_format, datefmt=date_format)
rh = aprsd_logging.APRSDRichHandler(
show_thread=True, thread_width=20,
rich_tracebacks=True, omit_repeated_times=False,
)
rh.setFormatter(log_formatter)
LOG.addHandler(rh)
rich_logging = True
if CONF.logging.logfile:
handlers.append(
{
'sink': CONF.logging.logfile,
'serialize': False,
'format': CONF.logging.logformat,
'colorize': False,
'level': log_level,
},
log_file = CONF.logging.logfile
log_format = CONF.logging.logformat
log_formatter = logging.Formatter(fmt=log_format, datefmt=date_format)
if log_file:
fh = RotatingFileHandler(log_file, maxBytes=(10248576 * 5), backupCount=4)
fh.setFormatter(log_formatter)
LOG.addHandler(fh)
imap_logger = None
if CONF.email_plugin.enabled and CONF.email_plugin.debug:
imap_logger = logging.getLogger("imapclient.imaplib")
imap_logger.setLevel(log_level)
if rh:
imap_logger.addHandler(rh)
if fh:
imap_logger.addHandler(fh)
if CONF.admin.web_enabled:
qh = logging.handlers.QueueHandler(logging_queue)
q_log_formatter = logging.Formatter(
fmt=CONF.logging.logformat,
datefmt=CONF.logging.date_format,
)
qh.setFormatter(q_log_formatter)
LOG.addHandler(qh)
if custom_handler:
handlers.append(custom_handler)
if not quiet and not rich_logging:
sh = logging.StreamHandler(sys.stdout)
sh.setFormatter(log_formatter)
LOG.addHandler(sh)
if imap_logger:
imap_logger.addHandler(sh)
# configure loguru
logger.configure(handlers=handlers)
logger.level('DEBUG', color='<fg #BABABA>')
def setup_logging_no_config(loglevel, quiet):
log_level = conf.log.LOG_LEVELS[loglevel]
LOG.setLevel(log_level)
log_format = CONF.logging.logformat
date_format = CONF.logging.date_format
log_formatter = logging.Formatter(fmt=log_format, datefmt=date_format)
fh = NullHandler()
fh.setFormatter(log_formatter)
LOG.addHandler(fh)
if not quiet:
sh = logging.StreamHandler(sys.stdout)
sh.setFormatter(log_formatter)
LOG.addHandler(sh)

160
aprsd/log/rich.py Normal file
View File

@ -0,0 +1,160 @@
from datetime import datetime
from logging import LogRecord
from pathlib import Path
from typing import TYPE_CHECKING, Callable, Iterable, List, Optional, Union
from rich._log_render import LogRender
from rich.logging import RichHandler
from rich.text import Text, TextType
from rich.traceback import Traceback
if TYPE_CHECKING:
from rich.console import Console, ConsoleRenderable, RenderableType
from rich.table import Table
from aprsd import utils
FormatTimeCallable = Callable[[datetime], Text]
class APRSDRichLogRender(LogRender):
    """rich LogRender subclass that can show a per-thread column.

    Adds ``show_thread``/``thread_width`` on top of rich's stock
    LogRender; the thread column is colorized per thread name via
    aprsd.utils.rgb_from_name so each thread gets a stable color.
    """

    def __init__(
        self, *args,
        show_thread: bool = False,
        thread_width: Optional[int] = 10,
        **kwargs,
    ):
        super().__init__(*args, **kwargs)
        self.show_thread = show_thread
        self.thread_width = thread_width

    def __call__(
        self,
        console: "Console",
        renderables: Iterable["ConsoleRenderable"],
        log_time: Optional[datetime] = None,
        time_format: Optional[Union[str, FormatTimeCallable]] = None,
        level: TextType = "",
        path: Optional[str] = None,
        line_no: Optional[int] = None,
        link_path: Optional[str] = None,
        thread_name: Optional[str] = None,
    ) -> "Table":
        # Render one log row as a rich Table grid, mirroring rich's own
        # LogRender.__call__ but inserting the optional thread column
        # between the time and level columns.
        from rich.containers import Renderables
        from rich.table import Table
        output = Table.grid(padding=(0, 1))
        output.expand = True
        if self.show_time:
            output.add_column(style="log.time")
        if self.show_thread:
            # rgb_from_name yields a color tuple; stripping spaces makes
            # it a valid rich "rgb(r,g,b)" style string.
            rgb = str(utils.rgb_from_name(thread_name)).replace(" ", "")
            output.add_column(style=f"rgb{rgb}", width=self.thread_width)
        if self.show_level:
            output.add_column(style="log.level", width=self.level_width)
        output.add_column(ratio=1, style="log.message", overflow="fold")
        if self.show_path and path:
            output.add_column(style="log.path")
        row: List["RenderableType"] = []
        if self.show_time:
            log_time = log_time or console.get_datetime()
            time_format = time_format or self.time_format
            if callable(time_format):
                log_time_display = time_format(log_time)
            else:
                log_time_display = Text(log_time.strftime(time_format))
            # Blank out a timestamp identical to the previous row when
            # omit_repeated_times is enabled (matches rich's behavior).
            if log_time_display == self._last_time and self.omit_repeated_times:
                row.append(Text(" " * len(log_time_display)))
            else:
                row.append(log_time_display)
                self._last_time = log_time_display
        if self.show_thread:
            row.append(thread_name)
        if self.show_level:
            row.append(level)
        row.append(Renderables(renderables))
        if self.show_path and path:
            path_text = Text()
            path_text.append(
                path, style=f"link file://{link_path}" if link_path else "",
            )
            if line_no:
                path_text.append(":")
                path_text.append(
                    f"{line_no}",
                    style=f"link file://{link_path}#{line_no}" if link_path else "",
                )
            row.append(path_text)
        output.add_row(*row)
        return output
class APRSDRichHandler(RichHandler):
    """APRSD's extension of rich's RichHandler to show threads.
    show_thread (bool, optional): Show the name of the thread in log entry. Defaults to False.
    thread_width (int, optional): The number of characters to show for thread name. Defaults to 10.
    """

    def __init__(
        self, *args,
        show_thread: bool = True,
        thread_width: Optional[int] = 10,
        **kwargs,
    ):
        super().__init__(*args, **kwargs)
        self.show_thread = show_thread
        self.thread_width = thread_width
        # NOTE(review): kwargs is mutated *after* super().__init__ has
        # consumed it and is not used again below — these two writes
        # appear to have no effect; confirm before removing.
        kwargs["show_thread"] = show_thread
        kwargs["thread_width"] = thread_width
        # Replace the renderer built by RichHandler with our
        # thread-aware variant.
        self._log_render = APRSDRichLogRender(
            show_time=True,
            show_level=True,
            show_path=True,
            omit_repeated_times=False,
            level_width=None,
            show_thread=show_thread,
            thread_width=thread_width,
        )

    def render(
        self, *, record: LogRecord,
        traceback: Optional[Traceback],
        message_renderable: "ConsoleRenderable",
    ) -> "ConsoleRenderable":
        """Render log for display.
        Args:
            record (LogRecord): log Record.
            traceback (Optional[Traceback]): Traceback instance or None for no Traceback.
            message_renderable (ConsoleRenderable): Renderable (typically Text) containing log message contents.
        Returns:
            ConsoleRenderable: Renderable to display log.
        """
        path = Path(record.pathname).name
        level = self.get_level_text(record)
        # datefmt from the attached formatter (if any) controls the
        # timestamp column format.
        time_format = None if self.formatter is None else self.formatter.datefmt
        log_time = datetime.fromtimestamp(record.created)
        thread_name = record.threadName
        log_renderable = self._log_render(
            self.console,
            [message_renderable] if not traceback else [
                message_renderable,
                traceback,
            ],
            log_time=log_time,
            time_format=time_format,
            level=level,
            path=path,
            line_no=record.lineno,
            link_path=record.pathname if self.enable_link_path else None,
            thread_name=thread_name,
        )
        return log_renderable

View File

@ -22,76 +22,82 @@
# python included libs
import datetime
import importlib.metadata as imp
from importlib.metadata import version as metadata_version
import logging
import os
import signal
import sys
import time
from importlib.metadata import version as metadata_version
import click
import click_completion
from oslo_config import cfg, generator
# local imports here
import aprsd
from aprsd import cli_helper, packets, threads, utils
from aprsd.stats import collector
from aprsd import cli_helper, packets, stats, threads, utils
# setup the global logger
# log.basicConfig(level=log.DEBUG) # level=10
CONF = cfg.CONF
LOG = logging.getLogger('APRSD')
CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])
LOG = logging.getLogger("APRSD")
CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"])
flask_enabled = False
rpc_serv = None
@click.group(cls=cli_helper.AliasedGroup, context_settings=CONTEXT_SETTINGS)
def custom_startswith(string, incomplete):
    """A custom completion match that supports case insensitive matching."""
    fold_case = os.environ.get(
        "_CLICK_COMPLETION_COMMAND_CASE_INSENSITIVE_COMPLETE",
    )
    if fold_case:
        return string.lower().startswith(incomplete.lower())
    return string.startswith(incomplete)
click_completion.core.startswith = custom_startswith
click_completion.init()
@click.group(context_settings=CONTEXT_SETTINGS)
@click.version_option()
@click.pass_context
def cli(ctx):
pass
def load_commands():
from .cmds import ( # noqa
completion,
dev,
fetch_stats,
healthcheck,
list_plugins,
listen,
send_message,
server,
)
def main():
# First import all the possible commands for the CLI
# The commands themselves live in the cmds directory
load_commands()
utils.load_entry_points('aprsd.extension')
cli(auto_envvar_prefix='APRSD')
from .cmds import ( # noqa
completion, dev, fetch_stats, healthcheck, list_plugins, listen,
send_message, server, webchat,
)
cli(auto_envvar_prefix="APRSD")
def signal_handler(sig, frame):
click.echo('signal_handler: called')
global flask_enabled
click.echo("signal_handler: called")
threads.APRSDThreadList().stop_all()
if 'subprocess' not in str(frame):
if "subprocess" not in str(frame):
LOG.info(
'Ctrl+C, Sending all threads exit! Can take up to 10 seconds {}'.format(
"Ctrl+C, Sending all threads exit! Can take up to 10 seconds {}".format(
datetime.datetime.now(),
),
)
time.sleep(1.5)
try:
packets.PacketTrack().save()
packets.WatchList().save()
packets.SeenList().save()
packets.PacketList().save()
collector.Collector().collect()
except Exception as e:
LOG.error(f'Failed to save data: {e}')
sys.exit(0)
packets.PacketTrack().save()
packets.WatchList().save()
packets.SeenList().save()
LOG.info(stats.APRSDStats())
# signal.signal(signal.SIGTERM, sys.exit(0))
# sys.exit(0)
if flask_enabled:
signal.signal(signal.SIGTERM, sys.exit(0))
@cli.command()
@cli_helper.add_options(cli_helper.common_options)
@ -101,9 +107,9 @@ def check_version(ctx):
"""Check this version against the latest in pypi.org."""
level, msg = utils._check_version()
if level:
click.secho(msg, fg='yellow')
click.secho(msg, fg="yellow")
else:
click.secho(msg, fg='green')
click.secho(msg, fg="green")
@cli.command()
@ -111,56 +117,44 @@ def check_version(ctx):
def sample_config(ctx):
"""Generate a sample Config file from aprsd and all installed plugins."""
def _get_selected_entry_points():
import sys
if sys.version_info < (3, 10):
all = imp.entry_points()
selected = []
if 'oslo.config.opts' in all:
for x in all['oslo.config.opts']:
if x.group == 'oslo.config.opts':
selected.append(x)
else:
selected = imp.entry_points(group='oslo.config.opts')
return selected
def get_namespaces():
args = []
# selected = imp.entry_points(group="oslo.config.opts")
selected = _get_selected_entry_points()
all = imp.entry_points()
selected = []
if "oslo.config.opts" in all:
for x in all["oslo.config.opts"]:
if x.group == "oslo.config.opts":
selected.append(x)
for entry in selected:
if 'aprsd' in entry.name:
args.append('--namespace')
if "aprsd" in entry.name:
args.append("--namespace")
args.append(entry.name)
return args
args = get_namespaces()
config_version = metadata_version('oslo.config')
config_version = metadata_version("oslo.config")
logging.basicConfig(level=logging.WARN)
conf = cfg.ConfigOpts()
generator.register_cli_opts(conf)
try:
conf(args, version=config_version)
except cfg.RequiredOptError as ex:
except cfg.RequiredOptError:
conf.print_help()
if not sys.argv[1:]:
raise SystemExit from ex
raise SystemExit
raise
generator.generate(conf)
return
@cli.command()
@click.pass_context
def version(ctx):
"""Show the APRSD version."""
click.echo(click.style('APRSD Version : ', fg='white'), nl=False)
click.secho(f'{aprsd.__version__}', fg='yellow', bold=True)
click.echo(click.style("APRSD Version : ", fg="white"), nl=False)
click.secho(f"{aprsd.__version__}", fg="yellow", bold=True)
if __name__ == '__main__':
if __name__ == "__main__":
main()

4
aprsd/messaging.py Normal file
View File

@ -0,0 +1,4 @@
# What to return from a plugin if we have processed the message
# and it's ok, but don't send a usage string back
# REMOVE THIS FILE

View File

@ -1,36 +1,11 @@
from aprsd.packets import collector
from aprsd.packets.core import ( # noqa: F401
AckPacket,
BeaconPacket,
BulletinPacket,
GPSPacket,
MessagePacket,
MicEPacket,
ObjectPacket,
Packet,
RejectPacket,
StatusPacket,
ThirdPartyPacket,
UnknownPacket,
WeatherPacket,
factory,
AckPacket, GPSPacket, MessagePacket, MicEPacket, Packet, PathPacket,
RejectPacket, StatusPacket, WeatherPacket,
)
from aprsd.packets.filter import PacketFilter
from aprsd.packets.filters.dupe_filter import DupePacketFilter
from aprsd.packets.packet_list import PacketList # noqa: F401
from aprsd.packets.seen_list import SeenList # noqa: F401
from aprsd.packets.tracker import PacketTrack # noqa: F401
from aprsd.packets.watch_list import WatchList # noqa: F401
# Register all the packet tracking objects.
collector.PacketCollector().register(PacketList)
collector.PacketCollector().register(SeenList)
collector.PacketCollector().register(PacketTrack)
collector.PacketCollector().register(WatchList)
# Register all the packet filters for normal processing
# For specific commands you can deregister these if you don't want them.
PacketFilter().register(DupePacketFilter)
NULL_MESSAGE = -1

View File

@ -1,79 +0,0 @@
import logging
from typing import Callable, Protocol, runtime_checkable
from aprsd.packets import core
from aprsd.utils import singleton
LOG = logging.getLogger("APRSD")
@runtime_checkable
class PacketMonitor(Protocol):
"""Protocol for Monitoring packets in some way."""
def rx(self, packet: type[core.Packet]) -> None:
"""When we get a packet from the network."""
...
def tx(self, packet: type[core.Packet]) -> None:
"""When we send a packet out the network."""
...
def flush(self) -> None:
"""Flush out any data."""
...
def load(self) -> None:
"""Load any data."""
...
@singleton
class PacketCollector:
def __init__(self):
self.monitors: list[Callable] = []
def register(self, monitor: Callable) -> None:
if not isinstance(monitor, PacketMonitor):
raise TypeError(f"Monitor {monitor} is not a PacketMonitor")
self.monitors.append(monitor)
def unregister(self, monitor: Callable) -> None:
if not isinstance(monitor, PacketMonitor):
raise TypeError(f"Monitor {monitor} is not a PacketMonitor")
self.monitors.remove(monitor)
def rx(self, packet: type[core.Packet]) -> None:
for name in self.monitors:
cls = name()
try:
cls.rx(packet)
except Exception as e:
LOG.error(f"Error in monitor {name} (rx): {e}")
def tx(self, packet: type[core.Packet]) -> None:
for name in self.monitors:
cls = name()
try:
cls.tx(packet)
except Exception as e:
LOG.error(f"Error in monitor {name} (tx): {e}")
def flush(self):
"""Call flush on the objects. This is used to flush out any data."""
for name in self.monitors:
cls = name()
try:
cls.flush()
except Exception as e:
LOG.error(f"Error in monitor {name} (flush): {e}")
def load(self):
"""Call load on the objects. This is used to load any data."""
for name in self.monitors:
cls = name()
try:
cls.load()
except Exception as e:
LOG.error(f"Error in monitor {name} (load): {e}")

File diff suppressed because it is too large Load Diff

View File

@ -1,58 +0,0 @@
import logging
from typing import Callable, Protocol, runtime_checkable, Union, Dict
from aprsd.packets import core
from aprsd.utils import singleton
LOG = logging.getLogger("APRSD")
@runtime_checkable
class PacketFilterProtocol(Protocol):
"""Protocol API for a packet filter class.
"""
def filter(self, packet: type[core.Packet]) -> Union[type[core.Packet], None]:
"""When we get a packet from the network.
Return a Packet object if the filter passes. Return None if the
Packet is filtered out.
"""
...
@singleton
class PacketFilter:
def __init__(self):
self.filters: Dict[str, Callable] = {}
def register(self, packet_filter: Callable) -> None:
if not isinstance(packet_filter, PacketFilterProtocol):
raise TypeError(f"class {packet_filter} is not a PacketFilterProtocol object")
if packet_filter not in self.filters:
self.filters[packet_filter] = packet_filter()
def unregister(self, packet_filter: Callable) -> None:
if not isinstance(packet_filter, PacketFilterProtocol):
raise TypeError(f"class {packet_filter} is not a PacketFilterProtocol object")
if packet_filter in self.filters:
del self.filters[packet_filter]
def filter(self, packet: type[core.Packet]) -> Union[type[core.Packet], None]:
"""Run through each of the filters.
This will step through each registered filter class
and call filter on it.
If the filter object returns None, we are done filtering.
If the filter object returns the packet, we continue filtering.
"""
for packet_filter in self.filters:
try:
if not self.filters[packet_filter].filter(packet):
LOG.debug(f"{self.filters[packet_filter].__class__.__name__} dropped {packet.__class__.__name__}:{packet.human_info}")
return None
except Exception as ex:
LOG.error(f"{packet_filter.__clas__.__name__} failed filtering packet {packet.__class__.__name__} : {ex}")
return packet

View File

@ -1,68 +0,0 @@
import logging
from typing import Union
from oslo_config import cfg
from aprsd import packets
from aprsd.packets import core
CONF = cfg.CONF
LOG = logging.getLogger('APRSD')
class DupePacketFilter:
"""This is a packet filter to detect duplicate packets.
This Uses the PacketList object to see if a packet exists
already. If it does exist in the PacketList, then we need to
check the flag on the packet to see if it's been processed before.
If the packet has been processed already within the allowed
timeframe, then it's a dupe.
"""
def filter(self, packet: type[core.Packet]) -> Union[type[core.Packet], None]:
# LOG.debug(f"{self.__class__.__name__}.filter called for packet {packet}")
"""Filter a packet out if it's already been seen and processed."""
if isinstance(packet, core.AckPacket):
# We don't need to drop AckPackets, those should be
# processed.
# Send the AckPacket to the queue for processing elsewhere.
return packet
else:
# Make sure we aren't re-processing the same packet
# For RF based APRS Clients we can get duplicate packets
# So we need to track them and not process the dupes.
pkt_list = packets.PacketList()
found = False
try:
# Find the packet in the list of already seen packets
# Based on the packet.key
found = pkt_list.find(packet)
if not packet.msgNo:
# If the packet doesn't have a message id
# then there is no reliable way to detect
# if it's a dupe, so we just pass it on.
# it shouldn't get acked either.
found = False
except KeyError:
found = False
if not found:
# We haven't seen this packet before, so we process it.
return packet
if not packet.processed:
# We haven't processed this packet through the plugins.
return packet
elif packet.timestamp - found.timestamp < CONF.packet_dupe_timeout:
# If the packet came in within N seconds of the
# Last time seeing the packet, then we drop it as a dupe.
LOG.warning(
f'Packet {packet.from_call}:{packet.msgNo} already tracked, dropping.'
)
else:
LOG.warning(
f'Packet {packet.from_call}:{packet.msgNo} already tracked '
f'but older than {CONF.packet_dupe_timeout} seconds. processing.',
)
return packet

View File

@ -1,53 +0,0 @@
import logging
from typing import Union
from oslo_config import cfg
from aprsd import packets
from aprsd.packets import core
from aprsd.utils import singleton
CONF = cfg.CONF
LOG = logging.getLogger('APRSD')
@singleton
class PacketTypeFilter:
"""This filter is used to filter out packets that don't match a specific type.
To use this, register it with the PacketFilter class,
then instante it and call set_allow_list() with a list of packet types
you want to allow to pass the filtering. All other packets will be
filtered out.
"""
filters = {
packets.Packet.__name__: packets.Packet,
packets.AckPacket.__name__: packets.AckPacket,
packets.BeaconPacket.__name__: packets.BeaconPacket,
packets.GPSPacket.__name__: packets.GPSPacket,
packets.MessagePacket.__name__: packets.MessagePacket,
packets.MicEPacket.__name__: packets.MicEPacket,
packets.ObjectPacket.__name__: packets.ObjectPacket,
packets.StatusPacket.__name__: packets.StatusPacket,
packets.ThirdPartyPacket.__name__: packets.ThirdPartyPacket,
packets.WeatherPacket.__name__: packets.WeatherPacket,
packets.UnknownPacket.__name__: packets.UnknownPacket,
}
allow_list = ()
def set_allow_list(self, filter_list):
tmp_list = []
for filter in filter_list:
LOG.warning(
f'Setting filter {filter} : {self.filters[filter]} to tmp {tmp_list}'
)
tmp_list.append(self.filters[filter])
self.allow_list = tuple(tmp_list)
def filter(self, packet: type[core.Packet]) -> Union[type[core.Packet], None]:
"""Only allow packets of certain types to filter through."""
if self.allow_list:
if isinstance(packet, self.allow_list):
return packet

View File

@ -1,171 +0,0 @@
import logging
from typing import Optional
from haversine import Unit, haversine
from loguru import logger
from oslo_config import cfg
from aprsd import utils
from aprsd.packets.core import AckPacket, GPSPacket, RejectPacket
LOG = logging.getLogger()
LOGU = logger
CONF = cfg.CONF
FROM_COLOR = 'fg #C70039'
TO_COLOR = 'fg #D033FF'
TX_COLOR = 'red'
RX_COLOR = 'green'
PACKET_COLOR = 'cyan'
DISTANCE_COLOR = 'fg #FF5733'
DEGREES_COLOR = 'fg #FFA900'
def log_multiline(
packet, tx: Optional[bool] = False, header: Optional[bool] = True
) -> None:
"""LOG a packet to the logfile."""
if not CONF.enable_packet_logging:
return
if CONF.log_packet_format == 'compact':
return
# asdict(packet)
logit = ['\n']
name = packet.__class__.__name__
if isinstance(packet, AckPacket):
pkt_max_send_count = CONF.default_ack_send_count
else:
pkt_max_send_count = CONF.default_packet_send_count
if header:
if tx:
header_str = f'<{TX_COLOR}>TX</{TX_COLOR}>'
logit.append(
f'{header_str}________(<{PACKET_COLOR}>{name}</{PACKET_COLOR}> '
f'TX:{packet.send_count + 1} of {pkt_max_send_count}',
)
else:
header_str = f'<{RX_COLOR}>RX</{RX_COLOR}>'
logit.append(
f'{header_str}________(<{PACKET_COLOR}>{name}</{PACKET_COLOR}>)',
)
else:
header_str = ''
logit.append(f'__________(<{PACKET_COLOR}>{name}</{PACKET_COLOR}>)')
# log_list.append(f" Packet : {packet.__class__.__name__}")
if packet.msgNo:
logit.append(f' Msg # : {packet.msgNo}')
if packet.from_call:
logit.append(f' From : <{FROM_COLOR}>{packet.from_call}</{FROM_COLOR}>')
if packet.to_call:
logit.append(f' To : <{TO_COLOR}>{packet.to_call}</{TO_COLOR}>')
if hasattr(packet, 'path') and packet.path:
logit.append(f' Path : {"=>".join(packet.path)}')
if hasattr(packet, 'via') and packet.via:
logit.append(f' VIA : {packet.via}')
if not isinstance(packet, AckPacket) and not isinstance(packet, RejectPacket):
msg = packet.human_info
if msg:
msg = msg.replace('<', '\\<')
logit.append(f' Info : <light-yellow><b>{msg}</b></light-yellow>')
if hasattr(packet, 'comment') and packet.comment:
logit.append(f' Comment : {packet.comment}')
raw = packet.raw.replace('<', '\\<')
logit.append(f' Raw : <fg #828282>{raw}</fg #828282>')
logit.append(f'{header_str}________(<{PACKET_COLOR}>{name}</{PACKET_COLOR}>)')
LOGU.opt(colors=True).info('\n'.join(logit))
LOG.debug(repr(packet))
def log(
packet,
tx: Optional[bool] = False,
header: Optional[bool] = True,
packet_count: Optional[int] = None,
) -> None:
if not CONF.enable_packet_logging:
return
if CONF.log_packet_format == 'multiline':
log_multiline(packet, tx, header)
return
if not packet_count:
packet_count = ''
else:
packet_count = f'({packet_count:d})'
logit = []
name = packet.__class__.__name__
if isinstance(packet, AckPacket):
pkt_max_send_count = CONF.default_ack_send_count
else:
pkt_max_send_count = CONF.default_packet_send_count
if header:
if tx:
via_color = 'red'
arrow = f'<{via_color}>\u2192</{via_color}>'
logit.append(
f'<red>TX{packet_count}\u2191</red> '
f'<cyan>{name}</cyan>'
f':{packet.msgNo}'
f' ({packet.send_count + 1} of {pkt_max_send_count})',
)
else:
via_color = 'fg #1AA730'
arrow = f'<{via_color}>\u2192</{via_color}>'
f'<{via_color}><-</{via_color}>'
logit.append(
f'<fg #1AA730>RX{packet_count}\u2193</fg #1AA730> '
f'<cyan>{name}</cyan>'
f':{packet.msgNo}',
)
else:
via_color = 'green'
arrow = f'<{via_color}>-></{via_color}>'
logit.append(
f'<cyan>{name}</cyan>:{packet.msgNo}',
)
tmp = None
if packet.path:
tmp = f'{arrow}'.join(packet.path) + f'{arrow} '
logit.append(
f'<{FROM_COLOR}>{packet.from_call}</{FROM_COLOR}> {arrow}'
f'{tmp if tmp else " "}'
f'<{TO_COLOR}>{packet.to_call}</{TO_COLOR}>',
)
if not isinstance(packet, AckPacket) and not isinstance(packet, RejectPacket):
logit.append(':')
msg = packet.human_info
if msg:
msg = msg.replace('<', '\\<')
logit.append(f'<light-yellow><b>{msg}</b></light-yellow>')
# is there distance information?
if isinstance(packet, GPSPacket) and CONF.latitude and CONF.longitude:
my_coords = (float(CONF.latitude), float(CONF.longitude))
packet_coords = (float(packet.latitude), float(packet.longitude))
try:
bearing = utils.calculate_initial_compass_bearing(my_coords, packet_coords)
except Exception as e:
LOG.error(f'Failed to calculate bearing: {e}')
bearing = 0
logit.append(
f' : <{DEGREES_COLOR}>{utils.degrees_to_cardinal(bearing, full_string=True)}</{DEGREES_COLOR}>'
f'<{DISTANCE_COLOR}>@{haversine(my_coords, packet_coords, unit=Unit.MILES):.2f}miles</{DISTANCE_COLOR}>',
)
LOGU.opt(colors=True).info(' '.join(logit))
log_multiline(packet, tx, header)

View File

@ -1,109 +1,61 @@
import logging
from collections import OrderedDict
import threading
from oslo_config import cfg
import wrapt
from aprsd import stats, utils
from aprsd.packets import seen_list
from aprsd.packets import core
from aprsd.utils import objectstore
CONF = cfg.CONF
LOG = logging.getLogger('APRSD')
LOG = logging.getLogger("APRSD")
class PacketList(objectstore.ObjectStoreMixin):
"""Class to keep track of the packets we tx/rx."""
class PacketList:
"""Class to track all of the packets rx'd and tx'd by aprsd."""
_instance = None
lock = threading.Lock()
packet_list: utils.RingBuffer = utils.RingBuffer(1000)
_total_rx: int = 0
_total_tx: int = 0
maxlen: int = 100
def __new__(cls, *args, **kwargs):
if cls._instance is None:
cls._instance = super().__new__(cls)
cls._instance.maxlen = CONF.packet_list_maxlen
cls._instance._init_data()
return cls._instance
def _init_data(self):
self.data = {
'types': {},
'packets': OrderedDict(),
}
@wrapt.synchronized(lock)
def __iter__(self):
return iter(self.packet_list)
def rx(self, packet: type[core.Packet]):
@wrapt.synchronized(lock)
def rx(self, packet):
"""Add a packet that was received."""
with self.lock:
self._total_rx += 1
self._add(packet)
ptype = packet.__class__.__name__
type_stats = self.data['types'].setdefault(
ptype,
{'tx': 0, 'rx': 0},
)
type_stats['rx'] += 1
self._total_rx += 1
self.packet_list.append(packet)
seen_list.SeenList().update_seen(packet)
stats.APRSDStats().rx(packet)
def tx(self, packet: type[core.Packet]):
@wrapt.synchronized(lock)
def tx(self, packet):
"""Add a packet that was received."""
with self.lock:
self._total_tx += 1
self._add(packet)
ptype = packet.__class__.__name__
type_stats = self.data['types'].setdefault(
ptype,
{'tx': 0, 'rx': 0},
)
type_stats['tx'] += 1
self._total_tx += 1
self.packet_list.append(packet)
seen_list.SeenList().update_seen(packet)
stats.APRSDStats().tx(packet)
def add(self, packet):
with self.lock:
self._add(packet)
def _add(self, packet):
if not self.data.get('packets'):
self._init_data()
if packet.key in self.data['packets']:
self.data['packets'].move_to_end(packet.key)
elif len(self.data['packets']) == self.maxlen:
self.data['packets'].popitem(last=False)
self.data['packets'][packet.key] = packet
def find(self, packet):
with self.lock:
return self.data['packets'][packet.key]
def __len__(self):
with self.lock:
return len(self.data['packets'])
@wrapt.synchronized(lock)
def get(self):
return self.packet_list.get()
@wrapt.synchronized(lock)
def total_rx(self):
with self.lock:
return self._total_rx
return self._total_rx
@wrapt.synchronized(lock)
def total_tx(self):
with self.lock:
return self._total_tx
def stats(self, serializable=False) -> dict:
with self.lock:
# Get last N packets directly using list slicing
if CONF.packet_list_stats_maxlen >= 0:
packets_list = list(self.data.get('packets', {}).values())
pkts = packets_list[-CONF.packet_list_stats_maxlen :][::-1]
else:
# We have to copy here, because this get() results in a pointer
# to the packets internally here, which can change after this
# function returns, which would cause a problem trying to save
# the stats to disk.
pkts = self.data.get('packets', {}).copy()
stats = {
'total_tracked': self._total_rx
+ self._total_tx, # Fixed typo: was rx + rx
'rx': self._total_rx,
'tx': self._total_tx,
'types': self.data.get('types', {}), # Changed default from [] to {}
'packet_count': len(self.data.get('packets', [])),
'maxlen': self.maxlen,
'packets': pkts,
}
return stats
return self._total_tx

View File

@ -1,9 +1,10 @@
import datetime
import logging
import threading
from oslo_config import cfg
import wrapt
from aprsd.packets import core
from aprsd.utils import objectstore
@ -15,35 +16,28 @@ class SeenList(objectstore.ObjectStoreMixin):
"""Global callsign seen list."""
_instance = None
lock = threading.Lock()
data: dict = {}
def __new__(cls, *args, **kwargs):
if cls._instance is None:
cls._instance = super().__new__(cls)
cls._instance._init_store()
cls._instance.data = {}
return cls._instance
def stats(self, serializable=False):
"""Return the stats for the PacketTrack class."""
with self.lock:
return self.data
def rx(self, packet: type[core.Packet]):
"""When we get a packet from the network, update the seen list."""
with self.lock:
callsign = None
if packet.from_call:
callsign = packet.from_call
else:
LOG.warning(f"Can't find FROM in packet {packet}")
return
if callsign not in self.data:
self.data[callsign] = {
"last": None,
"count": 0,
}
self.data[callsign]["last"] = datetime.datetime.now()
self.data[callsign]["count"] += 1
def tx(self, packet: type[core.Packet]):
"""We don't care about TX packets."""
@wrapt.synchronized(lock)
def update_seen(self, packet):
callsign = None
if packet.from_call:
callsign = packet.from_call
else:
LOG.warning(f"Can't find FROM in packet {packet}")
return
if callsign not in self.data:
self.data[callsign] = {
"last": None,
"count": 0,
}
self.data[callsign]["last"] = str(datetime.datetime.now())
self.data[callsign]["count"] += 1

View File

@ -1,14 +1,14 @@
import datetime
import logging
import threading
from oslo_config import cfg
import wrapt
from aprsd.packets import core
from aprsd.threads import tx
from aprsd.utils import objectstore
CONF = cfg.CONF
LOG = logging.getLogger("APRSD")
class PacketTrack(objectstore.ObjectStoreMixin):
@ -26,6 +26,7 @@ class PacketTrack(objectstore.ObjectStoreMixin):
_instance = None
_start_time = None
lock = threading.Lock()
data: dict = {}
total_tracked: int = 0
@ -37,67 +38,81 @@ class PacketTrack(objectstore.ObjectStoreMixin):
cls._instance._init_store()
return cls._instance
@wrapt.synchronized(lock)
def __getitem__(self, name):
with self.lock:
return self.data[name]
return self.data[name]
@wrapt.synchronized(lock)
def __iter__(self):
with self.lock:
return iter(self.data)
return iter(self.data)
@wrapt.synchronized(lock)
def keys(self):
with self.lock:
return self.data.keys()
return self.data.keys()
@wrapt.synchronized(lock)
def items(self):
with self.lock:
return self.data.items()
return self.data.items()
@wrapt.synchronized(lock)
def values(self):
with self.lock:
return self.data.values()
return self.data.values()
def stats(self, serializable=False):
with self.lock:
stats = {
"total_tracked": self.total_tracked,
}
pkts = {}
for key in self.data:
last_send_time = self.data[key].last_send_time
pkts[key] = {
"last_send_time": last_send_time,
"send_count": self.data[key].send_count,
"retry_count": self.data[key].retry_count,
"message": self.data[key].raw,
}
stats["packets"] = pkts
return stats
@wrapt.synchronized(lock)
def __len__(self):
return len(self.data)
def rx(self, packet: type[core.Packet]) -> None:
"""When we get a packet from the network, check if we should remove it."""
if isinstance(packet, core.AckPacket):
self._remove(packet.msgNo)
elif isinstance(packet, core.RejectPacket):
self._remove(packet.msgNo)
elif hasattr(packet, "ackMsgNo"):
# Got a piggyback ack, so remove the original message
self._remove(packet.ackMsgNo)
@wrapt.synchronized(lock)
def __str__(self):
result = "{"
for key in self.data.keys():
result += f"{key}: {str(self.data[key])}, "
result += "}"
return result
def tx(self, packet: type[core.Packet]) -> None:
"""Add a packet that was sent."""
with self.lock:
key = packet.msgNo
packet.send_count = 0
self.data[key] = packet
self.total_tracked += 1
@wrapt.synchronized(lock)
def add(self, packet):
key = int(packet.msgNo)
self.data[key] = packet
self.total_tracked += 1
def remove(self, key):
self._remove(key)
@wrapt.synchronized(lock)
def get(self, id):
if id in self.data:
return self.data[id]
def _remove(self, key):
with self.lock:
try:
del self.data[key]
except KeyError:
pass
@wrapt.synchronized(lock)
def remove(self, id):
key = int(id)
if key in self.data.keys():
del self.data[key]
def restart(self):
"""Walk the list of messages and restart them if any."""
for key in self.data.keys():
pkt = self.data[key]
if pkt.last_send_attempt < pkt.retry_count:
tx.send(pkt)
def _resend(self, packet):
packet._last_send_attempt = 0
tx.send(packet)
def restart_delayed(self, count=None, most_recent=True):
"""Walk the list of delayed messages and restart them if any."""
if not count:
# Send all the delayed messages
for key in self.data.keys():
pkt = self.data[key]
if pkt._last_send_attempt == pkt._retry_count:
self._resend(pkt)
else:
# They want to resend <count> delayed messages
tmp = sorted(
self.data.items(),
reverse=most_recent,
key=lambda x: x[1].last_send_time,
)
pkt_list = tmp[:count]
for (_key, pkt) in pkt_list:
self._resend(pkt)

View File

@ -1,10 +1,11 @@
import datetime
import logging
import threading
from oslo_config import cfg
import wrapt
from aprsd import utils
from aprsd.packets import core
from aprsd.utils import objectstore
@ -16,75 +17,56 @@ class WatchList(objectstore.ObjectStoreMixin):
"""Global watch list and info for callsigns."""
_instance = None
lock = threading.Lock()
data = {}
def __new__(cls, *args, **kwargs):
if cls._instance is None:
cls._instance = super().__new__(cls)
cls._instance._init_store()
cls._instance.data = {}
return cls._instance
def __init__(self):
super().__init__()
self._update_from_conf()
def __init__(self, config=None):
ring_size = CONF.watch_list.packet_keep_count
def _update_from_conf(self, config=None):
with self.lock:
if CONF.watch_list.enabled and CONF.watch_list.callsigns:
for callsign in CONF.watch_list.callsigns:
call = callsign.replace("*", "")
# FIXME(waboring) - we should fetch the last time we saw
# a beacon from a callsign or some other mechanism to find
# last time a message was seen by aprs-is. For now this
# is all we can do.
if call not in self.data:
self.data[call] = {
"last": None,
"packet": None,
}
def stats(self, serializable=False) -> dict:
stats = {}
with self.lock:
for callsign in self.data:
stats[callsign] = {
"last": self.data[callsign]["last"],
"packet": self.data[callsign]["packet"],
"age": self.age(callsign),
"old": self.is_old(callsign),
if CONF.watch_list.callsigns:
for callsign in CONF.watch_list.callsigns:
call = callsign.replace("*", "")
# FIXME(waboring) - we should fetch the last time we saw
# a beacon from a callsign or some other mechanism to find
# last time a message was seen by aprs-is. For now this
# is all we can do.
self.data[call] = {
"last": datetime.datetime.now(),
"packets": utils.RingBuffer(
ring_size,
),
}
return stats
def is_enabled(self):
return CONF.watch_list.enabled
def callsign_in_watchlist(self, callsign):
with self.lock:
return callsign in self.data
def rx(self, packet: type[core.Packet]) -> None:
"""Track when we got a packet from the network."""
callsign = packet.from_call
return callsign in self.data
@wrapt.synchronized(lock)
def update_seen(self, packet):
if packet.addresse:
callsign = packet.addresse
else:
callsign = packet.from_call
if self.callsign_in_watchlist(callsign):
with self.lock:
self.data[callsign]["last"] = datetime.datetime.now()
self.data[callsign]["packet"] = packet
def tx(self, packet: type[core.Packet]) -> None:
"""We don't care about TX packets."""
self.data[callsign]["last"] = datetime.datetime.now()
self.data[callsign]["packets"].append(packet)
def last_seen(self, callsign):
with self.lock:
if self.callsign_in_watchlist(callsign):
return self.data[callsign]["last"]
if self.callsign_in_watchlist(callsign):
return self.data[callsign]["last"]
def age(self, callsign):
now = datetime.datetime.now()
last_seen_time = self.last_seen(callsign)
if last_seen_time:
return str(now - last_seen_time)
else:
return None
return str(now - self.last_seen(callsign))
def max_delta(self, seconds=None):
if not seconds:
@ -101,19 +83,14 @@ class WatchList(objectstore.ObjectStoreMixin):
We put this here so any notification plugin can use this
same test.
"""
if not self.callsign_in_watchlist(callsign):
return False
age = self.age(callsign)
if age:
delta = utils.parse_delta_str(age)
d = datetime.timedelta(**delta)
max_delta = self.max_delta(seconds=seconds)
delta = utils.parse_delta_str(age)
d = datetime.timedelta(**delta)
if d > max_delta:
return True
else:
return False
max_delta = self.max_delta(seconds=seconds)
if d > max_delta:
return True
else:
return False

View File

@ -1,5 +1,4 @@
from __future__ import annotations
# The base plugin class
import abc
import importlib
import inspect
@ -8,41 +7,42 @@ import re
import textwrap
import threading
import pluggy
from oslo_config import cfg
import pluggy
import aprsd
from aprsd import packets, threads
from aprsd.client.client import APRSDClient
from aprsd import client, packets, threads
from aprsd.packets import watch_list
# setup the global logger
CONF = cfg.CONF
LOG = logging.getLogger('APRSD')
LOG = logging.getLogger("APRSD")
CORE_MESSAGE_PLUGINS = [
'aprsd.plugins.email.EmailPlugin',
'aprsd.plugins.fortune.FortunePlugin',
'aprsd.plugins.location.LocationPlugin',
'aprsd.plugins.ping.PingPlugin',
'aprsd.plugins.time.TimePlugin',
'aprsd.plugins.weather.USWeatherPlugin',
'aprsd.plugins.version.VersionPlugin',
"aprsd.plugins.email.EmailPlugin",
"aprsd.plugins.fortune.FortunePlugin",
"aprsd.plugins.location.LocationPlugin",
"aprsd.plugins.ping.PingPlugin",
"aprsd.plugins.query.QueryPlugin",
"aprsd.plugins.time.TimePlugin",
"aprsd.plugins.weather.USWeatherPlugin",
"aprsd.plugins.version.VersionPlugin",
]
CORE_NOTIFY_PLUGINS = [
'aprsd.plugins.notify.NotifySeenPlugin',
"aprsd.plugins.notify.NotifySeenPlugin",
]
hookspec = pluggy.HookspecMarker('aprsd')
hookimpl = pluggy.HookimplMarker('aprsd')
hookspec = pluggy.HookspecMarker("aprsd")
hookimpl = pluggy.HookimplMarker("aprsd")
class APRSDPluginSpec:
"""A hook specification namespace."""
@hookspec
def filter(self, packet: type[packets.Packet]):
def filter(self, packet: packets.core.Packet):
"""My special little hook that you can customize."""
@ -65,7 +65,7 @@ class APRSDPluginBase(metaclass=abc.ABCMeta):
self.threads = self.create_threads() or []
self.start_threads()
def start_threads(self) -> None:
def start_threads(self):
if self.enabled and self.threads:
if not isinstance(self.threads, list):
self.threads = [self.threads]
@ -77,24 +77,24 @@ class APRSDPluginBase(metaclass=abc.ABCMeta):
else:
LOG.error(
"Can't start thread {}:{}, Must be a child "
'of aprsd.threads.APRSDThread'.format(
"of aprsd.threads.APRSDThread".format(
self,
thread,
),
)
except Exception:
LOG.error(
'Failed to start threads for plugin {}'.format(
"Failed to start threads for plugin {}".format(
self,
),
)
@property
def message_count(self) -> int:
def message_count(self):
return self.message_counter
def help(self) -> str:
return 'Help!'
def help(self):
return "Help!"
@abc.abstractmethod
def setup(self):
@ -118,11 +118,11 @@ class APRSDPluginBase(metaclass=abc.ABCMeta):
thread.stop()
@abc.abstractmethod
def filter(self, packet: type[packets.Packet]) -> str | packets.MessagePacket:
def filter(self, packet: packets.core.Packet):
pass
@abc.abstractmethod
def process(self, packet: type[packets.Packet]):
def process(self, packet: packets.core.Packet):
"""This is called when the filter passes."""
@ -147,14 +147,14 @@ class APRSDWatchListPluginBase(APRSDPluginBase, metaclass=abc.ABCMeta):
watch_list = CONF.watch_list.callsigns
# make sure the timeout is set or this doesn't work
if watch_list:
aprs_client = APRSDClient()
filter_str = 'b/{}'.format('/'.join(watch_list))
aprs_client = client.factory.create().client
filter_str = "b/{}".format("/".join(watch_list))
aprs_client.set_filter(filter_str)
else:
LOG.warning('Watch list enabled, but no callsigns set.')
LOG.warning("Watch list enabled, but no callsigns set.")
@hookimpl
def filter(self, packet: type[packets.Packet]) -> str | packets.MessagePacket:
def filter(self, packet: packets.core.Packet):
result = packets.NULL_MESSAGE
if self.enabled:
wl = watch_list.WatchList()
@ -165,15 +165,14 @@ class APRSDWatchListPluginBase(APRSDPluginBase, metaclass=abc.ABCMeta):
result = self.process(packet)
except Exception as ex:
LOG.error(
'Plugin {} failed to process packet {}'.format(
self.__class__,
ex,
"Plugin {} failed to process packet {}".format(
self.__class__, ex,
),
)
if result:
self.tx_inc()
else:
LOG.warning(f'{self.__class__} plugin is not enabled')
LOG.warning(f"{self.__class__} plugin is not enabled")
return result
@ -197,7 +196,7 @@ class APRSDRegexCommandPluginBase(APRSDPluginBase, metaclass=abc.ABCMeta):
raise NotImplementedError
def help(self):
return '{}: {}'.format(
return "{}: {}".format(
self.command_name.lower(),
self.command_regex,
)
@ -207,17 +206,15 @@ class APRSDRegexCommandPluginBase(APRSDPluginBase, metaclass=abc.ABCMeta):
self.enabled = True
@hookimpl
def filter(self, packet: packets.MessagePacket) -> str | packets.MessagePacket:
LOG.debug(f'{self.__class__.__name__} called')
def filter(self, packet: packets.core.MessagePacket):
LOG.info(f"{self.__class__.__name__} called")
if not self.enabled:
result = f"{self.__class__.__name__} isn't enabled"
LOG.warning(result)
return result
if not isinstance(packet, packets.MessagePacket):
LOG.warning(
f'{self.__class__.__name__} Got a {packet.__class__.__name__} ignoring'
)
if not isinstance(packet, packets.core.MessagePacket):
LOG.warning(f"{self.__class__.__name__} Got a {packet.__class__.__name__} ignoring")
return packets.NULL_MESSAGE
result = None
@ -229,7 +226,7 @@ class APRSDRegexCommandPluginBase(APRSDPluginBase, metaclass=abc.ABCMeta):
# and is an APRS message format and has a message.
if (
tocall == CONF.callsign
and isinstance(packet, packets.MessagePacket)
and isinstance(packet, packets.core.MessagePacket)
and message
):
if re.search(self.command_regex, message, re.IGNORECASE):
@ -238,9 +235,8 @@ class APRSDRegexCommandPluginBase(APRSDPluginBase, metaclass=abc.ABCMeta):
result = self.process(packet)
except Exception as ex:
LOG.error(
'Plugin {} failed to process packet {}'.format(
self.__class__,
ex,
"Plugin {} failed to process packet {}".format(
self.__class__, ex,
),
)
LOG.exception(ex)
@ -255,7 +251,7 @@ class APRSFIKEYMixin:
def ensure_aprs_fi_key(self):
if not CONF.aprs_fi.apiKey:
LOG.error('Config aprs_fi.apiKey is not set')
LOG.error("Config aprs_fi.apiKey is not set")
self.enabled = False
else:
self.enabled = True
@ -267,31 +263,30 @@ class HelpPlugin(APRSDRegexCommandPluginBase):
This plugin is in this file to prevent a circular import.
"""
command_regex = '^[hH]'
command_name = 'help'
command_regex = "^[hH]"
command_name = "help"
def help(self):
return 'Help: send APRS help or help <plugin>'
return "Help: send APRS help or help <plugin>"
def process(self, packet: packets.MessagePacket):
LOG.info('HelpPlugin')
def process(self, packet: packets.core.MessagePacket):
LOG.info("HelpPlugin")
# fromcall = packet.get("from")
message = packet.message_text
# ack = packet.get("msgNo", "0")
a = re.search(r'^.*\s+(.*)', message)
a = re.search(r"^.*\s+(.*)", message)
command_name = None
if a is not None:
command_name = a.group(1).lower()
pm = PluginManager()
if command_name and '?' not in command_name:
if command_name and "?" not in command_name:
# user wants help for a specific plugin
reply = None
for p in pm.get_plugins():
if (
p.enabled
and isinstance(p, APRSDRegexCommandPluginBase)
p.enabled and isinstance(p, APRSDRegexCommandPluginBase)
and p.command_name.lower() == command_name
):
reply = p.help()
@ -304,20 +299,20 @@ class HelpPlugin(APRSDRegexCommandPluginBase):
LOG.debug(p)
if p.enabled and isinstance(p, APRSDRegexCommandPluginBase):
name = p.command_name.lower()
if name not in list and 'help' not in name:
if name not in list and "help" not in name:
list.append(name)
list.sort()
reply = ' '.join(list)
reply = " ".join(list)
lines = textwrap.wrap(reply, 60)
replies = ["Send APRS MSG of 'help' or 'help <plugin>'"]
for line in lines:
replies.append(f'plugins: {line}')
replies.append(f"plugins: {line}")
for entry in replies:
LOG.debug(f'{len(entry)} {entry}')
LOG.debug(f"{len(entry)} {entry}")
LOG.debug(f'{replies}')
LOG.debug(f"{replies}")
return replies
@ -342,34 +337,12 @@ class PluginManager:
return cls._instance
def _init(self):
self._pluggy_pm = pluggy.PluginManager('aprsd')
self._pluggy_pm = pluggy.PluginManager("aprsd")
self._pluggy_pm.add_hookspecs(APRSDPluginSpec)
# For the watchlist plugins
self._watchlist_pm = pluggy.PluginManager('aprsd')
self._watchlist_pm = pluggy.PluginManager("aprsd")
self._watchlist_pm.add_hookspecs(APRSDPluginSpec)
def stats(self, serializable=False) -> dict:
"""Collect and return stats for all plugins."""
def full_name_with_qualname(obj):
return '{}.{}'.format(
obj.__class__.__module__,
obj.__class__.__qualname__,
)
plugin_stats = {}
plugins = self.get_plugins()
if plugins:
for p in plugins:
plugin_stats[full_name_with_qualname(p)] = {
'enabled': p.enabled,
'rx': p.rx_count,
'tx': p.tx_count,
'version': p.version,
}
return plugin_stats
def is_plugin(self, obj):
for c in inspect.getmro(obj):
if issubclass(c, APRSDPluginBase):
@ -393,19 +366,17 @@ class PluginManager:
module_name = None
class_name = None
try:
module_name, class_name = module_class_string.rsplit('.', 1)
module_name, class_name = module_class_string.rsplit(".", 1)
module = importlib.import_module(module_name)
# Commented out because the email thread starts in a different context
# and hence gives a different singleton for the EmailStats
# module = importlib.reload(module)
module = importlib.reload(module)
except Exception as ex:
if not module_name:
LOG.error(f'Failed to load Plugin {module_class_string}')
LOG.error(f"Failed to load Plugin {module_class_string}")
else:
LOG.error(f"Failed to load Plugin '{module_name}' : '{ex}'")
return
assert hasattr(module, class_name), 'class {} is not in {}'.format(
assert hasattr(module, class_name), "class {} is not in {}".format(
class_name,
module_name,
)
@ -413,7 +384,7 @@ class PluginManager:
# class_name, module_name))
cls = getattr(module, class_name)
if super_cls is not None:
assert issubclass(cls, super_cls), 'class {} should inherit from {}'.format(
assert issubclass(cls, super_cls), "class {} should inherit from {}".format(
class_name,
super_cls.__name__,
)
@ -444,9 +415,7 @@ class PluginManager:
)
self._watchlist_pm.register(plugin_obj)
else:
LOG.warning(
f'Plugin {plugin_obj.__class__.__name__} is disabled'
)
LOG.warning(f"Plugin {plugin_obj.__class__.__name__} is disabled")
elif isinstance(plugin_obj, APRSDRegexCommandPluginBase):
if plugin_obj.enabled:
LOG.info(
@ -458,9 +427,7 @@ class PluginManager:
)
self._pluggy_pm.register(plugin_obj)
else:
LOG.warning(
f'Plugin {plugin_obj.__class__.__name__} is disabled'
)
LOG.warning(f"Plugin {plugin_obj.__class__.__name__} is disabled")
elif isinstance(plugin_obj, APRSDPluginBase):
if plugin_obj.enabled:
LOG.info(
@ -471,9 +438,7 @@ class PluginManager:
)
self._pluggy_pm.register(plugin_obj)
else:
LOG.warning(
f'Plugin {plugin_obj.__class__.__name__} is disabled'
)
LOG.warning(f"Plugin {plugin_obj.__class__.__name__} is disabled")
except Exception as ex:
LOG.error(f"Couldn't load plugin '{plugin_name}'")
LOG.exception(ex)
@ -481,28 +446,20 @@ class PluginManager:
def reload_plugins(self):
with self.lock:
del self._pluggy_pm
self.setup_plugins(load_help_plugin=CONF.load_help_plugin)
self.setup_plugins()
def setup_plugins(
self,
load_help_plugin=True,
plugin_list=None,
):
def setup_plugins(self, load_help_plugin=True):
"""Create the plugin manager and register plugins."""
LOG.info('Loading APRSD Plugins')
LOG.info("Loading APRSD Plugins")
# Help plugin is always enabled.
if load_help_plugin:
_help = HelpPlugin()
self._pluggy_pm.register(_help)
# if plugins_list is passed in, only load
# those plugins.
if plugin_list:
for plugin_name in plugin_list:
self._load_plugin(plugin_name)
elif CONF.enabled_plugins:
for p_name in CONF.enabled_plugins:
enabled_plugins = CONF.enabled_plugins
if enabled_plugins:
for p_name in enabled_plugins:
self._load_plugin(p_name)
else:
# Enabled plugins isn't set, so we default to loading all of
@ -510,14 +467,14 @@ class PluginManager:
for p_name in CORE_MESSAGE_PLUGINS:
self._load_plugin(p_name)
LOG.info('Completed Plugin Loading.')
LOG.info("Completed Plugin Loading.")
def run(self, packet: packets.MessagePacket):
def run(self, packet: packets.core.MessagePacket):
"""Execute all the plugins run method."""
with self.lock:
return self._pluggy_pm.hook.filter(packet=packet)
def run_watchlist(self, packet: packets.Packet):
def run_watchlist(self, packet: packets.core.Packet):
with self.lock:
return self._watchlist_pm.hook.filter(packet=packet)
@ -525,7 +482,7 @@ class PluginManager:
"""Stop all threads created by all plugins."""
with self.lock:
for p in self.get_plugins():
if hasattr(p, 'stop_threads'):
if hasattr(p, "stop_threads"):
p.stop_threads()
def register_msg(self, obj):

View File

@ -4,20 +4,21 @@ import logging
import requests
LOG = logging.getLogger('APRSD')
LOG = logging.getLogger("APRSD")
def get_aprs_fi(api_key, callsign):
LOG.debug(f"Fetch aprs.fi location for '{callsign}'")
try:
url = (
'http://api.aprs.fi/api/get?&what=loc&apikey={}&format=json&name={}'.format(
api_key, callsign
)
"http://api.aprs.fi/api/get?"
"&what=loc&apikey={}&format=json"
"&name={}".format(api_key, callsign)
)
response = requests.get(url)
except Exception as e:
raise Exception('Failed to get aprs.fi location') from e
except Exception:
raise Exception("Failed to get aprs.fi location")
else:
response.raise_for_status()
return json.loads(response.text)
@ -25,22 +26,22 @@ def get_aprs_fi(api_key, callsign):
def get_weather_gov_for_gps(lat, lon):
# FIXME(hemna) This is currently BROKEN
LOG.debug(f'Fetch station at {lat}, {lon}')
LOG.debug(f"Fetch station at {lat}, {lon}")
headers = requests.utils.default_headers()
headers.update(
{'User-Agent': '(aprsd, waboring@hemna.com)'},
{"User-Agent": "(aprsd, waboring@hemna.com)"},
)
try:
url2 = (
'https://forecast.weather.gov/MapClick.php?lat=%s'
'&lon=%s&FcstType=json' % (lat, lon)
"https://forecast.weather.gov/MapClick.php?lat=%s"
"&lon=%s&FcstType=json" % (lat, lon)
# f"https://api.weather.gov/points/{lat},{lon}"
)
LOG.debug(f"Fetching weather '{url2}'")
response = requests.get(url2, headers=headers)
except Exception as e:
LOG.error(e)
raise Exception('Failed to get weather') from e
raise Exception("Failed to get weather")
else:
response.raise_for_status()
return json.loads(response.text)
@ -49,24 +50,24 @@ def get_weather_gov_for_gps(lat, lon):
def get_weather_gov_metar(station):
LOG.debug(f"Fetch metar for station '{station}'")
try:
url = 'https://api.weather.gov/stations/{}/observations/latest'.format(
url = "https://api.weather.gov/stations/{}/observations/latest".format(
station,
)
response = requests.get(url)
except Exception as e:
raise Exception('Failed to fetch metar') from e
except Exception:
raise Exception("Failed to fetch metar")
else:
response.raise_for_status()
return json.loads(response)
def fetch_openweathermap(api_key, lat, lon, units='metric', exclude=None):
LOG.debug(f'Fetch openweathermap for {lat}, {lon}')
def fetch_openweathermap(api_key, lat, lon, units="metric", exclude=None):
LOG.debug(f"Fetch openweathermap for {lat}, {lon}")
if not exclude:
exclude = 'minutely,hourly,daily,alerts'
exclude = "minutely,hourly,daily,alerts"
try:
url = (
"https://api.openweathermap.org/data/3.0/onecall?"
"https://api.openweathermap.org/data/2.5/onecall?"
"lat={}&lon={}&appid={}&units={}&exclude={}".format(
lat,
lon,
@ -75,11 +76,10 @@ def fetch_openweathermap(api_key, lat, lon, units='metric', exclude=None):
exclude,
)
)
LOG.debug(f"Fetching OWM weather '{url}'")
response = requests.get(url)
except Exception as e:
LOG.error(e)
raise Exception('Failed to get weather') from e
raise Exception("Failed to get weather")
else:
response.raise_for_status()
return json.loads(response.text)

681
aprsd/plugins/email.py Normal file
View File

@ -0,0 +1,681 @@
import datetime
import email
from email.mime.text import MIMEText
import imaplib
import logging
import re
import smtplib
import threading
import time
import imapclient
from oslo_config import cfg
from aprsd import packets, plugin, stats, threads
from aprsd.threads import tx
from aprsd.utils import trace
CONF = cfg.CONF
LOG = logging.getLogger("APRSD")
shortcuts_dict = None
class EmailInfo:
"""A singleton thread safe mechanism for the global check_email_delay.
This has to be done because we have 2 separate threads that access
the delay value.
1) when EmailPlugin runs from a user message and
2) when the background EmailThread runs to check email.
Access the check email delay with
EmailInfo().delay
Set it with
EmailInfo().delay = 100
or
EmailInfo().delay += 10
"""
_instance = None
def __new__(cls, *args, **kwargs):
"""This magic turns this into a singleton."""
if cls._instance is None:
cls._instance = super().__new__(cls)
cls._instance.lock = threading.Lock()
cls._instance._delay = 60
return cls._instance
@property
def delay(self):
with self.lock:
return self._delay
@delay.setter
def delay(self, val):
with self.lock:
self._delay = val
class EmailPlugin(plugin.APRSDRegexCommandPluginBase):
"""Email Plugin."""
command_regex = "^-.*"
command_name = "email"
short_description = "Send and Receive email"
# message_number:time combos so we don't resend the same email in
# five mins {int:int}
email_sent_dict = {}
enabled = False
def setup(self):
"""Ensure that email is enabled and start the thread."""
if CONF.email_plugin.enabled:
self.enabled = True
if not CONF.email_plugin.callsign:
self.enabled = False
LOG.error("email_plugin.callsign is not set.")
return
if not CONF.email_plugin.imap_login:
LOG.error("email_plugin.imap_login not set. Disabling Plugin")
self.enabled = False
return
if not CONF.email_plugin.smtp_login:
LOG.error("email_plugin.smtp_login not set. Disabling Plugin")
self.enabled = False
return
shortcuts = _build_shortcuts_dict()
LOG.info(f"Email shortcuts {shortcuts}")
else:
LOG.info("Email services not enabled.")
self.enabled = False
def create_threads(self):
if self.enabled:
return APRSDEmailThread()
@trace.trace
def process(self, packet: packets.MessagePacket):
LOG.info("Email COMMAND")
if not self.enabled:
# Email has not been enabled
# so the plugin will just NOOP
return packets.NULL_MESSAGE
fromcall = packet.from_call
message = packet.message_text
ack = packet.get("msgNo", "0")
reply = None
if not CONF.email_plugin.enabled:
LOG.debug("Email is not enabled in config file ignoring.")
return "Email not enabled."
searchstring = "^" + CONF.email_plugin.callsign + ".*"
# only I can do email
if re.search(searchstring, fromcall):
# digits only, first one is number of emails to resend
r = re.search("^-([0-9])[0-9]*$", message)
if r is not None:
LOG.debug("RESEND EMAIL")
resend_email(r.group(1), fromcall)
reply = packets.NULL_MESSAGE
# -user@address.com body of email
elif re.search(r"^-([A-Za-z0-9_\-\.@]+) (.*)", message):
# (same search again)
a = re.search(r"^-([A-Za-z0-9_\-\.@]+) (.*)", message)
if a is not None:
to_addr = a.group(1)
content = a.group(2)
email_address = get_email_from_shortcut(to_addr)
if not email_address:
reply = "Bad email address"
return reply
# send recipient link to aprs.fi map
if content == "mapme":
content = (
"Click for my location: http://aprs.fi/{}" ""
).format(
CONF.email_plugin.callsign,
)
too_soon = 0
now = time.time()
# see if we sent this msg number recently
if ack in self.email_sent_dict:
# BUG(hemna) - when we get a 2 different email command
# with the same ack #, we don't send it.
timedelta = now - self.email_sent_dict[ack]
if timedelta < 300: # five minutes
too_soon = 1
if not too_soon or ack == 0:
LOG.info(f"Send email '{content}'")
send_result = send_email(to_addr, content)
reply = packets.NULL_MESSAGE
if send_result != 0:
reply = f"-{to_addr} failed"
else:
# clear email sent dictionary if somehow goes
# over 100
if len(self.email_sent_dict) > 98:
LOG.debug(
"DEBUG: email_sent_dict is big ("
+ str(len(self.email_sent_dict))
+ ") clearing out.",
)
self.email_sent_dict.clear()
self.email_sent_dict[ack] = now
else:
reply = packets.NULL_MESSAGE
LOG.info(
"Email for message number "
+ ack
+ " recently sent, not sending again.",
)
else:
reply = "Bad email address"
return reply
def _imap_connect():
imap_port = CONF.email_plugin.imap_port
use_ssl = CONF.email_plugin.imap_use_ssl
# host = CONFIG["aprsd"]["email"]["imap"]["host"]
# msg = "{}{}:{}".format("TLS " if use_ssl else "", host, imap_port)
# LOG.debug("Connect to IMAP host {} with user '{}'".
# format(msg, CONFIG['imap']['login']))
try:
server = imapclient.IMAPClient(
CONF.email_plugin.imap_host,
port=imap_port,
use_uid=True,
ssl=use_ssl,
timeout=30,
)
except Exception:
LOG.exception("Failed to connect IMAP server")
return
try:
server.login(
CONF.email_plugin.imap_login,
CONF.email_plugin.imap_password,
)
except (imaplib.IMAP4.error, Exception) as e:
msg = getattr(e, "message", repr(e))
LOG.error(f"Failed to login {msg}")
return
server.select_folder("INBOX")
server.fetch = trace.trace(server.fetch)
server.search = trace.trace(server.search)
server.remove_flags = trace.trace(server.remove_flags)
server.add_flags = trace.trace(server.add_flags)
return server
def _smtp_connect():
host = CONF.email_plugin.smtp_host
smtp_port = CONF.email_plugin.smtp_port
use_ssl = CONF.email_plugin.smtp_use_ssl
msg = "{}{}:{}".format("SSL " if use_ssl else "", host, smtp_port)
LOG.debug(
"Connect to SMTP host {} with user '{}'".format(
msg,
CONF.email_plugin.smtp_login,
),
)
try:
if use_ssl:
server = smtplib.SMTP_SSL(
host=host,
port=smtp_port,
timeout=30,
)
else:
server = smtplib.SMTP(
host=host,
port=smtp_port,
timeout=30,
)
except Exception:
LOG.error("Couldn't connect to SMTP Server")
return
LOG.debug(f"Connected to smtp host {msg}")
debug = CONF.email_plugin.debug
if debug:
server.set_debuglevel(5)
server.sendmail = trace.trace(server.sendmail)
try:
server.login(
CONF.email_plugin.smtp_login,
CONF.email_plugin.smtp_password,
)
except Exception:
LOG.error("Couldn't connect to SMTP Server")
return
LOG.debug(f"Logged into SMTP server {msg}")
return server
def _build_shortcuts_dict():
global shortcuts_dict
if not shortcuts_dict:
if CONF.email_plugin.email_shortcuts:
shortcuts_dict = {}
tmp = CONF.email_plugin.email_shortcuts
for combo in tmp:
entry = combo.split("=")
shortcuts_dict[entry[0]] = entry[1]
else:
shortcuts_dict = {}
return shortcuts_dict
def get_email_from_shortcut(addr):
if CONF.email_plugin.email_shortcuts:
shortcuts = _build_shortcuts_dict()
LOG.info(f"Shortcut lookup {addr} returns {shortcuts.get(addr, addr)}")
return shortcuts.get(addr, addr)
else:
return addr
def validate_email_config(disable_validation=False):
"""function to simply ensure we can connect to email services.
This helps with failing early during startup.
"""
LOG.info("Checking IMAP configuration")
imap_server = _imap_connect()
LOG.info("Checking SMTP configuration")
smtp_server = _smtp_connect()
if imap_server and smtp_server:
return True
else:
return False
@trace.trace
def parse_email(msgid, data, server):
envelope = data[b"ENVELOPE"]
# email address match
# use raw string to avoid invalid escape secquence errors r"string here"
f = re.search(r"([\.\w_-]+@[\.\w_-]+)", str(envelope.from_[0]))
if f is not None:
from_addr = f.group(1)
else:
from_addr = "noaddr"
LOG.debug(f"Got a message from '{from_addr}'")
try:
m = server.fetch([msgid], ["RFC822"])
except Exception:
LOG.exception("Couldn't fetch email from server in parse_email")
return
msg = email.message_from_string(m[msgid][b"RFC822"].decode(errors="ignore"))
if msg.is_multipart():
text = ""
html = None
# default in case body somehow isn't set below - happened once
body = b"* unreadable msg received"
# this uses the last text or html part in the email,
# phone companies often put content in an attachment
for part in msg.get_payload():
if part.get_content_charset() is None:
# or BREAK when we hit a text or html?
# We cannot know the character set,
# so return decoded "something"
LOG.debug("Email got unknown content type")
text = part.get_payload(decode=True)
continue
charset = part.get_content_charset()
if part.get_content_type() == "text/plain":
LOG.debug("Email got text/plain")
text = str(
part.get_payload(decode=True),
str(charset),
"ignore",
).encode("utf8", "replace")
if part.get_content_type() == "text/html":
LOG.debug("Email got text/html")
html = str(
part.get_payload(decode=True),
str(charset),
"ignore",
).encode("utf8", "replace")
if text is not None:
# strip removes white space fore and aft of string
body = text.strip()
else:
body = html.strip()
else: # message is not multipart
# email.uscc.net sends no charset, blows up unicode function below
LOG.debug("Email is not multipart")
if msg.get_content_charset() is None:
text = str(msg.get_payload(decode=True), "US-ASCII", "ignore").encode(
"utf8",
"replace",
)
else:
text = str(
msg.get_payload(decode=True),
msg.get_content_charset(),
"ignore",
).encode("utf8", "replace")
body = text.strip()
# FIXED: UnicodeDecodeError: 'ascii' codec can't decode byte 0xf0
# in position 6: ordinal not in range(128)
# decode with errors='ignore'. be sure to encode it before we return
# it below, also with errors='ignore'
try:
body = body.decode(errors="ignore")
except Exception:
LOG.exception("Unicode decode failure")
LOG.error(f"Unidoce decode failed: {str(body)}")
body = "Unreadable unicode msg"
# strip all html tags
body = re.sub("<[^<]+?>", "", body)
# strip CR/LF, make it one line, .rstrip fails at this
body = body.replace("\n", " ").replace("\r", " ")
# ascii might be out of range, so encode it, removing any error characters
body = body.encode(errors="ignore")
return body, from_addr
# end parse_email
@trace.trace
def send_email(to_addr, content):
shortcuts = _build_shortcuts_dict()
email_address = get_email_from_shortcut(to_addr)
LOG.info("Sending Email_________________")
if to_addr in shortcuts:
LOG.info(f"To : {to_addr}")
to_addr = email_address
LOG.info(f" ({to_addr})")
subject = CONF.email_plugin.callsign
# content = content + "\n\n(NOTE: reply with one line)"
LOG.info(f"Subject : {subject}")
LOG.info(f"Body : {content}")
# check email more often since there's activity right now
EmailInfo().delay = 60
msg = MIMEText(content)
msg["Subject"] = subject
msg["From"] = CONF.email_plugin.smtp_login
msg["To"] = to_addr
server = _smtp_connect()
if server:
try:
server.sendmail(
CONF.email_plugin.smtp_login,
[to_addr],
msg.as_string(),
)
stats.APRSDStats().email_tx_inc()
except Exception:
LOG.exception("Sendmail Error!!!!")
server.quit()
return -1
server.quit()
return 0
@trace.trace
def resend_email(count, fromcall):
date = datetime.datetime.now()
month = date.strftime("%B")[:3] # Nov, Mar, Apr
day = date.day
year = date.year
today = f"{day}-{month}-{year}"
shortcuts = _build_shortcuts_dict()
# swap key/value
shortcuts_inverted = {v: k for k, v in shortcuts.items()}
try:
server = _imap_connect()
except Exception:
LOG.exception("Failed to Connect to IMAP. Cannot resend email ")
return
try:
messages = server.search(["SINCE", today])
except Exception:
LOG.exception("Couldn't search for emails in resend_email ")
return
# LOG.debug("%d messages received today" % len(messages))
msgexists = False
messages.sort(reverse=True)
del messages[int(count) :] # only the latest "count" messages
for message in messages:
try:
parts = server.fetch(message, ["ENVELOPE"]).items()
except Exception:
LOG.exception("Couldn't fetch email parts in resend_email")
continue
for msgid, data in list(parts):
# one at a time, otherwise order is random
(body, from_addr) = parse_email(msgid, data, server)
# unset seen flag, will stay bold in email client
try:
server.remove_flags(msgid, [imapclient.SEEN])
except Exception:
LOG.exception("Failed to remove SEEN flag in resend_email")
if from_addr in shortcuts_inverted:
# reverse lookup of a shortcut
from_addr = shortcuts_inverted[from_addr]
# asterisk indicates a resend
reply = "-" + from_addr + " * " + body.decode(errors="ignore")
tx.send(
packets.MessagePacket(
from_call=CONF.callsign,
to_call=fromcall,
message_text=reply,
),
)
msgexists = True
if msgexists is not True:
stm = time.localtime()
h = stm.tm_hour
m = stm.tm_min
s = stm.tm_sec
# append time as a kind of serial number to prevent FT1XDR from
# thinking this is a duplicate message.
# The FT1XDR pretty much ignores the aprs message number in this
# regard. The FTM400 gets it right.
reply = "No new msg {}:{}:{}".format(
str(h).zfill(2),
str(m).zfill(2),
str(s).zfill(2),
)
tx.send(
packets.MessagePacket(
from_call=CONF.callsign,
to_call=fromcall,
message_text=reply,
),
)
# check email more often since we're resending one now
EmailInfo().delay = 60
server.logout()
# end resend_email()
class APRSDEmailThread(threads.APRSDThread):
def __init__(self):
super().__init__("EmailThread")
self.past = datetime.datetime.now()
def loop(self):
time.sleep(5)
stats.APRSDStats().email_thread_update()
# always sleep for 5 seconds and see if we need to check email
# This allows CTRL-C to stop the execution of this loop sooner
# than check_email_delay time
now = datetime.datetime.now()
if now - self.past > datetime.timedelta(seconds=EmailInfo().delay):
# It's time to check email
# slowly increase delay every iteration, max out at 300 seconds
# any send/receive/resend activity will reset this to 60 seconds
if EmailInfo().delay < 300:
EmailInfo().delay += 10
LOG.debug(
f"check_email_delay is {EmailInfo().delay} seconds ",
)
shortcuts = _build_shortcuts_dict()
# swap key/value
shortcuts_inverted = {v: k for k, v in shortcuts.items()}
date = datetime.datetime.now()
month = date.strftime("%B")[:3] # Nov, Mar, Apr
day = date.day
year = date.year
today = f"{day}-{month}-{year}"
try:
server = _imap_connect()
except Exception:
LOG.exception("IMAP Failed to connect")
return True
try:
messages = server.search(["SINCE", today])
except Exception:
LOG.exception("IMAP failed to search for messages since today.")
return True
LOG.debug(f"{len(messages)} messages received today")
try:
_msgs = server.fetch(messages, ["ENVELOPE"])
except Exception:
LOG.exception("IMAP failed to fetch/flag messages: ")
return True
for msgid, data in _msgs.items():
envelope = data[b"ENVELOPE"]
LOG.debug(
'ID:%d "%s" (%s)'
% (msgid, envelope.subject.decode(), envelope.date),
)
f = re.search(
r"'([[A-a][0-9]_-]+@[[A-a][0-9]_-\.]+)",
str(envelope.from_[0]),
)
if f is not None:
from_addr = f.group(1)
else:
from_addr = "noaddr"
# LOG.debug("Message flags/tags: " +
# str(server.get_flags(msgid)[msgid]))
# if "APRS" not in server.get_flags(msgid)[msgid]:
# in python3, imap tags are unicode. in py2 they're strings.
# so .decode them to handle both
try:
taglist = [
x.decode(errors="ignore")
for x in server.get_flags(msgid)[msgid]
]
except Exception:
LOG.error("Failed to get flags.")
break
if "APRS" not in taglist:
# if msg not flagged as sent via aprs
try:
server.fetch([msgid], ["RFC822"])
except Exception:
LOG.exception("Failed single server fetch for RFC822")
break
(body, from_addr) = parse_email(msgid, data, server)
# unset seen flag, will stay bold in email client
try:
server.remove_flags(msgid, [imapclient.SEEN])
except Exception:
LOG.exception("Failed to remove flags SEEN")
# Not much we can do here, so lets try and
# send the aprs message anyway
if from_addr in shortcuts_inverted:
# reverse lookup of a shortcut
from_addr = shortcuts_inverted[from_addr]
reply = "-" + from_addr + " " + body.decode(errors="ignore")
# Send the message to the registered user in the
# config ham.callsign
tx.send(
packets.MessagePacket(
from_call=CONF.callsign,
to_call=CONF.email_plugin.callsign,
message_text=reply,
),
)
# flag message as sent via aprs
try:
server.add_flags(msgid, ["APRS"])
# unset seen flag, will stay bold in email client
except Exception:
LOG.exception("Couldn't add APRS flag to email")
try:
server.remove_flags(msgid, [imapclient.SEEN])
except Exception:
LOG.exception("Couldn't remove seen flag from email")
# check email more often since we just received an email
EmailInfo().delay = 60
# reset clock
LOG.debug("Done looping over Server.fetch, log out.")
self.past = datetime.datetime.now()
try:
server.logout()
except Exception:
LOG.exception("IMAP failed to logout: ")
return True
else:
# We haven't hit the email delay yet.
# LOG.debug("Delta({}) < {}".format(now - past, check_email_delay))
return True
return True

View File

@ -5,30 +5,21 @@ import subprocess
from aprsd import packets, plugin
from aprsd.utils import trace
LOG = logging.getLogger('APRSD')
FORTUNE_PATHS = [
'/usr/games/fortune',
'/usr/local/bin/fortune',
'/usr/bin/fortune',
]
LOG = logging.getLogger("APRSD")
class FortunePlugin(plugin.APRSDRegexCommandPluginBase):
"""Fortune."""
command_regex = r'^([f]|[f]\s|fortune)'
command_name = 'fortune'
short_description = 'Give me a fortune'
command_regex = r"^([f]|[f]\s|fortune)"
command_name = "fortune"
short_description = "Give me a fortune"
fortune_path = None
def setup(self):
for path in FORTUNE_PATHS:
self.fortune_path = shutil.which(path)
LOG.info(f'Fortune path {self.fortune_path}')
if self.fortune_path:
break
self.fortune_path = shutil.which("fortune")
if not self.fortune_path:
self.enabled = False
else:
@ -36,7 +27,7 @@ class FortunePlugin(plugin.APRSDRegexCommandPluginBase):
@trace.trace
def process(self, packet: packets.MessagePacket):
LOG.info('FortunePlugin')
LOG.info("FortunePlugin")
# fromcall = packet.get("from")
# message = packet.get("message_text", None)
@ -45,19 +36,19 @@ class FortunePlugin(plugin.APRSDRegexCommandPluginBase):
reply = None
try:
cmnd = [self.fortune_path, '-s', '-n 60']
command = ' '.join(cmnd)
cmnd = [self.fortune_path, "-s", "-n 60"]
command = " ".join(cmnd)
output = subprocess.check_output(
command,
shell=True,
timeout=3,
text=True,
universal_newlines=True,
)
output = (
output.replace('\r', '')
.replace('\n', '')
.replace(' ', '')
.replace('\t', ' ')
output.replace("\r", "")
.replace("\n", "")
.replace(" ", "")
.replace("\t", " ")
)
except subprocess.CalledProcessError as ex:
reply = f"Fortune command failed '{ex.output}'"

97
aprsd/plugins/location.py Normal file
View File

@ -0,0 +1,97 @@
import logging
import re
import time
from geopy.geocoders import Nominatim
from oslo_config import cfg
from aprsd import packets, plugin, plugin_utils
from aprsd.utils import trace
CONF = cfg.CONF
LOG = logging.getLogger("APRSD")
class LocationPlugin(plugin.APRSDRegexCommandPluginBase, plugin.APRSFIKEYMixin):
"""Location!"""
command_regex = r"^([l]|[l]\s|location)"
command_name = "location"
short_description = "Where in the world is a CALLSIGN's last GPS beacon?"
def setup(self):
self.ensure_aprs_fi_key()
@trace.trace
def process(self, packet: packets.MessagePacket):
LOG.info("Location Plugin")
fromcall = packet.from_call
message = packet.get("message_text", None)
api_key = CONF.aprs_fi.apiKey
# optional second argument is a callsign to search
a = re.search(r"^.*\s+(.*)", message)
if a is not None:
searchcall = a.group(1)
searchcall = searchcall.upper()
else:
# if no second argument, search for calling station
searchcall = fromcall
try:
aprs_data = plugin_utils.get_aprs_fi(api_key, searchcall)
except Exception as ex:
LOG.error(f"Failed to fetch aprs.fi '{ex}'")
return "Failed to fetch aprs.fi location"
LOG.debug(f"LocationPlugin: aprs_data = {aprs_data}")
if not len(aprs_data["entries"]):
LOG.error("Didn't get any entries from aprs.fi")
return "Failed to fetch aprs.fi location"
lat = float(aprs_data["entries"][0]["lat"])
lon = float(aprs_data["entries"][0]["lng"])
# Get some information about their location
try:
tic = time.perf_counter()
geolocator = Nominatim(user_agent="APRSD")
coordinates = f"{lat:0.6f}, {lon:0.6f}"
location = geolocator.reverse(coordinates)
address = location.raw.get("address")
toc = time.perf_counter()
if address:
LOG.info(f"Geopy address {address} took {toc - tic:0.4f}")
if address.get("country_code") == "us":
area_info = f"{address.get('county')}, {address.get('state')}"
else:
# what to do for address for non US?
area_info = f"{address.get('country'), 'Unknown'}"
except Exception as ex:
LOG.error(f"Failed to fetch Geopy address {ex}")
area_info = "Unknown Location"
try: # altitude not always provided
alt = float(aprs_data["entries"][0]["altitude"])
except Exception:
alt = 0
altfeet = int(alt * 3.28084)
aprs_lasttime_seconds = aprs_data["entries"][0]["lasttime"]
# aprs_lasttime_seconds = aprs_lasttime_seconds.encode(
# "ascii", errors="ignore"
# ) # unicode to ascii
delta_seconds = time.time() - int(aprs_lasttime_seconds)
delta_hours = delta_seconds / 60 / 60
reply = "{}: {} {}' {},{} {}h ago".format(
searchcall,
area_info,
str(altfeet),
f"{lat:0.2f}",
f"{lon:0.2f}",
str("%.1f" % round(delta_hours, 1)),
).rstrip()
return reply

View File

@ -4,6 +4,7 @@ from oslo_config import cfg
from aprsd import packets, plugin
CONF = cfg.CONF
LOG = logging.getLogger("APRSD")
@ -42,7 +43,9 @@ class NotifySeenPlugin(plugin.APRSDWatchListPluginBase):
pkt = packets.MessagePacket(
from_call=CONF.callsign,
to_call=notify_callsign,
message_text=(f"{fromcall} was just seen by type:'{packet_type}'"),
message_text=(
f"{fromcall} was just seen by type:'{packet_type}'"
),
allow_delay=False,
)
pkt.allow_delay = False

81
aprsd/plugins/query.py Normal file
View File

@ -0,0 +1,81 @@
import datetime
import logging
import re
from oslo_config import cfg
from aprsd import packets, plugin
from aprsd.packets import tracker
from aprsd.utils import trace
CONF = cfg.CONF
LOG = logging.getLogger("APRSD")
class QueryPlugin(plugin.APRSDRegexCommandPluginBase):
"""Query command."""
command_regex = r"^\!.*"
command_name = "query"
short_description = "APRSD Owner command to query messages in the MsgTrack"
def setup(self):
"""Do any plugin setup here."""
if not CONF.query_plugin.callsign:
LOG.error("Config query_plugin.callsign not set. Disabling plugin")
self.enabled = False
self.enabled = True
@trace.trace
def process(self, packet: packets.MessagePacket):
LOG.info("Query COMMAND")
fromcall = packet.from_call
message = packet.get("message_text", None)
pkt_tracker = tracker.PacketTrack()
now = datetime.datetime.now()
reply = "Pending messages ({}) {}".format(
len(pkt_tracker),
now.strftime("%H:%M:%S"),
)
searchstring = "^" + CONF.query_plugin.callsign + ".*"
# only I can do admin commands
if re.search(searchstring, fromcall):
# resend last N most recent: "!3"
r = re.search(r"^\!([0-9]).*", message)
if r is not None:
if len(pkt_tracker) > 0:
last_n = r.group(1)
reply = packets.NULL_MESSAGE
LOG.debug(reply)
pkt_tracker.restart_delayed(count=int(last_n))
else:
reply = "No pending msgs to resend"
LOG.debug(reply)
return reply
# resend all: "!a"
r = re.search(r"^\![aA].*", message)
if r is not None:
if len(pkt_tracker) > 0:
reply = packets.NULL_MESSAGE
LOG.debug(reply)
pkt_tracker.restart_delayed()
else:
reply = "No pending msgs"
LOG.debug(reply)
return reply
# delete all: "!d"
r = re.search(r"^\![dD].*", message)
if r is not None:
reply = "Deleted ALL pending msgs."
LOG.debug(reply)
pkt_tracker.flush()
return reply
return reply

View File

@ -1,9 +1,9 @@
import logging
import re
import time
from oslo_config import cfg
import pytz
from tzlocal import get_localzone
from aprsd import packets, plugin, plugin_utils
from aprsd.utils import fuzzy, trace
@ -22,8 +22,7 @@ class TimePlugin(plugin.APRSDRegexCommandPluginBase):
short_description = "What is the current local time."
def _get_local_tz(self):
lz = get_localzone()
return pytz.timezone(str(lz))
return pytz.timezone(time.strftime("%Z"))
def _get_utcnow(self):
return pytz.datetime.datetime.utcnow()

View File

@ -1,8 +1,7 @@
import logging
import aprsd
from aprsd import plugin
from aprsd.stats import collector
from aprsd import plugin, stats
LOG = logging.getLogger("APRSD")
@ -24,8 +23,10 @@ class VersionPlugin(plugin.APRSDRegexCommandPluginBase):
# fromcall = packet.get("from")
# message = packet.get("message_text", None)
# ack = packet.get("msgNo", "0")
s = collector.Collector().collect()
stats_obj = stats.APRSDStats()
s = stats_obj.stats()
print(s)
return "APRSD ver:{} uptime:{}".format(
aprsd.__version__,
s["APRSDStats"]["uptime"],
s["aprsd"]["uptime"],
)

View File

@ -2,14 +2,15 @@ import json
import logging
import re
import requests
from oslo_config import cfg
import requests
from aprsd import plugin, plugin_utils
from aprsd.utils import trace
CONF = cfg.CONF
LOG = logging.getLogger('APRSD')
LOG = logging.getLogger("APRSD")
class USWeatherPlugin(plugin.APRSDRegexCommandPluginBase, plugin.APRSFIKEYMixin):
@ -25,23 +26,21 @@ class USWeatherPlugin(plugin.APRSDRegexCommandPluginBase, plugin.APRSFIKEYMixin)
"weather" - returns weather near the calling callsign
"""
# command_regex = r"^([w][x]|[w][x]\s|weather)"
command_regex = r'^[wW]'
command_name = 'USWeather'
short_description = 'Provide USA only weather of GPS Beacon location'
command_regex = r"^([w][x]|[w][x]\s|weather)"
command_name = "USWeather"
short_description = "Provide USA only weather of GPS Beacon location"
def setup(self):
self.ensure_aprs_fi_key()
@trace.trace
def process(self, packet):
LOG.info('Weather Plugin')
LOG.info("Weather Plugin")
fromcall = packet.from_call
message = packet.get('message_text', None)
message = packet.get("message_text", None)
# message = packet.get("message_text", None)
# ack = packet.get("msgNo", "0")
a = re.search(r'^.*\s+(.*)', message)
a = re.search(r"^.*\s+(.*)", message)
if a is not None:
searchcall = a.group(1)
searchcall = searchcall.upper()
@ -51,34 +50,34 @@ class USWeatherPlugin(plugin.APRSDRegexCommandPluginBase, plugin.APRSFIKEYMixin)
try:
aprs_data = plugin_utils.get_aprs_fi(api_key, searchcall)
except Exception as ex:
LOG.error(f'Failed to fetch aprs.fi data {ex}')
return 'Failed to fetch aprs.fi location'
LOG.error(f"Failed to fetch aprs.fi data {ex}")
return "Failed to fetch aprs.fi location"
LOG.debug(f'LocationPlugin: aprs_data = {aprs_data}')
if not len(aprs_data['entries']):
LOG.debug(f"LocationPlugin: aprs_data = {aprs_data}")
if not len(aprs_data["entries"]):
LOG.error("Didn't get any entries from aprs.fi")
return 'Failed to fetch aprs.fi location'
return "Failed to fetch aprs.fi location"
lat = aprs_data['entries'][0]['lat']
lon = aprs_data['entries'][0]['lng']
lat = aprs_data["entries"][0]["lat"]
lon = aprs_data["entries"][0]["lng"]
try:
wx_data = plugin_utils.get_weather_gov_for_gps(lat, lon)
except Exception as ex:
LOG.error(f"Couldn't fetch forecast.weather.gov '{ex}'")
return 'Unable to get weather'
return "Unable to get weather"
LOG.info(f'WX data {wx_data}')
LOG.info(f"WX data {wx_data}")
reply = (
'%sF(%sF/%sF) %s. %s, %s.'
"%sF(%sF/%sF) %s. %s, %s."
% (
wx_data['currentobservation']['Temp'],
wx_data['data']['temperature'][0],
wx_data['data']['temperature'][1],
wx_data['data']['weather'][0],
wx_data['time']['startPeriodName'][1],
wx_data['data']['weather'][1],
wx_data["currentobservation"]["Temp"],
wx_data["data"]["temperature"][0],
wx_data["data"]["temperature"][1],
wx_data["data"]["weather"][0],
wx_data["time"]["startPeriodName"][1],
wx_data["data"]["weather"][1],
)
).rstrip()
LOG.debug(f"reply: '{reply}' ")
@ -100,31 +99,32 @@ class USMetarPlugin(plugin.APRSDRegexCommandPluginBase, plugin.APRSFIKEYMixin):
"""
command_regex = r'^([m]|[M]|[m]\s|metar)'
command_name = 'USMetar'
short_description = 'USA only METAR of GPS Beacon location'
command_regex = r"^([m]|[M]|[m]\s|metar)"
command_name = "USMetar"
short_description = "USA only METAR of GPS Beacon location"
def setup(self):
self.ensure_aprs_fi_key()
@trace.trace
def process(self, packet):
fromcall = packet.get('from')
message = packet.get('message_text', None)
print("FISTY")
fromcall = packet.get("from")
message = packet.get("message_text", None)
# ack = packet.get("msgNo", "0")
LOG.info(f"WX Plugin '{message}'")
a = re.search(r'^.*\s+(.*)', message)
a = re.search(r"^.*\s+(.*)", message)
if a is not None:
searchcall = a.group(1)
station = searchcall.upper()
try:
resp = plugin_utils.get_weather_gov_metar(station)
except Exception as e:
LOG.debug(f'Weather failed with: {str(e)}')
reply = 'Unable to find station METAR'
LOG.debug(f"Weather failed with: {str(e)}")
reply = "Unable to find station METAR"
else:
station_data = json.loads(resp.text)
reply = station_data['properties']['rawMessage']
reply = station_data["properties"]["rawMessage"]
return reply
else:
@ -136,36 +136,36 @@ class USMetarPlugin(plugin.APRSDRegexCommandPluginBase, plugin.APRSFIKEYMixin):
try:
aprs_data = plugin_utils.get_aprs_fi(api_key, fromcall)
except Exception as ex:
LOG.error(f'Failed to fetch aprs.fi data {ex}')
return 'Failed to fetch aprs.fi location'
LOG.error(f"Failed to fetch aprs.fi data {ex}")
return "Failed to fetch aprs.fi location"
# LOG.debug("LocationPlugin: aprs_data = {}".format(aprs_data))
if not len(aprs_data['entries']):
LOG.error('Found no entries from aprs.fi!')
return 'Failed to fetch aprs.fi location'
if not len(aprs_data["entries"]):
LOG.error("Found no entries from aprs.fi!")
return "Failed to fetch aprs.fi location"
lat = aprs_data['entries'][0]['lat']
lon = aprs_data['entries'][0]['lng']
lat = aprs_data["entries"][0]["lat"]
lon = aprs_data["entries"][0]["lng"]
try:
wx_data = plugin_utils.get_weather_gov_for_gps(lat, lon)
except Exception as ex:
LOG.error(f"Couldn't fetch forecast.weather.gov '{ex}'")
return 'Unable to metar find station.'
return "Unable to metar find station."
if wx_data['location']['metar']:
station = wx_data['location']['metar']
if wx_data["location"]["metar"]:
station = wx_data["location"]["metar"]
try:
resp = plugin_utils.get_weather_gov_metar(station)
except Exception as e:
LOG.debug(f'Weather failed with: {str(e)}')
reply = 'Failed to get Metar'
LOG.debug(f"Weather failed with: {str(e)}")
reply = "Failed to get Metar"
else:
station_data = json.loads(resp.text)
reply = station_data['properties']['rawMessage']
reply = station_data["properties"]["rawMessage"]
else:
# Couldn't find a station
reply = 'No Metar station found'
reply = "No Metar station found"
return reply
@ -189,37 +189,33 @@ class OWMWeatherPlugin(plugin.APRSDRegexCommandPluginBase):
"""
# command_regex = r"^([w][x]|[w][x]\s|weather)"
command_regex = r'^[wW]'
command_name = 'OpenWeatherMap'
short_description = 'OpenWeatherMap weather of GPS Beacon location'
command_regex = r"^([w][x]|[w][x]\s|weather)"
command_name = "OpenWeatherMap"
short_description = "OpenWeatherMap weather of GPS Beacon location"
def setup(self):
if not CONF.owm_weather_plugin.apiKey:
LOG.error('Config.owm_weather_plugin.apiKey is not set. Disabling')
LOG.error("Config.owm_weather_plugin.apiKey is not set. Disabling")
self.enabled = False
else:
self.enabled = True
def help(self):
_help = [
'openweathermap: Send {} to get weather from your location'.format(
self.command_regex
),
'openweathermap: Send {} <callsign> to get weather from <callsign>'.format(
self.command_regex
),
"openweathermap: Send {} to get weather "
"from your location".format(self.command_regex),
"openweathermap: Send {} <callsign> to get "
"weather from <callsign>".format(self.command_regex),
]
return _help
@trace.trace
def process(self, packet):
fromcall = packet.get('from_call')
message = packet.get('message_text', None)
fromcall = packet.get("from")
message = packet.get("message_text", None)
# ack = packet.get("msgNo", "0")
LOG.info(f"OWMWeather Plugin '{message}'")
a = re.search(r'^.*\s+(.*)', message)
a = re.search(r"^.*\s+(.*)", message)
if a is not None:
searchcall = a.group(1)
searchcall = searchcall.upper()
@ -231,16 +227,16 @@ class OWMWeatherPlugin(plugin.APRSDRegexCommandPluginBase):
try:
aprs_data = plugin_utils.get_aprs_fi(api_key, searchcall)
except Exception as ex:
LOG.error(f'Failed to fetch aprs.fi data {ex}')
return 'Failed to fetch location'
LOG.error(f"Failed to fetch aprs.fi data {ex}")
return "Failed to fetch location"
# LOG.debug("LocationPlugin: aprs_data = {}".format(aprs_data))
if not len(aprs_data['entries']):
LOG.error('Found no entries from aprs.fi!')
return 'Failed to fetch location'
if not len(aprs_data["entries"]):
LOG.error("Found no entries from aprs.fi!")
return "Failed to fetch location"
lat = aprs_data['entries'][0]['lat']
lon = aprs_data['entries'][0]['lng']
lat = aprs_data["entries"][0]["lat"]
lon = aprs_data["entries"][0]["lng"]
units = CONF.units
api_key = CONF.owm_weather_plugin.apiKey
@ -250,40 +246,40 @@ class OWMWeatherPlugin(plugin.APRSDRegexCommandPluginBase):
lat,
lon,
units=units,
exclude='minutely,hourly',
exclude="minutely,hourly",
)
except Exception as ex:
LOG.error(f"Couldn't fetch openweathermap api '{ex}'")
# default to UTC
return 'Unable to get weather'
return "Unable to get weather"
if units == 'metric':
degree = 'C'
if units == "metric":
degree = "C"
else:
degree = 'F'
degree = "F"
if 'wind_gust' in wx_data['current']:
wind = '{:.0f}@{}G{:.0f}'.format(
wx_data['current']['wind_speed'],
wx_data['current']['wind_deg'],
wx_data['current']['wind_gust'],
if "wind_gust" in wx_data["current"]:
wind = "{:.0f}@{}G{:.0f}".format(
wx_data["current"]["wind_speed"],
wx_data["current"]["wind_deg"],
wx_data["current"]["wind_gust"],
)
else:
wind = '{:.0f}@{}'.format(
wx_data['current']['wind_speed'],
wx_data['current']['wind_deg'],
wind = "{:.0f}@{}".format(
wx_data["current"]["wind_speed"],
wx_data["current"]["wind_deg"],
)
# LOG.debug(wx_data["current"])
# LOG.debug(wx_data["daily"])
reply = '{} {:.1f}{}/{:.1f}{} Wind {} {}%'.format(
wx_data['current']['weather'][0]['description'],
wx_data['current']['temp'],
reply = "{} {:.1f}{}/{:.1f}{} Wind {} {}%".format(
wx_data["current"]["weather"][0]["description"],
wx_data["current"]["temp"],
degree,
wx_data['current']['dew_point'],
wx_data["current"]["dew_point"],
degree,
wind,
wx_data['current']['humidity'],
wx_data["current"]["humidity"],
)
return reply
@ -312,38 +308,36 @@ class AVWXWeatherPlugin(plugin.APRSDRegexCommandPluginBase):
docker build -f Dockerfile -t avwx-api:master .
"""
command_regex = r'^([m]|[m]|[m]\s|metar)'
command_name = 'AVWXWeather'
short_description = 'AVWX weather of GPS Beacon location'
command_regex = r"^([m]|[m]|[m]\s|metar)"
command_name = "AVWXWeather"
short_description = "AVWX weather of GPS Beacon location"
def setup(self):
if not CONF.avwx_plugin.base_url:
LOG.error('Config avwx_plugin.base_url not specified. Disabling')
LOG.error("Config avwx_plugin.base_url not specified. Disabling")
return False
elif not CONF.avwx_plugin.apiKey:
LOG.error('Config avwx_plugin.apiKey not specified. Disabling')
LOG.error("Config avwx_plugin.apiKey not specified. Disabling")
return False
self.enabled = True
else:
return True
def help(self):
_help = [
'avwxweather: Send {} to get weather from your location'.format(
self.command_regex
),
'avwxweather: Send {} <callsign> to get weather from <callsign>'.format(
self.command_regex
),
"avwxweather: Send {} to get weather "
"from your location".format(self.command_regex),
"avwxweather: Send {} <callsign> to get "
"weather from <callsign>".format(self.command_regex),
]
return _help
@trace.trace
def process(self, packet):
fromcall = packet.get('from')
message = packet.get('message_text', None)
fromcall = packet.get("from")
message = packet.get("message_text", None)
# ack = packet.get("msgNo", "0")
LOG.info(f"AVWXWeather Plugin '{message}'")
a = re.search(r'^.*\s+(.*)', message)
a = re.search(r"^.*\s+(.*)", message)
if a is not None:
searchcall = a.group(1)
searchcall = searchcall.upper()
@ -354,43 +348,43 @@ class AVWXWeatherPlugin(plugin.APRSDRegexCommandPluginBase):
try:
aprs_data = plugin_utils.get_aprs_fi(api_key, searchcall)
except Exception as ex:
LOG.error(f'Failed to fetch aprs.fi data {ex}')
return 'Failed to fetch location'
LOG.error(f"Failed to fetch aprs.fi data {ex}")
return "Failed to fetch location"
# LOG.debug("LocationPlugin: aprs_data = {}".format(aprs_data))
if not len(aprs_data['entries']):
LOG.error('Found no entries from aprs.fi!')
return 'Failed to fetch location'
if not len(aprs_data["entries"]):
LOG.error("Found no entries from aprs.fi!")
return "Failed to fetch location"
lat = aprs_data['entries'][0]['lat']
lon = aprs_data['entries'][0]['lng']
lat = aprs_data["entries"][0]["lat"]
lon = aprs_data["entries"][0]["lng"]
api_key = CONF.avwx_plugin.apiKey
base_url = CONF.avwx_plugin.base_url
token = f'TOKEN {api_key}'
headers = {'Authorization': token}
token = f"TOKEN {api_key}"
headers = {"Authorization": token}
try:
coord = f'{lat},{lon}'
coord = f"{lat},{lon}"
url = (
'{}/api/station/near/{}?'
'n=1&airport=false&reporting=true&format=json'.format(base_url, coord)
"{}/api/station/near/{}?"
"n=1&airport=false&reporting=true&format=json".format(base_url, coord)
)
LOG.debug(f"Get stations near me '{url}'")
response = requests.get(url, headers=headers)
except Exception as ex:
LOG.error(ex)
raise Exception(f"Failed to get the weather '{ex}'") from ex
raise Exception(f"Failed to get the weather '{ex}'")
else:
wx_data = json.loads(response.text)
# LOG.debug(wx_data)
station = wx_data[0]['station']['icao']
station = wx_data[0]["station"]["icao"]
try:
url = (
'{}/api/metar/{}?options=info,translate,summary'
'&airport=true&reporting=true&format=json&onfail=cache'.format(
"{}/api/metar/{}?options=info,translate,summary"
"&airport=true&reporting=true&format=json&onfail=cache".format(
base_url,
station,
)
@ -400,9 +394,9 @@ class AVWXWeatherPlugin(plugin.APRSDRegexCommandPluginBase):
response = requests.get(url, headers=headers)
except Exception as ex:
LOG.error(ex)
raise Exception(f'Failed to get metar {ex}') from ex
raise Exception(f"Failed to get metar {ex}")
else:
metar_data = json.loads(response.text)
# LOG.debug(metar_data)
return metar_data['raw']
return metar_data["raw"]

14
aprsd/rpc/__init__.py Normal file
View File

@ -0,0 +1,14 @@
import rpyc
class AuthSocketStream(rpyc.SocketStream):
"""Used to authenitcate the RPC stream to remote."""
@classmethod
def connect(cls, *args, authorizer=None, **kwargs):
stream_obj = super().connect(*args, **kwargs)
if callable(authorizer):
authorizer(stream_obj.sock)
return stream_obj

165
aprsd/rpc/client.py Normal file
View File

@ -0,0 +1,165 @@
import json
import logging
from oslo_config import cfg
import rpyc
from aprsd import conf # noqa
from aprsd import rpc
CONF = cfg.CONF
LOG = logging.getLogger("APRSD")
class RPCClient:
_instance = None
_rpc_client = None
ip = None
port = None
magic_word = None
def __new__(cls, *args, **kwargs):
if cls._instance is None:
cls._instance = super().__new__(cls)
return cls._instance
def __init__(self, ip=None, port=None, magic_word=None):
if ip:
self.ip = ip
else:
self.ip = CONF.rpc_settings.ip
if port:
self.port = int(port)
else:
self.port = CONF.rpc_settings.port
if magic_word:
self.magic_word = magic_word
else:
self.magic_word = CONF.rpc_settings.magic_word
self._check_settings()
self.get_rpc_client()
def _check_settings(self):
if not CONF.rpc_settings.enabled:
LOG.warning("RPC is not enabled, no way to get stats!!")
if self.magic_word == conf.common.APRSD_DEFAULT_MAGIC_WORD:
LOG.warning("You are using the default RPC magic word!!!")
LOG.warning("edit aprsd.conf and change rpc_settings.magic_word")
LOG.debug(f"RPC Client: {self.ip}:{self.port} {self.magic_word}")
def _rpyc_connect(
self, host, port, service=rpyc.VoidService,
config={}, ipv6=False,
keepalive=False, authorizer=None, ):
LOG.info(f"Connecting to RPC host '{host}:{port}'")
try:
s = rpc.AuthSocketStream.connect(
host, port, ipv6=ipv6, keepalive=keepalive,
authorizer=authorizer,
)
return rpyc.utils.factory.connect_stream(s, service, config=config)
except ConnectionRefusedError:
LOG.error(f"Failed to connect to RPC host '{host}:{port}'")
return None
def get_rpc_client(self):
if not self._rpc_client:
self._rpc_client = self._rpyc_connect(
self.ip,
self.port,
authorizer=lambda sock: sock.send(self.magic_word.encode()),
)
return self._rpc_client
def get_stats_dict(self):
cl = self.get_rpc_client()
result = {}
if not cl:
return result
try:
rpc_stats_dict = cl.root.get_stats()
result = json.loads(rpc_stats_dict)
except EOFError:
LOG.error("Lost connection to RPC Host")
self._rpc_client = None
return result
def get_stats(self):
cl = self.get_rpc_client()
result = {}
if not cl:
return result
try:
result = cl.root.get_stats_obj()
except EOFError:
LOG.error("Lost connection to RPC Host")
self._rpc_client = None
return result
def get_packet_track(self):
cl = self.get_rpc_client()
result = None
if not cl:
return result
try:
result = cl.root.get_packet_track()
except EOFError:
LOG.error("Lost connection to RPC Host")
self._rpc_client = None
return result
def get_packet_list(self):
cl = self.get_rpc_client()
result = None
if not cl:
return result
try:
result = cl.root.get_packet_list()
except EOFError:
LOG.error("Lost connection to RPC Host")
self._rpc_client = None
return result
def get_watch_list(self):
cl = self.get_rpc_client()
result = None
if not cl:
return result
try:
result = cl.root.get_watch_list()
except EOFError:
LOG.error("Lost connection to RPC Host")
self._rpc_client = None
return result
def get_seen_list(self):
cl = self.get_rpc_client()
result = None
if not cl:
return result
try:
result = cl.root.get_seen_list()
except EOFError:
LOG.error("Lost connection to RPC Host")
self._rpc_client = None
return result
def get_log_entries(self):
cl = self.get_rpc_client()
result = None
if not cl:
return result
try:
result_str = cl.root.get_log_entries()
result = json.loads(result_str)
except EOFError:
LOG.error("Lost connection to RPC Host")
self._rpc_client = None
return result

99
aprsd/rpc/server.py Normal file
View File

@ -0,0 +1,99 @@
import json
import logging
from oslo_config import cfg
import rpyc
from rpyc.utils.authenticators import AuthenticationError
from rpyc.utils.server import ThreadPoolServer
from aprsd import conf # noqa: F401
from aprsd import packets, stats, threads
from aprsd.threads import log_monitor
CONF = cfg.CONF
LOG = logging.getLogger("APRSD")
def magic_word_authenticator(sock):
client_ip = sock.getpeername()[0]
magic = sock.recv(len(CONF.rpc_settings.magic_word)).decode()
if magic != CONF.rpc_settings.magic_word:
LOG.error(
f"wrong magic word passed from {client_ip} "
"'{magic}' != '{CONF.rpc_settings.magic_word}'",
)
raise AuthenticationError(
f"wrong magic word passed in '{magic}'"
f" != '{CONF.rpc_settings.magic_word}'",
)
return sock, None
class APRSDRPCThread(threads.APRSDThread):
def __init__(self):
super().__init__(name="RPCThread")
self.thread = ThreadPoolServer(
APRSDService,
port=CONF.rpc_settings.port,
protocol_config={"allow_public_attrs": True},
authenticator=magic_word_authenticator,
)
def stop(self):
if self.thread:
self.thread.close()
self.thread_stop = True
def loop(self):
# there is no loop as run is blocked
if self.thread and not self.thread_stop:
# This is a blocking call
self.thread.start()
@rpyc.service
class APRSDService(rpyc.Service):
def on_connect(self, conn):
# code that runs when a connection is created
# (to init the service, if needed)
LOG.info("RPC Client Connected")
self._conn = conn
def on_disconnect(self, conn):
# code that runs after the connection has already closed
# (to finalize the service, if needed)
LOG.info("RPC Client Disconnected")
self._conn = None
@rpyc.exposed
def get_stats(self):
stat = stats.APRSDStats()
stats_dict = stat.stats()
return_str = json.dumps(stats_dict, indent=4, sort_keys=True, default=str)
return return_str
@rpyc.exposed
def get_stats_obj(self):
return stats.APRSDStats()
@rpyc.exposed
def get_packet_list(self):
return packets.PacketList()
@rpyc.exposed
def get_packet_track(self):
return packets.PacketTrack()
@rpyc.exposed
def get_watch_list(self):
return packets.WatchList()
@rpyc.exposed
def get_seen_list(self):
return packets.SeenList()
@rpyc.exposed
def get_log_entries(self):
entries = log_monitor.LogEntries().get_all_and_purge()
return json.dumps(entries, default=str)

266
aprsd/stats.py Normal file
View File

@ -0,0 +1,266 @@
import datetime
import logging
import threading
from oslo_config import cfg
import wrapt
import aprsd
from aprsd import packets, plugin, utils
CONF = cfg.CONF
LOG = logging.getLogger("APRSD")
class APRSDStats:
_instance = None
lock = threading.Lock()
start_time = None
_aprsis_server = None
_aprsis_keepalive = None
_email_thread_last_time = None
_email_tx = 0
_email_rx = 0
_mem_current = 0
_mem_peak = 0
_thread_info = {}
_pkt_cnt = {
"Packet": {
"tx": 0,
"rx": 0,
},
"AckPacket": {
"tx": 0,
"rx": 0,
},
"GPSPacket": {
"tx": 0,
"rx": 0,
},
"StatusPacket": {
"tx": 0,
"rx": 0,
},
"MicEPacket": {
"tx": 0,
"rx": 0,
},
"MessagePacket": {
"tx": 0,
"rx": 0,
},
"WeatherPacket": {
"tx": 0,
"rx": 0,
},
"ObjectPacket": {
"tx": 0,
"rx": 0,
},
}
def __new__(cls, *args, **kwargs):
if cls._instance is None:
cls._instance = super().__new__(cls)
# any init here
cls._instance.start_time = datetime.datetime.now()
cls._instance._aprsis_keepalive = datetime.datetime.now()
return cls._instance
@wrapt.synchronized(lock)
@property
def uptime(self):
return datetime.datetime.now() - self.start_time
@wrapt.synchronized(lock)
@property
def memory(self):
return self._mem_current
@wrapt.synchronized(lock)
def set_memory(self, memory):
self._mem_current = memory
@wrapt.synchronized(lock)
@property
def memory_peak(self):
return self._mem_peak
@wrapt.synchronized(lock)
def set_memory_peak(self, memory):
self._mem_peak = memory
@wrapt.synchronized(lock)
def set_thread_info(self, thread_info):
self._thread_info = thread_info
@wrapt.synchronized(lock)
@property
def thread_info(self):
return self._thread_info
@wrapt.synchronized(lock)
@property
def aprsis_server(self):
return self._aprsis_server
@wrapt.synchronized(lock)
def set_aprsis_server(self, server):
self._aprsis_server = server
@wrapt.synchronized(lock)
@property
def aprsis_keepalive(self):
return self._aprsis_keepalive
@wrapt.synchronized(lock)
def set_aprsis_keepalive(self):
self._aprsis_keepalive = datetime.datetime.now()
def rx(self, packet):
pkt_type = packet.__class__.__name__
if pkt_type not in self._pkt_cnt:
self._pkt_cnt[pkt_type] = {
"tx": 0,
"rx": 0,
}
self._pkt_cnt[pkt_type]["rx"] += 1
def tx(self, packet):
pkt_type = packet.__class__.__name__
if pkt_type not in self._pkt_cnt:
self._pkt_cnt[pkt_type] = {
"tx": 0,
"rx": 0,
}
self._pkt_cnt[pkt_type]["tx"] += 1
@wrapt.synchronized(lock)
@property
def msgs_tracked(self):
return packets.PacketTrack().total_tracked
@wrapt.synchronized(lock)
@property
def email_tx(self):
return self._email_tx
@wrapt.synchronized(lock)
def email_tx_inc(self):
self._email_tx += 1
@wrapt.synchronized(lock)
@property
def email_rx(self):
return self._email_rx
@wrapt.synchronized(lock)
def email_rx_inc(self):
self._email_rx += 1
@wrapt.synchronized(lock)
@property
def email_thread_time(self):
return self._email_thread_last_time
@wrapt.synchronized(lock)
def email_thread_update(self):
self._email_thread_last_time = datetime.datetime.now()
@wrapt.synchronized(lock)
def stats(self):
now = datetime.datetime.now()
if self._email_thread_last_time:
last_update = str(now - self._email_thread_last_time)
else:
last_update = "never"
if self._aprsis_keepalive:
last_aprsis_keepalive = str(now - self._aprsis_keepalive)
else:
last_aprsis_keepalive = "never"
pm = plugin.PluginManager()
plugins = pm.get_plugins()
plugin_stats = {}
if plugins:
def full_name_with_qualname(obj):
return "{}.{}".format(
obj.__class__.__module__,
obj.__class__.__qualname__,
)
for p in plugins:
plugin_stats[full_name_with_qualname(p)] = {
"enabled": p.enabled,
"rx": p.rx_count,
"tx": p.tx_count,
"version": p.version,
}
wl = packets.WatchList()
sl = packets.SeenList()
pl = packets.PacketList()
stats = {
"aprsd": {
"version": aprsd.__version__,
"uptime": utils.strfdelta(self.uptime),
"callsign": CONF.callsign,
"memory_current": int(self.memory),
"memory_current_str": utils.human_size(self.memory),
"memory_peak": int(self.memory_peak),
"memory_peak_str": utils.human_size(self.memory_peak),
"threads": self._thread_info,
"watch_list": wl.get_all(),
"seen_list": sl.get_all(),
},
"aprs-is": {
"server": str(self.aprsis_server),
"callsign": CONF.aprs_network.login,
"last_update": last_aprsis_keepalive,
},
"packets": {
"total_tracked": int(pl.total_tx() + pl.total_rx()),
"total_sent": int(pl.total_tx()),
"total_received": int(pl.total_rx()),
"by_type": self._pkt_cnt,
},
"messages": {
"sent": self._pkt_cnt["MessagePacket"]["tx"],
"received": self._pkt_cnt["MessagePacket"]["tx"],
"ack_sent": self._pkt_cnt["AckPacket"]["tx"],
},
"email": {
"enabled": CONF.email_plugin.enabled,
"sent": int(self._email_tx),
"received": int(self._email_rx),
"thread_last_update": last_update,
},
"plugins": plugin_stats,
}
return stats
def __str__(self):
pl = packets.PacketList()
return (
"Uptime:{} Msgs TX:{} RX:{} "
"ACK: TX:{} RX:{} "
"Email TX:{} RX:{} LastLoop:{} ".format(
self.uptime,
pl.total_tx(),
pl.total_rx(),
self._pkt_cnt["AckPacket"]["tx"],
self._pkt_cnt["AckPacket"]["rx"],
self._email_tx,
self._email_rx,
self._email_thread_last_time,
)
)

View File

@ -1,18 +0,0 @@
from aprsd import plugin
from aprsd.client import stats as client_stats
from aprsd.packets import packet_list, seen_list, tracker, watch_list
from aprsd.stats import app, collector
from aprsd.threads import aprsd
# Create the collector and register all the objects
# that APRSD has that implement the stats protocol
stats_collector = collector.Collector()
stats_collector.register_producer(app.APRSDStats)
stats_collector.register_producer(packet_list.PacketList)
stats_collector.register_producer(watch_list.WatchList)
stats_collector.register_producer(tracker.PacketTrack)
stats_collector.register_producer(plugin.PluginManager)
stats_collector.register_producer(aprsd.APRSDThreadList)
stats_collector.register_producer(client_stats.APRSClientStats)
stats_collector.register_producer(seen_list.SeenList)

View File

@ -1,49 +0,0 @@
import datetime
import tracemalloc
from oslo_config import cfg
import aprsd
from aprsd import utils
from aprsd.log import log as aprsd_log
CONF = cfg.CONF
class APRSDStats:
"""The AppStats class is used to collect stats from the application."""
_instance = None
start_time = None
def __new__(cls, *args, **kwargs):
"""Have to override the new method to make this a singleton
instead of using @singletone decorator so the unit tests work.
"""
if not cls._instance:
cls._instance = super().__new__(cls)
cls._instance.start_time = datetime.datetime.now()
return cls._instance
def uptime(self):
return datetime.datetime.now() - self.start_time
def stats(self, serializable=False) -> dict:
current, peak = tracemalloc.get_traced_memory()
uptime = self.uptime()
qsize = aprsd_log.logging_queue.qsize()
if serializable:
uptime = str(uptime)
stats = {
"version": aprsd.__version__,
"uptime": uptime,
"callsign": CONF.callsign,
"memory_current": int(current),
"memory_current_str": utils.human_size(current),
"memory_peak": int(peak),
"memory_peak_str": utils.human_size(peak),
"loging_queue": qsize,
}
return stats

View File

@ -1,46 +0,0 @@
import logging
from typing import Callable, Protocol, runtime_checkable
from aprsd.utils import singleton
LOG = logging.getLogger('APRSD')
@runtime_checkable
class StatsProducer(Protocol):
"""The StatsProducer protocol is used to define the interface for collecting stats."""
def stats(self, serializable=False) -> dict:
"""provide stats in a dictionary format."""
...
@singleton
class Collector:
"""The Collector class is used to collect stats from multiple StatsProducer instances."""
def __init__(self):
self.producers: list[Callable] = []
def collect(self, serializable=False) -> dict:
stats = {}
for name in self.producers:
cls = name()
try:
stats[cls.__class__.__name__] = cls.stats(
serializable=serializable
).copy()
except Exception as e:
LOG.error(f'Error in producer {name} (stats): {e}')
raise e
return stats
def register_producer(self, producer_name: Callable):
if not isinstance(producer_name, StatsProducer):
raise TypeError(f'Producer {producer_name} is not a StatsProducer')
self.producers.append(producer_name)
def unregister_producer(self, producer_name: Callable):
if not isinstance(producer_name, StatsProducer):
raise TypeError(f'Producer {producer_name} is not a StatsProducer')
self.producers.remove(producer_name)

View File

@ -3,9 +3,8 @@ import queue
# Make these available to anyone importing
# aprsd.threads
from .aprsd import APRSDThread, APRSDThreadList # noqa: F401
from .rx import ( # noqa: F401
APRSDProcessPacketThread,
APRSDRXThread,
)
from .keep_alive import KeepAliveThread # noqa: F401
from .rx import APRSDRXThread # noqa: F401
packet_queue = queue.Queue(maxsize=500)
packet_queue = queue.Queue(maxsize=20)

View File

@ -2,85 +2,19 @@ import abc
import datetime
import logging
import threading
import time
from typing import List
import wrapt
LOG = logging.getLogger("APRSD")
class APRSDThread(threading.Thread, metaclass=abc.ABCMeta):
"""Base class for all threads in APRSD."""
loop_count = 1
_pause = False
thread_stop = False
def __init__(self, name):
super().__init__(name=name)
self.thread_stop = False
APRSDThreadList().add(self)
self._last_loop = datetime.datetime.now()
def _should_quit(self):
"""see if we have a quit message from the global queue."""
if self.thread_stop:
return True
def pause(self):
"""Logically pause the processing of the main loop."""
LOG.debug(f"Pausing thread '{self.name}' loop_count {self.loop_count}")
self._pause = True
def unpause(self):
"""Logically resume processing of the main loop."""
LOG.debug(f"Resuming thread '{self.name}' loop_count {self.loop_count}")
self._pause = False
def stop(self):
LOG.debug(f"Stopping thread '{self.name}'")
self.thread_stop = True
@abc.abstractmethod
def loop(self):
pass
def _cleanup(self):
"""Add code to subclass to do any cleanup"""
def __str__(self):
out = (
f"Thread <{self.__class__.__name__}({self.name}) Alive? {self.is_alive()}>"
)
return out
def loop_age(self):
"""How old is the last loop call?"""
return datetime.datetime.now() - self._last_loop
def run(self):
LOG.debug("Starting")
while not self._should_quit():
if self._pause:
time.sleep(1)
else:
self.loop_count += 1
can_loop = self.loop()
self._last_loop = datetime.datetime.now()
if not can_loop:
self.stop()
self._cleanup()
APRSDThreadList().remove(self)
LOG.debug("Exiting")
class APRSDThreadList:
"""Singleton class that keeps track of application wide threads."""
_instance = None
threads_list: List[APRSDThread] = []
threads_list = []
lock = threading.Lock()
def __new__(cls, *args, **kwargs):
@ -89,28 +23,6 @@ class APRSDThreadList:
cls.threads_list = []
return cls._instance
def __contains__(self, name):
"""See if we have a thread in our list"""
for t in self.threads_list:
if t.name == name:
return True
return False
def stats(self, serializable=False) -> dict:
stats = {}
for th in self.threads_list:
age = th.loop_age()
if serializable:
age = str(age)
stats[th.name] = {
"name": th.name,
"class": th.__class__.__name__,
"alive": th.is_alive(),
"age": th.loop_age(),
"loop_count": th.loop_count,
}
return stats
@wrapt.synchronized(lock)
def add(self, thread_obj):
self.threads_list.append(thread_obj)
@ -125,42 +37,52 @@ class APRSDThreadList:
for th in self.threads_list:
LOG.info(f"Stopping Thread {th.name}")
if hasattr(th, "packet"):
LOG.info(f"{th.name} packet {th.packet}")
LOG.info(F"{th.name} packet {th.packet}")
th.stop()
@wrapt.synchronized
def pause_all(self):
"""Iterate over all threads and pause them."""
for th in self.threads_list:
LOG.info(f"Pausing Thread {th.name}")
if hasattr(th, "packet"):
LOG.info(f"{th.name} packet {th.packet}")
th.pause()
@wrapt.synchronized
def unpause_all(self):
"""Iterate over all threads and resume them."""
for th in self.threads_list:
LOG.info(f"Resuming Thread {th.name}")
if hasattr(th, "packet"):
LOG.info(f"{th.name} packet {th.packet}")
th.unpause()
@wrapt.synchronized(lock)
def info(self):
"""Go through all the threads and collect info about each."""
info = {}
for thread in self.threads_list:
alive = thread.is_alive()
age = thread.loop_age()
key = thread.__class__.__name__
info[key] = {
"alive": True if alive else False,
"age": age,
"name": thread.name,
}
return info
@wrapt.synchronized(lock)
def __len__(self):
return len(self.threads_list)
class APRSDThread(threading.Thread, metaclass=abc.ABCMeta):
def __init__(self, name):
super().__init__(name=name)
self.thread_stop = False
APRSDThreadList().add(self)
self._last_loop = datetime.datetime.now()
def _should_quit(self):
""" see if we have a quit message from the global queue."""
if self.thread_stop:
return True
def stop(self):
self.thread_stop = True
@abc.abstractmethod
def loop(self):
pass
def _cleanup(self):
"""Add code to subclass to do any cleanup"""
def __str__(self):
out = f"Thread <{self.__class__.__name__}({self.name}) Alive? {self.is_alive()}>"
return out
def loop_age(self):
"""How old is the last loop call?"""
return datetime.datetime.now() - self._last_loop
def run(self):
LOG.debug("Starting")
while not self._should_quit():
can_loop = self.loop()
self._last_loop = datetime.datetime.now()
if not can_loop:
self.stop()
self._cleanup()
APRSDThreadList().remove(self)
LOG.debug("Exiting")

115
aprsd/threads/keep_alive.py Normal file
View File

@ -0,0 +1,115 @@
import datetime
import logging
import time
import tracemalloc
from oslo_config import cfg
from aprsd import client, packets, stats, utils
from aprsd.threads import APRSDThread, APRSDThreadList
CONF = cfg.CONF
LOG = logging.getLogger("APRSD")
class KeepAliveThread(APRSDThread):
cntr = 0
checker_time = datetime.datetime.now()
def __init__(self):
tracemalloc.start()
super().__init__("KeepAlive")
max_timeout = {"hours": 0.0, "minutes": 2, "seconds": 0}
self.max_delta = datetime.timedelta(**max_timeout)
def loop(self):
if self.cntr % 60 == 0:
pkt_tracker = packets.PacketTrack()
stats_obj = stats.APRSDStats()
pl = packets.PacketList()
thread_list = APRSDThreadList()
now = datetime.datetime.now()
last_email = stats_obj.email_thread_time
if last_email:
email_thread_time = utils.strfdelta(now - last_email)
else:
email_thread_time = "N/A"
last_msg_time = utils.strfdelta(now - stats_obj.aprsis_keepalive)
current, peak = tracemalloc.get_traced_memory()
stats_obj.set_memory(current)
stats_obj.set_memory_peak(peak)
login = CONF.callsign
tracked_packets = len(pkt_tracker)
keepalive = (
"{} - Uptime {} RX:{} TX:{} Tracker:{} Msgs TX:{} RX:{} "
"Last:{} Email: {} - RAM Current:{} Peak:{} Threads:{}"
).format(
login,
utils.strfdelta(stats_obj.uptime),
pl.total_rx(),
pl.total_tx(),
tracked_packets,
stats_obj._pkt_cnt["MessagePacket"]["tx"],
stats_obj._pkt_cnt["MessagePacket"]["rx"],
last_msg_time,
email_thread_time,
utils.human_size(current),
utils.human_size(peak),
len(thread_list),
)
LOG.info(keepalive)
thread_out = []
thread_info = {}
for thread in thread_list.threads_list:
alive = thread.is_alive()
age = thread.loop_age()
key = thread.__class__.__name__
thread_out.append(f"{key}:{alive}:{age}")
if key not in thread_info:
thread_info[key] = {}
thread_info[key]["alive"] = alive
thread_info[key]["age"] = age
if not alive:
LOG.error(f"Thread {thread}")
LOG.info(",".join(thread_out))
stats_obj.set_thread_info(thread_info)
# check the APRS connection
cl = client.factory.create()
# Reset the connection if it's dead and this isn't our
# First time through the loop.
# The first time through the loop can happen at startup where
# The keepalive thread starts before the client has a chance
# to make it's connection the first time.
if not cl.is_alive() and self.cntr > 0:
LOG.error(f"{cl.__class__.__name__} is not alive!!! Resetting")
client.factory.create().reset()
else:
# See if we should reset the aprs-is client
# Due to losing a keepalive from them
delta_dict = utils.parse_delta_str(last_msg_time)
delta = datetime.timedelta(**delta_dict)
if delta > self.max_delta:
# We haven't gotten a keepalive from aprs-is in a while
# reset the connection.a
if not client.KISSClient.is_enabled():
LOG.warning(f"Resetting connection to APRS-IS {delta}")
client.factory.create().reset()
# Check version every day
delta = now - self.checker_time
if delta > datetime.timedelta(hours=24):
self.checker_time = now
level, msg = utils._check_version()
if level:
LOG.warning(msg)
self.cntr += 1
time.sleep(1)
return True

View File

@ -1,108 +0,0 @@
import datetime
import logging
import time
import tracemalloc
from loguru import logger
from oslo_config import cfg
from aprsd import packets, utils
from aprsd.log import log as aprsd_log
from aprsd.stats import collector
from aprsd.threads import APRSDThread, APRSDThreadList
from aprsd.utils import keepalive_collector
CONF = cfg.CONF
LOG = logging.getLogger("APRSD")
LOGU = logger
class KeepAliveThread(APRSDThread):
cntr = 0
checker_time = datetime.datetime.now()
def __init__(self):
tracemalloc.start()
super().__init__("KeepAlive")
max_timeout = {"hours": 0.0, "minutes": 2, "seconds": 0}
self.max_delta = datetime.timedelta(**max_timeout)
def loop(self):
if self.loop_count % 60 == 0:
stats_json = collector.Collector().collect()
pl = packets.PacketList()
thread_list = APRSDThreadList()
now = datetime.datetime.now()
if (
"APRSClientStats" in stats_json
and stats_json["APRSClientStats"].get("transport") == "aprsis"
):
if stats_json["APRSClientStats"].get("server_keepalive"):
last_msg_time = utils.strfdelta(
now - stats_json["APRSClientStats"]["server_keepalive"]
)
else:
last_msg_time = "N/A"
else:
last_msg_time = "N/A"
tracked_packets = stats_json["PacketTrack"]["total_tracked"]
tx_msg = 0
rx_msg = 0
if "PacketList" in stats_json:
msg_packets = stats_json["PacketList"].get("MessagePacket")
if msg_packets:
tx_msg = msg_packets.get("tx", 0)
rx_msg = msg_packets.get("rx", 0)
keepalive = (
"{} - Uptime {} RX:{} TX:{} Tracker:{} Msgs TX:{} RX:{} "
"Last:{} - RAM Current:{} Peak:{} Threads:{} LoggingQueue:{}"
).format(
stats_json["APRSDStats"]["callsign"],
stats_json["APRSDStats"]["uptime"],
pl.total_rx(),
pl.total_tx(),
tracked_packets,
tx_msg,
rx_msg,
last_msg_time,
stats_json["APRSDStats"]["memory_current_str"],
stats_json["APRSDStats"]["memory_peak_str"],
len(thread_list),
aprsd_log.logging_queue.qsize(),
)
LOG.info(keepalive)
if "APRSDThreadList" in stats_json:
thread_list = stats_json["APRSDThreadList"]
for thread_name in thread_list:
thread = thread_list[thread_name]
alive = thread["alive"]
age = thread["age"]
key = thread["name"]
if not alive:
LOG.error(f"Thread {thread}")
thread_hex = f"fg {utils.hex_from_name(key)}"
t_name = f"<{thread_hex}>{key:<15}</{thread_hex}>"
thread_msg = f"{t_name} Alive? {str(alive): <5} {str(age): <20}"
LOGU.opt(colors=True).info(thread_msg)
# LOG.info(f"{key: <15} Alive? {str(alive): <5} {str(age): <20}")
# Go through the registered keepalive collectors
# and check them as well as call log.
collect = keepalive_collector.KeepAliveCollector()
collect.check()
collect.log()
# Check version every day
delta = now - self.checker_time
if delta > datetime.timedelta(hours=24):
self.checker_time = now
level, msg = utils._check_version()
if level:
LOG.warning(msg)
self.cntr += 1
time.sleep(1)
return True

View File

@ -0,0 +1,77 @@
import logging
import threading
import wrapt
from aprsd import threads
from aprsd.log import log
LOG = logging.getLogger("APRSD")
class LogEntries:
entries = []
lock = threading.Lock()
_instance = None
def __new__(cls, *args, **kwargs):
if cls._instance is None:
cls._instance = super().__new__(cls)
return cls._instance
@wrapt.synchronized(lock)
def add(self, entry):
self.entries.append(entry)
@wrapt.synchronized(lock)
def get_all_and_purge(self):
entries = self.entries.copy()
self.entries = []
return entries
@wrapt.synchronized(lock)
def __len__(self):
return len(self.entries)
class LogMonitorThread(threads.APRSDThread):
def __init__(self):
super().__init__("LogMonitorThread")
def loop(self):
try:
record = log.logging_queue.get(block=True, timeout=2)
if isinstance(record, list):
for item in record:
entry = self.json_record(item)
LogEntries().add(entry)
else:
entry = self.json_record(record)
LogEntries().add(entry)
except Exception:
# Just ignore thi
pass
return True
def json_record(self, record):
entry = {}
entry["filename"] = record.filename
entry["funcName"] = record.funcName
entry["levelname"] = record.levelname
entry["lineno"] = record.lineno
entry["module"] = record.module
entry["name"] = record.name
entry["pathname"] = record.pathname
entry["process"] = record.process
entry["processName"] = record.processName
if hasattr(record, "stack_info"):
entry["stack_info"] = record.stack_info
else:
entry["stack_info"] = None
entry["thread"] = record.thread
entry["threadName"] = record.threadName
entry["message"] = record.getMessage()
return entry

View File

@ -1,56 +0,0 @@
import logging
import time
import requests
from oslo_config import cfg
import aprsd
from aprsd import threads as aprsd_threads
CONF = cfg.CONF
LOG = logging.getLogger("APRSD")
class APRSRegistryThread(aprsd_threads.APRSDThread):
"""This sends service information to the configured APRS Registry."""
_loop_cnt: int = 1
def __init__(self):
super().__init__("APRSRegistryThread")
self._loop_cnt = 1
if not CONF.aprs_registry.enabled:
LOG.error(
"APRS Registry is not enabled. ",
)
LOG.error(
"APRS Registry thread is STOPPING.",
)
self.stop()
LOG.info(
"APRS Registry thread is running and will send "
f"info every {CONF.aprs_registry.frequency_seconds} seconds "
f"to {CONF.aprs_registry.registry_url}.",
)
def loop(self):
# Only call the registry every N seconds
if self._loop_cnt % CONF.aprs_registry.frequency_seconds == 0:
info = {
"callsign": CONF.callsign,
"description": CONF.aprs_registry.description,
"service_website": CONF.aprs_registry.service_website,
"software": f"APRSD version {aprsd.__version__} "
"https://github.com/craigerl/aprsd",
}
try:
requests.post(
f"{CONF.aprs_registry.registry_url}",
json=info,
)
except Exception as e:
LOG.error(f"Failed to send registry info: {e}")
time.sleep(1)
self._loop_cnt += 1
return True

View File

@ -6,54 +6,25 @@ import time
import aprslib
from oslo_config import cfg
from aprsd import packets, plugin
from aprsd.client.client import APRSDClient
from aprsd.packets import collector, filter
from aprsd.packets import log as packet_log
from aprsd import client, packets, plugin
from aprsd.threads import APRSDThread, tx
CONF = cfg.CONF
LOG = logging.getLogger('APRSD')
LOG = logging.getLogger("APRSD")
class APRSDRXThread(APRSDThread):
"""Main Class to connect to an APRS Client and recieve packets.
A packet is received in the main loop and then sent to the
process_packet method, which sends the packet through the collector
to track the packet for stats, and then put into the packet queue
for processing in a separate thread.
"""
_client = None
# This is the queue that packets are sent to for processing.
# We process packets in a separate thread to help prevent
# getting blocked by the APRS server trying to send us packets.
packet_queue = None
pkt_count = 0
def __init__(self, packet_queue):
super().__init__('RX_PKT')
super().__init__("RX_MSG")
self.packet_queue = packet_queue
self._client = client.factory.create()
def stop(self):
self.thread_stop = True
if self._client:
self._client.close()
client.factory.create().client.stop()
def loop(self):
if not self._client:
self._client = APRSDClient()
time.sleep(1)
return True
if not self._client.is_alive:
self._client = APRSDClient()
time.sleep(1)
return True
# setup the consumer of messages and block until a messages
try:
# This will register a packet consumer with aprslib
@ -65,117 +36,46 @@ class APRSDRXThread(APRSDThread):
# and the aprslib developer didn't want to allow a PR to add
# kwargs. :(
# https://github.com/rossengeorgiev/aprs-python/pull/56
self._client.consumer(
self.process_packet,
raw=False,
self._client.client.consumer(
self.process_packet, raw=False, blocking=False,
)
except (
aprslib.exceptions.ConnectionDrop,
aprslib.exceptions.ConnectionError,
):
LOG.error('Connection dropped, reconnecting')
LOG.error("Connection dropped, reconnecting")
time.sleep(5)
# Force the deletion of the client object connected to aprs
# This will cause a reconnect, next time client.get_client()
# is called
self._client.reset()
time.sleep(5)
except Exception as ex:
LOG.exception(ex)
LOG.error('Resetting connection and trying again.')
self._client.reset()
time.sleep(5)
# Continue to loop
return True
@abc.abstractmethod
def process_packet(self, *args, **kwargs):
pass
class APRSDPluginRXThread(APRSDRXThread):
"""Process received packets.
This is the main APRSD Server command thread that
receives packets from APRIS and then sends them for
processing in the PluginProcessPacketThread.
"""
def process_packet(self, *args, **kwargs):
packet = self._client.decode_packet(*args, **kwargs)
if not packet:
LOG.error(
'No packet received from decode_packet. Most likely a failure to parse'
)
return
self.pkt_count += 1
packet_log.log(packet, packet_count=self.pkt_count)
pkt_list = packets.PacketList()
if isinstance(packet, packets.AckPacket):
# We don't need to drop AckPackets, those should be
# processed.
self.packet_queue.put(packet)
else:
# Make sure we aren't re-processing the same packet
# For RF based APRS Clients we can get duplicate packets
# So we need to track them and not process the dupes.
found = False
try:
# Find the packet in the list of already seen packets
# Based on the packet.key
found = pkt_list.find(packet)
if not packet.msgNo:
# If the packet doesn't have a message id
# then there is no reliable way to detect
# if it's a dupe, so we just pass it on.
# it shouldn't get acked either.
found = False
except KeyError:
found = False
if not found:
# We haven't seen this packet before, so we process it.
collector.PacketCollector().rx(packet)
self.packet_queue.put(packet)
elif packet.timestamp - found.timestamp < CONF.packet_dupe_timeout:
# If the packet came in within N seconds of the
# Last time seeing the packet, then we drop it as a dupe.
LOG.warning(
f'Packet {packet.from_call}:{packet.msgNo} already tracked, dropping.'
)
else:
LOG.warning(
f'Packet {packet.from_call}:{packet.msgNo} already tracked '
f'but older than {CONF.packet_dupe_timeout} seconds. processing.',
)
collector.PacketCollector().rx(packet)
self.packet_queue.put(packet)
# LOG.debug(raw)
packet.log(header="RX")
packets.PacketList().rx(packet)
self.packet_queue.put(packet)
class APRSDFilterThread(APRSDThread):
def __init__(self, thread_name, packet_queue):
super().__init__(thread_name)
self.packet_queue = packet_queue
def filter_packet(self, packet):
# Do any packet filtering prior to processing
if not filter.PacketFilter().filter(packet):
return None
return packet
def print_packet(self, packet):
"""Allow a child of this class to override this.
This is helpful if for whatever reason the child class
doesn't want to log packets.
"""
packet_log.log(packet)
def loop(self):
try:
packet = self.packet_queue.get(timeout=1)
self.print_packet(packet)
if packet:
if self.filter_packet(packet):
self.process_packet(packet)
except queue.Empty:
pass
return True
class APRSDProcessPacketThread(APRSDFilterThread):
"""Base class for processing received packets after they have been filtered.
Packets are received from the client, then filtered for dupes,
then sent to the packet queue. This thread pulls packets from
the packet queue for processing.
class APRSDProcessPacketThread(APRSDThread):
"""Base class for processing received packets.
This is the base class for processing packets coming from
the consumer. This base class handles sending ack packets and
@ -183,42 +83,41 @@ class APRSDProcessPacketThread(APRSDFilterThread):
for processing."""
def __init__(self, packet_queue):
super().__init__('ProcessPKT', packet_queue=packet_queue)
if not CONF.enable_sending_ack_packets:
LOG.warning(
'Sending ack packets is disabled, messages will not be acknowledged.',
)
self.packet_queue = packet_queue
super().__init__("ProcessPKT")
self._loop_cnt = 1
def process_ack_packet(self, packet):
"""We got an ack for a message, no need to resend it."""
ack_num = packet.msgNo
LOG.debug(f'Got ack for message {ack_num}')
collector.PacketCollector().rx(packet)
def process_piggyback_ack(self, packet):
"""We got an ack embedded in a packet."""
ack_num = packet.ackMsgNo
LOG.debug(f'Got PiggyBackAck for message {ack_num}')
collector.PacketCollector().rx(packet)
LOG.info(f"Got ack for message {ack_num}")
pkt_tracker = packets.PacketTrack()
pkt_tracker.remove(ack_num)
def process_reject_packet(self, packet):
"""We got a reject message for a packet. Stop sending the message."""
ack_num = packet.msgNo
LOG.debug(f'Got REJECT for message {ack_num}')
collector.PacketCollector().rx(packet)
LOG.info(f"Got REJECT for message {ack_num}")
pkt_tracker = packets.PacketTrack()
pkt_tracker.remove(ack_num)
def loop(self):
try:
packet = self.packet_queue.get(timeout=1)
if packet:
self.process_packet(packet)
except queue.Empty:
pass
self._loop_cnt += 1
return True
def process_packet(self, packet):
"""Process a packet received from aprs-is server."""
LOG.debug(f'ProcessPKT-LOOP {self.loop_count}')
# set this now as we are going to process it.
# This is used during dupe checking, so set it early
packet.processed = True
LOG.debug(f"RXPKT-LOOP {self._loop_cnt}")
our_call = CONF.callsign.lower()
from_call = packet.from_call
if hasattr(packet, 'addresse') and packet.addresse:
if packet.addresse:
to_call = packet.addresse
else:
to_call = packet.to_call
@ -237,24 +136,19 @@ class APRSDProcessPacketThread(APRSDFilterThread):
):
self.process_reject_packet(packet)
else:
if hasattr(packet, 'ackMsgNo') and packet.ackMsgNo:
# we got an ack embedded in this packet
# we need to handle the ack
self.process_piggyback_ack(packet)
# Only ack messages that were sent directly to us
if isinstance(packet, packets.MessagePacket):
if to_call and to_call.lower() == our_call:
# It's a MessagePacket and it's for us!
# let any threads do their thing, then ack
# send an ack last
if msg_id:
tx.send(
packets.AckPacket(
from_call=CONF.callsign,
to_call=from_call,
msgNo=msg_id,
),
)
tx.send(
packets.AckPacket(
from_call=CONF.callsign,
to_call=from_call,
msgNo=msg_id,
),
)
self.process_our_message_packet(packet)
else:
@ -262,10 +156,9 @@ class APRSDProcessPacketThread(APRSDFilterThread):
self.process_other_packet(packet, for_us=False)
else:
self.process_other_packet(
packet,
for_us=(to_call.lower() == our_call),
packet, for_us=(to_call.lower() == our_call),
)
LOG.debug(f"Packet processing complete for pkt '{packet.key}'")
LOG.debug("Packet processing complete")
return False
@abc.abstractmethod
@ -275,9 +168,9 @@ class APRSDProcessPacketThread(APRSDFilterThread):
def process_other_packet(self, packet, for_us=False):
"""Process an APRS Packet that isn't a message or ack"""
if not for_us:
LOG.info("Got a packet meant for someone else '{packet.to_call}'")
LOG.info("Got a packet not meant for us.")
else:
LOG.info('Got a non AckPacket/MessagePacket')
LOG.info("Got a non AckPacket/MessagePacket")
class APRSDPluginProcessPacketThread(APRSDProcessPacketThread):
@ -297,7 +190,7 @@ class APRSDPluginProcessPacketThread(APRSDProcessPacketThread):
tx.send(subreply)
else:
wl = CONF.watch_list
to_call = wl['alert_callsign']
to_call = wl["alert_callsign"]
tx.send(
packets.MessagePacket(
from_call=CONF.callsign,
@ -309,7 +202,7 @@ class APRSDPluginProcessPacketThread(APRSDProcessPacketThread):
# We have a message based object.
tx.send(reply)
except Exception as ex:
LOG.error('Plugin failed!!!')
LOG.error("Plugin failed!!!")
LOG.exception(ex)
def process_our_message_packet(self, packet):
@ -363,14 +256,8 @@ class APRSDPluginProcessPacketThread(APRSDProcessPacketThread):
# If the message was for us and we didn't have a
# response, then we send a usage statement.
if to_call == CONF.callsign and not replied:
# Tailor the messages accordingly
if CONF.load_help_plugin:
LOG.warning('Sending help!')
message_text = "Unknown command! Send 'help' message for help"
else:
LOG.warning('Unknown command!')
message_text = 'Unknown command!'
LOG.warning("Sending help!")
message_text = "Unknown command! Send 'help' message for help"
tx.send(
packets.MessagePacket(
from_call=CONF.callsign,
@ -379,11 +266,11 @@ class APRSDPluginProcessPacketThread(APRSDProcessPacketThread):
),
)
except Exception as ex:
LOG.error('Plugin failed!!!')
LOG.error("Plugin failed!!!")
LOG.exception(ex)
# Do we need to send a reply?
if to_call == CONF.callsign:
reply = 'A Plugin failed! try again?'
reply = "A Plugin failed! try again?"
tx.send(
packets.MessagePacket(
from_call=CONF.callsign,
@ -392,4 +279,4 @@ class APRSDPluginProcessPacketThread(APRSDProcessPacketThread):
),
)
LOG.debug('Completed process_our_message_packet')
LOG.debug("Completed process_our_message_packet")

View File

@ -1,42 +0,0 @@
# aprsd/aprsd/threads/service.py
#
# This module is used to register threads that the service command runs.
#
# The service command is used to start and stop the APRS service.
# This is a mechanism to register threads that the service or command
# needs to run, and then start stop them as needed.
from aprsd.threads import aprsd as aprsd_threads
from aprsd.utils import singleton
@singleton
class ServiceThreads:
"""Registry for threads that the service command runs.
This enables extensions to register a thread to run during
the service command.
"""
def __init__(self):
self.threads: list[aprsd_threads.APRSDThread] = []
def register(self, thread: aprsd_threads.APRSDThread):
if not isinstance(thread, aprsd_threads.APRSDThread):
raise TypeError(f'Thread {thread} is not an APRSDThread')
self.threads.append(thread)
def unregister(self, thread: aprsd_threads.APRSDThread):
if not isinstance(thread, aprsd_threads.APRSDThread):
raise TypeError(f'Thread {thread} is not an APRSDThread')
self.threads.remove(thread)
def start(self):
"""Start all threads in the list."""
for thread in self.threads:
thread.start()
def join(self):
"""Join all the threads in the list"""
for thread in self.threads:
thread.join()

View File

@ -1,39 +0,0 @@
import logging
import time
from oslo_config import cfg
from aprsd.stats import collector
from aprsd.threads import APRSDThread
from aprsd.utils import objectstore
CONF = cfg.CONF
LOG = logging.getLogger('APRSD')
class StatsStore(objectstore.ObjectStoreMixin):
"""Container to save the stats from the collector."""
def add(self, stats: dict):
with self.lock:
self.data = stats
class APRSDStatsStoreThread(APRSDThread):
"""Save APRSD Stats to disk periodically."""
# how often in seconds to write the file
save_interval = 10
def __init__(self):
super().__init__('StatsStore')
def loop(self):
if self.loop_count % self.save_interval == 0:
stats = collector.Collector().collect()
ss = StatsStore()
ss.add(stats)
ss.save()
time.sleep(1)
return True

View File

@ -1,22 +1,21 @@
import datetime
import logging
import threading
import time
import wrapt
from oslo_config import cfg
from rush import quota, throttle
from rush.contrib import decorator
from rush.limiters import periodic
from rush.stores import dictionary
from aprsd import client
from aprsd import conf # noqa
from aprsd import threads as aprsd_threads
from aprsd.client.client import APRSDClient
from aprsd.packets import collector, core, tracker
from aprsd.packets import log as packet_log
from aprsd.packets import core, tracker
CONF = cfg.CONF
LOG = logging.getLogger('APRSD')
LOG = logging.getLogger("APRSD")
msg_t = throttle.Throttle(
limiter=periodic.PeriodicLimiter(
@ -37,24 +36,15 @@ ack_t = throttle.Throttle(
msg_throttle_decorator = decorator.ThrottleDecorator(throttle=msg_t)
ack_throttle_decorator = decorator.ThrottleDecorator(throttle=ack_t)
s_lock = threading.Lock()
@wrapt.synchronized(s_lock)
@msg_throttle_decorator.sleep_and_retry
def send(packet: core.Packet, direct=False, aprs_client=None):
"""Send a packet either in a thread or directly to the client."""
# prepare the packet for sending.
# This constructs the packet.raw
packet.prepare(create_msg_number=True)
# Have to call the collector to track the packet
# After prepare, as prepare assigns the msgNo
collector.PacketCollector().tx(packet)
packet.prepare()
if isinstance(packet, core.AckPacket):
if CONF.enable_sending_ack_packets:
_send_ack(packet, direct=direct, aprs_client=aprs_client)
else:
LOG.info('Sending ack packets is disabled. Not sending AckPacket.')
_send_ack(packet, direct=direct, aprs_client=aprs_client)
else:
_send_packet(packet, direct=direct, aprs_client=aprs_client)
@ -81,18 +71,11 @@ def _send_direct(packet, aprs_client=None):
if aprs_client:
cl = aprs_client
else:
cl = APRSDClient()
cl = client.factory.create()
packet.update_timestamp()
packet_log.log(packet, tx=True)
try:
cl.send(packet)
except Exception as e:
LOG.error(f'Failed to send packet: {packet}')
LOG.error(e)
return False
else:
return True
packet.log(header="TX")
cl.send(packet)
class SendPacketThread(aprsd_threads.APRSDThread):
@ -100,7 +83,10 @@ class SendPacketThread(aprsd_threads.APRSDThread):
def __init__(self, packet):
self.packet = packet
super().__init__(f'TX-{packet.to_call}-{self.packet.msgNo}')
name = self.packet.raw[:5]
super().__init__(f"TXPKT-{self.packet.msgNo}-{name}")
pkt_tracker = tracker.PacketTrack()
pkt_tracker.add(packet)
def loop(self):
"""Loop until a message is acked or it gets delayed.
@ -119,32 +105,33 @@ class SendPacketThread(aprsd_threads.APRSDThread):
# The message has been removed from the tracking queue
# So it got acked and we are done.
LOG.info(
f'{self.packet.__class__.__name__}'
f'({self.packet.msgNo}) '
'Message Send Complete via Ack.',
f"{self.packet.__class__.__name__}"
f"({self.packet.msgNo}) "
"Message Send Complete via Ack.",
)
return False
else:
send_now = False
if packet.send_count >= packet.retry_count:
if packet.send_count == packet.retry_count:
# we reached the send limit, don't send again
# TODO(hemna) - Need to put this in a delayed queue?
LOG.info(
f'{packet.__class__.__name__} '
f'({packet.msgNo}) '
'Message Send Complete. Max attempts reached'
f' {packet.retry_count}',
f"{packet.__class__.__name__} "
f"({packet.msgNo}) "
"Message Send Complete. Max attempts reached"
f" {packet.retry_count}",
)
pkt_tracker.remove(packet.msgNo)
if not packet.allow_delay:
pkt_tracker.remove(packet.msgNo)
return False
# Message is still outstanding and needs to be acked.
if packet.last_send_time:
# Message has a last send time tracking
now = int(round(time.time()))
now = datetime.datetime.now()
sleeptime = (packet.send_count + 1) * 31
delta = now - packet.last_send_time
if delta > sleeptime:
if delta > datetime.timedelta(seconds=sleeptime):
# It's time to try to send it again
send_now = True
else:
@ -153,19 +140,9 @@ class SendPacketThread(aprsd_threads.APRSDThread):
if send_now:
# no attempt time, so lets send it, and start
# tracking the time.
packet.last_send_time = int(round(time.time()))
sent = False
try:
sent = _send_direct(packet)
except Exception as ex:
LOG.error(f'Failed to send packet: {packet}')
LOG.error(ex)
else:
# If an exception happens while sending
# we don't want this attempt to count
# against the packet
if sent:
packet.send_count += 1
packet.last_send_time = datetime.datetime.now()
send(packet, direct=True)
packet.send_count += 1
time.sleep(1)
# Make sure we get called again.
@ -175,106 +152,46 @@ class SendPacketThread(aprsd_threads.APRSDThread):
class SendAckThread(aprsd_threads.APRSDThread):
loop_count: int = 1
max_retries = 3
def __init__(self, packet):
self.packet = packet
super().__init__(f'TXAck-{packet.to_call}-{self.packet.msgNo}')
self.max_retries = CONF.default_ack_send_count
super().__init__(f"SendAck-{self.packet.msgNo}")
def loop(self):
"""Separate thread to send acks with retries."""
send_now = False
if self.packet.send_count == self.max_retries:
if self.packet.send_count == self.packet.retry_count:
# we reached the send limit, don't send again
# TODO(hemna) - Need to put this in a delayed queue?
LOG.debug(
f'{self.packet.__class__.__name__}'
f'({self.packet.msgNo}) '
'Send Complete. Max attempts reached'
f' {self.max_retries}',
LOG.info(
f"{self.packet.__class__.__name__}"
f"({self.packet.msgNo}) "
"Send Complete. Max attempts reached"
f" {self.packet.retry_count}",
)
return False
if self.packet.last_send_time:
# Message has a last send time tracking
now = int(round(time.time()))
now = datetime.datetime.now()
# aprs duplicate detection is 30 secs?
# (21 only sends first, 28 skips middle)
sleep_time = 31
delta = now - self.packet.last_send_time
if delta > sleep_time:
if delta > datetime.timedelta(seconds=sleep_time):
# It's time to try to send it again
send_now = True
elif self.loop_count % 10 == 0:
LOG.debug(f'Still wating. {delta}')
LOG.debug(f"Still wating. {delta}")
else:
send_now = True
if send_now:
sent = False
try:
sent = _send_direct(self.packet)
except Exception:
LOG.error(f'Failed to send packet: {self.packet}')
else:
# If an exception happens while sending
# we don't want this attempt to count
# against the packet
if sent:
self.packet.send_count += 1
self.packet.last_send_time = int(round(time.time()))
send(self.packet, direct=True)
self.packet.send_count += 1
self.packet.last_send_time = datetime.datetime.now()
time.sleep(1)
self.loop_count += 1
return True
class BeaconSendThread(aprsd_threads.APRSDThread):
"""Thread that sends a GPS beacon packet periodically.
Settings are in the [DEFAULT] section of the config file.
"""
_loop_cnt: int = 1
def __init__(self):
super().__init__('BeaconSendThread')
self._loop_cnt = 1
# Make sure Latitude and Longitude are set.
if not CONF.latitude or not CONF.longitude:
LOG.error(
'Latitude and Longitude are not set in the config file.'
'Beacon will not be sent and thread is STOPPED.',
)
self.stop()
LOG.info(
'Beacon thread is running and will send '
f'beacons every {CONF.beacon_interval} seconds.',
)
def loop(self):
# Only dump out the stats every N seconds
if self._loop_cnt % CONF.beacon_interval == 0:
pkt = core.BeaconPacket(
from_call=CONF.callsign,
to_call='APRS',
latitude=float(CONF.latitude),
longitude=float(CONF.longitude),
comment='APRSD GPS Beacon',
symbol=CONF.beacon_symbol,
)
try:
# Only send it once
pkt.retry_count = 1
send(pkt, direct=True)
except Exception as e:
LOG.error(f'Failed to send beacon: {e}')
APRSDClient().reset()
time.sleep(5)
self._loop_cnt += 1
time.sleep(1)
return True

View File

@ -1,40 +1,24 @@
"""Utilities and helper functions."""
import errno
import functools
import math
import os
import re
import sys
import traceback
import update_checker
import aprsd
from .fuzzyclock import fuzzy # noqa: F401
# Make these available by anyone importing
# aprsd.utils
from .ring_buffer import RingBuffer # noqa: F401
if sys.version_info.major == 3 and sys.version_info.minor >= 3:
from collections.abc import MutableMapping
else:
from collections.abc import MutableMapping
def singleton(cls):
"""Make a class a Singleton class (only one instance)"""
@functools.wraps(cls)
def wrapper_singleton(*args, **kwargs):
if wrapper_singleton.instance is None:
wrapper_singleton.instance = cls(*args, **kwargs)
return wrapper_singleton.instance
wrapper_singleton.instance = None
return wrapper_singleton
from collections import MutableMapping
def env(*vars, **kwargs):
@ -85,16 +69,6 @@ def rgb_from_name(name):
return red, green, blue
def hextriplet(colortuple):
"""Convert a color tuple to a hex triplet."""
return "#" + "".join(f"{i:02X}" for i in colortuple)
def hex_from_name(name):
"""Create a hex color from a string."""
return hextriplet(rgb_from_name(name))
def human_size(bytes, units=None):
"""Returns a human readable string representation of bytes"""
if not units:
@ -157,109 +131,3 @@ def parse_delta_str(s):
return {key: float(val) for key, val in m.groupdict().items()}
else:
return {}
def load_entry_points(group):
"""Load all extensions registered to the given entry point group"""
try:
import importlib_metadata
except ImportError:
# For python 3.10 and later
import importlib.metadata as importlib_metadata
eps = importlib_metadata.entry_points(group=group)
for ep in eps:
try:
ep.load()
except Exception as e:
print(
f"Extension {ep.name} of group {group} failed to load with {e}",
file=sys.stderr,
)
print(traceback.format_exc(), file=sys.stderr)
def calculate_initial_compass_bearing(point_a, point_b):
    """
    Calculates the bearing between two points.

    The formula used is the following:
        θ = atan2(sin(Δlong).cos(lat2),
                  cos(lat1).sin(lat2) - sin(lat1).cos(lat2).cos(Δlong))

    :Parameters:
      - `point_a: The tuple representing the latitude/longitude for the
        first point. Latitude and longitude must be in decimal degrees
      - `point_b: The tuple representing the latitude/longitude for the
        second point. Latitude and longitude must be in decimal degrees

    :Returns:
      The bearing in degrees

    :Returns Type:
      float
    """
    # isinstance() also accepts tuple subclasses (e.g. namedtuples),
    # which the previous ``type(x) != tuple`` comparison rejected.
    if not isinstance(point_a, tuple) or not isinstance(point_b, tuple):
        raise TypeError("Only tuples are supported as arguments")

    lat1 = math.radians(float(point_a[0]))
    lat2 = math.radians(float(point_b[0]))

    diff_long = math.radians(float(point_b[1]) - float(point_a[1]))

    x = math.sin(diff_long) * math.cos(lat2)
    y = math.cos(lat1) * math.sin(lat2) - (
        math.sin(lat1) * math.cos(lat2) * math.cos(diff_long)
    )

    initial_bearing = math.atan2(x, y)

    # math.atan2 returns values from -180° to +180°, which is not what
    # we want for a compass bearing; normalize into [0, 360).
    initial_bearing = math.degrees(initial_bearing)
    compass_bearing = (initial_bearing + 360) % 360

    return compass_bearing
def degrees_to_cardinal(bearing, full_string=False):
    """Translate a compass bearing in degrees to a cardinal direction.

    :param bearing: bearing in degrees, expected in the range [0, 360].
    :param full_string: when True return the spelled-out direction
        (e.g. "North-Northeast") instead of the abbreviation ("NNE").
    """
    # 17 entries: "N" appears at both ends so bearings near 360 wrap
    # back to north without extra arithmetic.
    points = (
        ("N", "North"),
        ("NNE", "North-Northeast"),
        ("NE", "Northeast"),
        ("ENE", "East-Northeast"),
        ("E", "East"),
        ("ESE", "East-Southeast"),
        ("SE", "Southeast"),
        ("SSE", "South-Southeast"),
        ("S", "South"),
        ("SSW", "South-Southwest"),
        ("SW", "Southwest"),
        ("WSW", "West-Southwest"),
        ("W", "West"),
        ("WNW", "West-Northwest"),
        ("NW", "Northwest"),
        ("NNW", "North-Northwest"),
        ("N", "North"),
    )
    # Each compass point spans 22.5 degrees; round() picks the nearest.
    abbrev, full = points[round(bearing / 22.5)]
    return full if full_string else abbrev

View File

@ -1,54 +1,48 @@
import random
from multiprocessing import RawValue
import threading
import wrapt
MAX_PACKET_ID = 9999
class PacketCounter:
"""
Global Packet ID counter class.
Global Packet id counter class.
This is a singleton-based class that keeps
This is a singleton based class that keeps
an incrementing counter for all packets to
be sent. All new Packet objects get a new
message ID, which is the next number available
be sent. All new Packet objects gets a new
message id, which is the next number available
from the PacketCounter.
"""
_instance = None
max_count = 9999
lock = threading.Lock()
def __new__(cls, *args, **kwargs):
"""Make this a singleton class."""
if cls._instance is None:
cls._instance = super().__new__(cls, *args, **kwargs)
cls._instance._val = random.randint(1, MAX_PACKET_ID) # Initialize counter
cls._instance.val = RawValue("i", 1)
return cls._instance
@wrapt.synchronized(lock)
def increment(self):
"""Increment the counter, reset if it exceeds MAX_PACKET_ID."""
if self._val == MAX_PACKET_ID:
self._val = 1
if self.val.value == self.max_count:
self.val.value = 1
else:
self._val += 1
self.val.value += 1
@property
@wrapt.synchronized(lock)
def value(self):
"""Get the current value as a string."""
return str(self._val)
return self.val.value
@wrapt.synchronized(lock)
def __repr__(self):
"""String representation of the current value."""
return str(self._val)
return str(self.val.value)
@wrapt.synchronized(lock)
def __str__(self):
"""String representation of the current value."""
return str(self._val)
return str(self.val.value)

View File

@ -3,20 +3,13 @@ import decimal
import json
import sys
from aprsd.packets import core
class EnhancedJSONEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, datetime.datetime):
args = (
"year",
"month",
"day",
"hour",
"minute",
"second",
"microsecond",
"year", "month", "day", "hour", "minute",
"second", "microsecond",
)
return {
"__type__": "datetime.datetime",
@ -49,29 +42,11 @@ class EnhancedJSONEncoder(json.JSONEncoder):
return super().default(obj)
class SimpleJSONEncoder(json.JSONEncoder):
    """JSON encoder that flattens common non-JSON types to strings.

    datetimes become ISO-8601 strings; dates, times, timedeltas and
    Decimals become their ``str()`` form; Packet objects become dicts.
    """

    def default(self, obj):
        # datetime must be tested before date: datetime is a date subclass.
        if isinstance(obj, datetime.datetime):
            return obj.isoformat()
        if isinstance(
            obj,
            (datetime.date, datetime.time, datetime.timedelta, decimal.Decimal),
        ):
            return str(obj)
        if isinstance(obj, core.Packet):
            return obj.to_dict()
        # Anything else: defer to the base class (raises TypeError).
        return super().default(obj)
class EnhancedJSONDecoder(json.JSONDecoder):
def __init__(self, *args, **kwargs):
super().__init__(
*args,
object_hook=self.object_hook,
*args, object_hook=self.object_hook,
**kwargs,
)

View File

@ -1,57 +0,0 @@
import logging
from typing import Callable, Protocol, runtime_checkable
from aprsd.utils import singleton
LOG = logging.getLogger('APRSD')
@runtime_checkable
class KeepAliveProducer(Protocol):
    """The KeepAliveProducer protocol is used to define the interface for running Keepalive checks."""

    def keepalive_check(self) -> dict:
        """Check for keepalive.

        Returns a dict of keepalive data (contents are producer-specific;
        not constrained by this protocol).
        """
        ...

    def keepalive_log(self) -> None:
        """Log any keepalive information."""
        ...
@singleton
class KeepAliveCollector:
    """Collects and runs keepalive checks from registered producers.

    Producers register a callable (typically a class) that, when invoked,
    yields a KeepAliveProducer; check()/log() instantiate each one and
    delegate to it.
    """

    def __init__(self):
        # Callables that produce KeepAliveProducer instances.
        self.producers: list[Callable] = []

    def check(self) -> None:
        """Do any keepalive checks."""
        for name in self.producers:
            cls = name()
            try:
                cls.keepalive_check()
            except Exception as e:
                LOG.error(f'Error in producer {name} (check): {e}')
                raise e

    def log(self) -> None:
        """Log any relevant information during a KeepAlive check"""
        for name in self.producers:
            cls = name()
            try:
                cls.keepalive_log()
            except Exception as e:
                # Fixed: previously reported "(check)" here as well, making
                # check and log failures indistinguishable in the logs.
                LOG.error(f'Error in producer {name} (log): {e}')
                raise e

    def register(self, producer_name: Callable):
        """Register a producer; it must satisfy the KeepAliveProducer protocol."""
        if not isinstance(producer_name, KeepAliveProducer):
            raise TypeError(f'Producer {producer_name} is not a KeepAliveProducer')
        self.producers.append(producer_name)

    def unregister(self, producer_name: Callable):
        """Remove a previously registered producer."""
        if not isinstance(producer_name, KeepAliveProducer):
            raise TypeError(f'Producer {producer_name} is not a KeepAliveProducer')
        self.producers.remove(producer_name)

View File

@ -2,12 +2,12 @@ import logging
import os
import pathlib
import pickle
import threading
from oslo_config import cfg
CONF = cfg.CONF
LOG = logging.getLogger('APRSD')
LOG = logging.getLogger("APRSD")
class ObjectStoreMixin:
@ -25,28 +25,16 @@ class ObjectStoreMixin:
aprsd server -f (flush) will wipe all saved objects.
"""
def __init__(self):
self.lock = threading.RLock()
def __len__(self):
with self.lock:
return len(self.data)
def __iter__(self):
with self.lock:
return iter(self.data)
return len(self.data)
def get_all(self):
with self.lock:
return self.data
def get(self, key):
def get(self, id):
with self.lock:
return self.data.get(key)
def copy(self):
with self.lock:
return self.data.copy()
return self.data[id]
def _init_store(self):
if not CONF.enable_save:
@ -62,31 +50,36 @@ class ObjectStoreMixin:
def _save_filename(self):
save_location = CONF.save_location
return '{}/{}.p'.format(
return "{}/{}.p".format(
save_location,
self.__class__.__name__.lower(),
)
def _dump(self):
dump = {}
with self.lock:
for key in self.data.keys():
dump[key] = self.data[key]
return dump
def save(self):
"""Save any queued to disk?"""
if not CONF.enable_save:
return
self._init_store()
save_filename = self._save_filename()
if len(self) > 0:
LOG.debug(
f'{self.__class__.__name__}::Saving'
f' {len(self)} entries to disk at '
f'{save_filename}',
LOG.info(
f"{self.__class__.__name__}::Saving"
f" {len(self)} entries to disk at"
f"{CONF.save_location}",
)
with self.lock:
with open(save_filename, 'wb+') as fp:
pickle.dump(self.data, fp)
with open(self._save_filename(), "wb+") as fp:
pickle.dump(self._dump(), fp)
else:
LOG.debug(
"{} Nothing to save, flushing old save file '{}'".format(
self.__class__.__name__,
save_filename,
self._save_filename(),
),
)
self.flush()
@ -96,21 +89,18 @@ class ObjectStoreMixin:
return
if os.path.exists(self._save_filename()):
try:
with open(self._save_filename(), 'rb') as fp:
with open(self._save_filename(), "rb") as fp:
raw = pickle.load(fp)
if raw:
self.data = raw
LOG.debug(
f'{self.__class__.__name__}::Loaded {len(self)} entries from disk.',
f"{self.__class__.__name__}::Loaded {len(self)} entries from disk.",
)
else:
LOG.debug(f'{self.__class__.__name__}::No data to load.')
LOG.debug(f"{self.data}")
except (pickle.UnpicklingError, Exception) as ex:
LOG.error(f'Failed to UnPickle {self._save_filename()}')
LOG.error(f"Failed to UnPickle {self._save_filename()}")
LOG.error(ex)
self.data = {}
else:
LOG.debug(f'{self.__class__.__name__}::No save file found.')
def flush(self):
"""Nuke the old pickle file that stored the old results from last aprsd run."""

View File

@ -5,6 +5,7 @@ import logging
import time
import types
VALID_TRACE_FLAGS = {"method", "api"}
TRACE_API = False
TRACE_METHOD = False
@ -26,8 +27,8 @@ def trace(*dec_args, **dec_kwargs):
"""
def _decorator(f):
func_name = f.__qualname__
func_file = "/".join(f.__code__.co_filename.split("/")[-4:])
func_name = f.__name__
@functools.wraps(f)
def trace_logging_wrapper(*args, **kwargs):
@ -45,11 +46,10 @@ def trace(*dec_args, **dec_kwargs):
if pass_filter:
logger.debug(
"==> %(func)s: call %(all_args)r file: %(file)s",
"==> %(func)s: call %(all_args)r",
{
"func": func_name,
"all_args": str(all_args),
"file": func_file,
},
)

View File

@ -0,0 +1,84 @@
/* Stylesheet for the aprsd admin web page: page chrome, the stats
   graph layout, and the APRS symbol sprite. */

body {
    background: #eeeeee;
    margin: 2em;
    text-align: center;
    font-family: system-ui, sans-serif;
}

footer {
    padding: 2em;
    text-align: center;
    height: 10vh;
}

.ui.segment {
    background: #eeeeee;
}

/* Two-column grid holding the top pair of charts. */
#graphs {
    display: grid;
    width: 100%;
    height: 300px;
    grid-template-columns: 1fr 1fr;
}

#graphs_center {
    display: block;
    margin-top: 10px;
    margin-bottom: 10px;
    width: 100%;
    height: 300px;
}

#left {
    margin-right: 2px;
    height: 300px;
}
#right {
    height: 300px;
}
#center {
    height: 300px;
}

/* All chart canvases share the same light frame. */
#packetsChart, #messageChart, #emailChart, #memChart {
    border: 1px solid #ccc;
    background: #ddd;
}

#stats {
    margin: auto;
    width: 80%;
}

/* Raw JSON stats element stays in the DOM but is hidden from view. */
#jsonstats {
    display: none;
}

#title {
    font-size: 4em;
}
#version{
    font-size: .5em;
}
#uptime, #aprsis {
    font-size: 1em;
}
#callsign {
    font-size: 1.4em;
    color: #00F;
    padding-top: 8px;
    margin:10px;
}

/* rx headers are green, tx headers red. */
#title_rx {
    background-color: darkseagreen;
    text-align: left;
}
#title_tx {
    background-color: lightcoral;
    text-align: left;
}

/* 16x16 APRS symbol cut from the sprite sheet at a fixed offset. */
.aprsd_1 {
    background-image: url(/static/images/aprs-symbols-16-0.png);
    background-repeat: no-repeat;
    background-position: -160px -48px;
    width: 16px;
    height: 16px;
}

View File

@ -0,0 +1,189 @@
/* PrismJS 1.24.1
https://prismjs.com/download.html#themes=prism-tomorrow&languages=markup+css+clike+javascript+log&plugins=show-language+toolbar */
/* NOTE(review): vendored third-party asset (PrismJS "tomorrow" theme plus
   the toolbar / show-language plugins). Do not hand-edit; regenerate from
   the download URL above when upgrading Prism. */
/**
 * prism.js tomorrow night eighties for JavaScript, CoffeeScript, CSS and HTML
 * Based on https://github.com/chriskempson/tomorrow-theme
 * @author Rose Pritchard
 */

code[class*="language-"],
pre[class*="language-"] {
    color: #ccc;
    background: none;
    font-family: Consolas, Monaco, 'Andale Mono', 'Ubuntu Mono', monospace;
    font-size: 1em;
    text-align: left;
    white-space: pre;
    word-spacing: normal;
    word-break: normal;
    word-wrap: normal;
    line-height: 1.5;
    -moz-tab-size: 4;
    -o-tab-size: 4;
    tab-size: 4;
    -webkit-hyphens: none;
    -moz-hyphens: none;
    -ms-hyphens: none;
    hyphens: none;
}

/* Code blocks */
pre[class*="language-"] {
    padding: 1em;
    margin: .5em 0;
    overflow: auto;
}

:not(pre) > code[class*="language-"],
pre[class*="language-"] {
    background: #2d2d2d;
}

/* Inline code */
:not(pre) > code[class*="language-"] {
    padding: .1em;
    border-radius: .3em;
    white-space: normal;
}

.token.comment,
.token.block-comment,
.token.prolog,
.token.doctype,
.token.cdata {
    color: #999;
}

.token.punctuation {
    color: #ccc;
}

.token.tag,
.token.attr-name,
.token.namespace,
.token.deleted {
    color: #e2777a;
}

.token.function-name {
    color: #6196cc;
}

.token.boolean,
.token.number,
.token.function {
    color: #f08d49;
}

.token.property,
.token.class-name,
.token.constant,
.token.symbol {
    color: #f8c555;
}

.token.selector,
.token.important,
.token.atrule,
.token.keyword,
.token.builtin {
    color: #cc99cd;
}

.token.string,
.token.char,
.token.attr-value,
.token.regex,
.token.variable {
    color: #7ec699;
}

.token.operator,
.token.entity,
.token.url {
    color: #67cdcc;
}

.token.important,
.token.bold {
    font-weight: bold;
}
.token.italic {
    font-style: italic;
}

.token.entity {
    cursor: help;
}

.token.inserted {
    color: green;
}

/* Toolbar plugin: floats copy/language buttons over the code block,
   revealed on hover or keyboard focus. */
div.code-toolbar {
    position: relative;
}

div.code-toolbar > .toolbar {
    position: absolute;
    top: .3em;
    right: .2em;
    transition: opacity 0.3s ease-in-out;
    opacity: 0;
}

div.code-toolbar:hover > .toolbar {
    opacity: 1;
}

/* Separate line b/c rules are thrown out if selector is invalid.
   IE11 and old Edge versions don't support :focus-within. */
div.code-toolbar:focus-within > .toolbar {
    opacity: 1;
}

div.code-toolbar > .toolbar > .toolbar-item {
    display: inline-block;
}

div.code-toolbar > .toolbar > .toolbar-item > a {
    cursor: pointer;
}

div.code-toolbar > .toolbar > .toolbar-item > button {
    background: none;
    border: 0;
    color: inherit;
    font: inherit;
    line-height: normal;
    overflow: visible;
    padding: 0;
    -webkit-user-select: none; /* for button */
    -moz-user-select: none;
    -ms-user-select: none;
}

div.code-toolbar > .toolbar > .toolbar-item > a,
div.code-toolbar > .toolbar > .toolbar-item > button,
div.code-toolbar > .toolbar > .toolbar-item > span {
    color: #bbb;
    font-size: .8em;
    padding: 0 .5em;
    background: #f5f2f0;
    background: rgba(224, 224, 224, 0.2);
    box-shadow: 0 2px 0 0 rgba(0,0,0,0.2);
    border-radius: .5em;
}

div.code-toolbar > .toolbar > .toolbar-item > a:hover,
div.code-toolbar > .toolbar > .toolbar-item > a:focus,
div.code-toolbar > .toolbar > .toolbar-item > button:hover,
div.code-toolbar > .toolbar > .toolbar-item > button:focus,
div.code-toolbar > .toolbar > .toolbar-item > span:hover,
div.code-toolbar > .toolbar > .toolbar-item > span:focus {
    color: inherit;
    text-decoration: none;
}

Some files were not shown because too many files have changed in this diff Show More