1
0
mirror of https://github.com/craigerl/aprsd.git synced 2026-01-13 17:17:26 -05:00

remove py310 testing

This commit is contained in:
Walter Boring 2025-12-29 19:38:14 -05:00
parent d58700b9e4
commit f9979fa3da
8 changed files with 40 additions and 47 deletions

View File

@@ -17,7 +17,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ["3.10", "3.11", "3.12"]
python-version: ["3.11"]
steps:
- uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}

View File

@@ -96,11 +96,11 @@ class ListenStatsThread(APRSDThread):
# Get unique callsigns count from SeenList stats
seen_list_instance = seen_list.SeenList()
# stats() returns data while holding lock internally, so copy it immediately
seen_list_stats = seen_list_instance.stats()
seen_list_instance.save()
# we have to copy the seen_list_stats to avoid the lock being held too long
with seen_list_instance.lock:
seen_list_stats = seen_list_stats.copy()
# Copy the stats to avoid holding references to locked data
seen_list_stats = seen_list_stats.copy()
unique_callsigns_count = len(seen_list_stats)
# Calculate uptime

View File

@@ -55,12 +55,13 @@ class DupePacketFilter:
if not packet.processed:
# We haven't processed this packet through the plugins.
return packet
elif packet.timestamp - found.timestamp < CONF.packet_dupe_timeout:
elif abs(packet.timestamp - found.timestamp) < CONF.packet_dupe_timeout:
# If the packet came in within N seconds of the
# Last time seeing the packet, then we drop it as a dupe.
LOG.warning(
f'Packet {packet.from_call}:{packet.msgNo} already tracked, dropping.'
)
return None
else:
LOG.warning(
f'Packet {packet.from_call}:{packet.msgNo} already tracked '

View File

@@ -4,6 +4,7 @@ from collections import OrderedDict
from oslo_config import cfg
from aprsd import conf # noqa: F401
from aprsd.packets import core
from aprsd.utils import objectstore

View File

@@ -80,19 +80,16 @@ class TestDupePacketFilter(unittest.TestCase):
packet.processed = True
packet.timestamp = 1000
with mock.patch(
'aprsd.packets.filters.dupe_filter.packets.PacketList'
) as mock_list:
mock_list_instance = mock.MagicMock()
found_packet = fake.fake_packet(msg_number='123')
found_packet.timestamp = 1050 # Within 60 second timeout
mock_list_instance.find.return_value = found_packet
mock_list.return_value = mock_list_instance
mock_list_instance = mock.MagicMock()
found_packet = fake.fake_packet(msg_number='123')
found_packet.timestamp = 1050 # Within 60 second timeout
mock_list_instance.find.return_value = found_packet
self.filter.pl = mock_list_instance
with mock.patch('aprsd.packets.filters.dupe_filter.LOG') as mock_log:
result = self.filter.filter(packet)
self.assertIsNone(result) # Should be dropped
mock_log.warning.assert_called()
with mock.patch('aprsd.packets.filters.dupe_filter.LOG') as mock_log:
result = self.filter.filter(packet)
self.assertIsNone(result) # Should be dropped
mock_log.warning.assert_called()
def test_filter_duplicate_after_timeout(self):
"""Test filter() with duplicate after timeout."""
@@ -105,16 +102,13 @@ class TestDupePacketFilter(unittest.TestCase):
packet.processed = True
packet.timestamp = 2000
with mock.patch(
'aprsd.packets.filters.dupe_filter.packets.PacketList'
) as mock_list:
mock_list_instance = mock.MagicMock()
found_packet = fake.fake_packet(msg_number='123')
found_packet.timestamp = 1000 # More than 60 seconds ago
mock_list_instance.find.return_value = found_packet
mock_list.return_value = mock_list_instance
mock_list_instance = mock.MagicMock()
found_packet = fake.fake_packet(msg_number='123')
found_packet.timestamp = 1000 # More than 60 seconds ago
mock_list_instance.find.return_value = found_packet
self.filter.pl = mock_list_instance
with mock.patch('aprsd.packets.filters.dupe_filter.LOG') as mock_log:
result = self.filter.filter(packet)
self.assertEqual(result, packet) # Should pass
mock_log.warning.assert_called()
with mock.patch('aprsd.packets.filters.dupe_filter.LOG') as mock_log:
result = self.filter.filter(packet)
self.assertEqual(result, packet) # Should pass
mock_log.warning.assert_called()

View File

@@ -186,32 +186,27 @@ class TestAPRSDRXThread(unittest.TestCase):
self.assertFalse(self.packet_queue.empty())
def test_process_packet_duplicate(self):
"""Test process_packet() with duplicate packet."""
from oslo_config import cfg
CONF = cfg.CONF
CONF.packet_dupe_timeout = 60
"""Test process_packet() with duplicate packet.
Note: The rx thread's process_packet() doesn't filter duplicates.
It puts all packets on the queue. Duplicate filtering happens
later in the filter thread.
"""
mock_client = MockClientDriver()
packet = fake.fake_packet(msg_number='123')
packet.processed = True
packet.timestamp = 1000
mock_client._decode_packet_return = packet
self.rx_thread._client = mock_client
self.rx_thread.pkt_count = 0
with mock.patch('aprsd.threads.rx.packet_log'):
with mock.patch('aprsd.threads.rx.packets.PacketList') as mock_pkt_list:
mock_list_instance = mock.MagicMock()
found_packet = fake.fake_packet(msg_number='123')
found_packet.timestamp = 1050 # Within timeout
mock_list_instance.find.return_value = found_packet
mock_pkt_list.return_value = mock_list_instance
with mock.patch('aprsd.threads.rx.LOG') as mock_log:
self.rx_thread.process_packet()
mock_log.warning.assert_called()
# Should not add to queue
self.assertTrue(self.packet_queue.empty())
self.rx_thread.process_packet()
# The rx thread puts all packets on the queue regardless of duplicates
# Duplicate filtering happens in the filter thread
self.assertFalse(self.packet_queue.empty())
queued_packet = self.packet_queue.get()
self.assertEqual(queued_packet, packet)
class TestAPRSDFilterThread(unittest.TestCase):

View File

@@ -2,6 +2,7 @@ import os
import pickle
import shutil
import tempfile
import threading
import unittest
from unittest import mock
@@ -17,6 +18,7 @@ class TestObjectStore(objectstore.ObjectStoreMixin):
def __init__(self):
super().__init__()
self.lock = threading.RLock()
self.data = {}

View File

@@ -2,7 +2,7 @@
minversion = 2.9.0
skipdist = True
skip_missing_interpreters = true
envlist = pep8,py{310,311}
envlist = pep8,py{311}
#requires = tox-pipenv
# pip==22.0.4
# pip-tools==5.4.0