1
0
mirror of https://github.com/craigerl/aprsd.git synced 2026-02-25 10:40:20 -05:00

Replace insecure pickle serialization with JSON

SECURITY FIX: Replace pickle.load() with json.load() to eliminate
remote code execution vulnerability from malicious pickle files.

Changes:
- Update ObjectStoreMixin to use JSON instead of pickle
- Add PacketJSONDecoder to reconstruct Packet objects from JSON
- Change file extension from .p to .json
- Add warning when old pickle files detected
- Add OrderedDict restoration for PacketList
- Update all tests to work with JSON format

Users with existing pickle files must run:
  aprsd dev migrate-pickle

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
This commit is contained in:
Walter Boring 2026-02-17 16:07:55 -05:00
parent 0701db2629
commit 202c689658
4 changed files with 161 additions and 21 deletions

View File

@ -34,6 +34,15 @@ class PacketList(objectstore.ObjectStoreMixin):
'packets': OrderedDict(),
}
def _restore_ordereddict(self):
"""Restore OrderedDict for packets after loading from JSON.
JSON doesn't preserve OrderedDict type, but Python 3.7+ dicts
maintain insertion order, so we can safely convert back.
"""
if 'packets' in self.data and not isinstance(self.data['packets'], OrderedDict):
self.data['packets'] = OrderedDict(self.data['packets'])
def rx(self, packet: type[core.Packet]):
"""Add a packet that was received."""
with self.lock:

View File

@ -83,3 +83,49 @@ class EnhancedJSONDecoder(json.JSONDecoder):
o = getattr(o, e)
args, kwargs = d.get('args', ()), d.get('kwargs', {})
return o(*args, **kwargs)
class PacketJSONDecoder(json.JSONDecoder):
    """JSON decoder that rebuilds rich objects from their dict form.

    ObjectStoreMixin uses this decoder when reading its JSON save files.
    It recognizes:
    - Packet objects and their subclasses (AckPacket, MessagePacket, etc.)
    - datetime values stored as ISO-format strings
    """

    def __init__(self, *args, **kwargs):
        # Install our hook so every decoded JSON object passes through it.
        super().__init__(
            *args,
            object_hook=self.object_hook,
            **kwargs,
        )

    def object_hook(self, obj):
        """Turn a decoded dict back into a Packet or datetime where possible."""
        if not isinstance(obj, dict):
            return obj
        # A serialized Packet carries _type plus the two call fields.
        if '_type' in obj and 'from_call' in obj and 'to_call' in obj:
            try:
                # The factory picks the right Packet subclass from _type.
                return core.factory(obj)
            except Exception:
                # Keep the raw dict rather than losing data if the
                # on-disk packet format has drifted.
                return obj
        # Known field names that may hold ISO-format datetime strings.
        for field in ['last', 'timestamp', 'last_send_time']:
            value = obj.get(field)
            if isinstance(value, str):
                try:
                    obj[field] = datetime.datetime.fromisoformat(value)
                except (ValueError, TypeError, AttributeError):
                    # Not a datetime after all; leave the string alone.
                    pass
        return obj

View File

@ -1,10 +1,12 @@
import json
import logging
import os
import pathlib
import pickle
from oslo_config import cfg
from aprsd.utils.json import PacketJSONDecoder, SimpleJSONEncoder
CONF = cfg.CONF
LOG = logging.getLogger('APRSD')
@ -61,13 +63,21 @@ class ObjectStoreMixin:
def _save_filename(self):
    """Return the path of this object's JSON save file."""
    name = self.__class__.__name__.lower()
    return f'{CONF.save_location}/{name}.json'
def _old_save_filename(self):
    """Return the legacy pickle filename, used only to detect old saves."""
    name = self.__class__.__name__.lower()
    return f'{CONF.save_location}/{name}.p'
def save(self):
"""Save any queued to disk?"""
"""Save any queued to disk as JSON."""
if not CONF.enable_save:
return
self._init_store()
@ -79,8 +89,8 @@ class ObjectStoreMixin:
f'{save_filename}',
)
with self.lock:
with open(save_filename, 'wb+') as fp:
pickle.dump(self.data, fp)
with open(save_filename, 'w') as fp:
json.dump(self.data, fp, cls=SimpleJSONEncoder, indent=2)
else:
LOG.debug(
"{} Nothing to save, flushing old save file '{}'".format(
@ -91,33 +101,49 @@ class ObjectStoreMixin:
self.flush()
def load(self):
    """Load saved data from the JSON file, if one exists.

    Refuses to read a legacy pickle save file: unpickling untrusted data
    can execute arbitrary code, so the user is warned to run
    "aprsd dev migrate-pickle" instead of loading it here.
    """
    if not CONF.enable_save:
        return
    json_file = self._save_filename()
    pickle_file = self._old_save_filename()
    with self.lock:
        # A legacy pickle save with no JSON counterpart means the user
        # has not migrated yet; skip loading rather than unpickle it.
        if not os.path.exists(json_file) and os.path.exists(pickle_file):
            LOG.warning(
                f'{self.__class__.__name__}::Found old pickle file {pickle_file}. '
                f'Please run "aprsd dev migrate-pickle" to convert to JSON format. '
                f'Skipping load to avoid security risk.'
            )
            return
        if os.path.exists(json_file):
            try:
                with open(json_file, 'r') as fp:
                    raw = json.load(fp, cls=PacketJSONDecoder)
                if raw:
                    self.data = raw
                    # PacketList keeps its packets in an OrderedDict,
                    # which JSON does not preserve; restore it if the
                    # subclass provides the hook.
                    if hasattr(self, '_restore_ordereddict'):
                        self._restore_ordereddict()
                    LOG.debug(
                        f'{self.__class__.__name__}::Loaded {len(self)} entries from disk.',
                    )
                else:
                    LOG.debug(f'{self.__class__.__name__}::No data to load.')
            except Exception as ex:
                # json.JSONDecodeError is already an Exception subclass,
                # so one broad handler keeps a corrupt save file from
                # crashing startup; data is reset to a clean state.
                LOG.error(f'Failed to load JSON from {json_file}')
                LOG.exception(ex)
                self.data = {}
        else:
            LOG.debug(f'{self.__class__.__name__}::No save file found.')
def flush(self):
    """Remove the JSON save file (if any) and reset in-memory data."""
    if not CONF.enable_save:
        return
    # One lock acquisition covers both the file removal and the data
    # reset, so no reader can observe the file gone but data still set.
    with self.lock:
        save_file = pathlib.Path(self._save_filename())
        if save_file.exists():
            save_file.unlink()
        self.data = {}

View File

@ -1,5 +1,6 @@
import datetime
import json
import os
import pickle
import shutil
import tempfile
import threading
@ -8,6 +9,7 @@ from unittest import mock
from oslo_config import cfg
from aprsd.packets import core
from aprsd.utils import objectstore
CONF = cfg.CONF
@ -94,7 +96,7 @@ class TestObjectStoreMixin(unittest.TestCase):
filename = obj._save_filename()
self.assertIn('testobjectstore', filename.lower())
self.assertTrue(filename.endswith('.p'))
self.assertTrue(filename.endswith('.json'))
def test_save(self):
"""Test save() method."""
@ -107,9 +109,9 @@ class TestObjectStoreMixin(unittest.TestCase):
filename = obj._save_filename()
self.assertTrue(os.path.exists(filename))
# Verify data was saved
with open(filename, 'rb') as fp:
loaded_data = pickle.load(fp)
# Verify data was saved as JSON
with open(filename, 'r') as fp:
loaded_data = json.load(fp)
self.assertEqual(loaded_data, obj.data)
def test_save_empty(self):
@ -154,13 +156,13 @@ class TestObjectStoreMixin(unittest.TestCase):
mock_log.debug.assert_called()
def test_load_corrupted_file(self):
"""Test load() with corrupted pickle file."""
"""Test load() with corrupted JSON file."""
obj = TestObjectStore()
filename = obj._save_filename()
# Create corrupted file
with open(filename, 'wb') as fp:
fp.write(b'corrupted data')
with open(filename, 'w') as fp:
fp.write('{corrupted json data')
with mock.patch('aprsd.utils.objectstore.LOG') as mock_log:
obj.load()
@ -246,3 +248,60 @@ class TestObjectStoreMixin(unittest.TestCase):
self.assertEqual(len(errors), 0)
# All operations should complete
self.assertGreater(len(obj.data), 0)
def test_save_load_with_datetime(self):
    """Test save/load with datetime objects."""
    store = TestObjectStore()
    stamp = datetime.datetime.now()
    store.data['timestamp'] = stamp
    store.data['key1'] = 'value1'
    store.save()

    restored = TestObjectStore()
    restored.load()
    self.assertEqual(restored.data['key1'], 'value1')
    # The timestamp should survive the round trip, either rebuilt as a
    # datetime or left as its ISO string (microseconds may be lost).
    reloaded_stamp = restored.data['timestamp']
    self.assertIsInstance(reloaded_stamp, (datetime.datetime, str))
def test_save_load_with_packet(self):
    """Test save/load with Packet objects."""
    store = TestObjectStore()
    msg = core.MessagePacket(
        from_call='N0CALL',
        to_call='TEST',
        message_text='Test message',
    )
    store.data['packet1'] = msg
    store.data['key1'] = 'value1'
    store.save()

    restored = TestObjectStore()
    restored.load()
    self.assertEqual(restored.data['key1'], 'value1')
    # The decoder should hand back a real Packet, not a plain dict.
    rebuilt = restored.data['packet1']
    self.assertIsInstance(rebuilt, core.Packet)
    self.assertEqual(rebuilt.from_call, 'N0CALL')
    self.assertEqual(rebuilt.to_call, 'TEST')
def test_old_pickle_file_warning(self):
    """Test warning when old pickle file exists."""
    store = TestObjectStore()
    legacy_path = store._old_save_filename()
    # Drop a stand-in pickle file where a legacy save would live.
    with open(legacy_path, 'wb') as fp:
        fp.write(b'fake pickle data')

    with mock.patch('aprsd.utils.objectstore.LOG') as mock_log:
        store.load()
        # load() must warn about the pickle file and point at migration.
        mock_log.warning.assert_called()
        warning_text = str(mock_log.warning.call_args)
        self.assertIn('pickle', warning_text.lower())
        self.assertIn('migrate', warning_text.lower())