add exception logging

commit db068d859e
parent 74ca23af73
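This commit replaces the bare except: clauses in the GPS/Data handlers with except Exception as error_exception: and logs both the exception object and its traceback, so failures in the email, APRS upload, and command handling paths are no longer silent. The diff below touches two Python modules and the project requirements list. The pattern applied throughout is sketched here; do_work() is a placeholder for the guarded call (send_email, aprs_send, aprslib.parse, and so on), not a function from the project:

    import logging
    import traceback

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger(__name__)

    def do_work():
        # placeholder for the guarded call; raises to show the logging path
        raise RuntimeError('simulated failure')

    try:
        do_work()
    except Exception as error_exception:
        logger.info('Operation failed.')
        # log the exception itself, then the decoded traceback frames
        logger.info(error_exception)
        logger.info(str(traceback.extract_tb(error_exception.__traceback__)))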
@@ -2,6 +2,8 @@
 #
 ###############################################################################
 # Copyright (C) 2016-2019 Cortney T. Buffington, N0MJS <n0mjs@me.com>
+# GPS/Data - Copyright (C) 2020 Eric Craw, KF7EEL <kf7eel@qsl.net>
+# Annotated modifications Copyright (C) 2021 Xavier FRS2013
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -42,7 +44,7 @@ from twisted.protocols.basic import NetstringReceiver
 from twisted.internet import reactor, task
 
 # Things we import from the main hblink module
-from hblink import HBSYSTEM, OPENBRIDGE, systems, hblink_handler, reportFactory, REPORT_OPCODES, mk_aliases
+from hblink import HBSYSTEM, OPENBRIDGE, systems, hblink_handler, reportFactory, REPORT_OPCODES, mk_aliases, aprs_upload, sendAprs
 from dmr_utils3.utils import bytes_3, int_id, get_alias
 from dmr_utils3 import decode, bptc, const
 import config
@@ -55,6 +57,7 @@ import pickle
 # The module needs logging, but handlers, etc. are controlled by the parent
 import logging
 logger = logging.getLogger(__name__)
+import traceback
 
 # Import UNIT time from rules.py
 from rules import UNIT_TIME, STATIC_UNIT
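For reference, traceback.extract_tb() takes the traceback attached to the caught exception and returns a StackSummary of FrameSummary entries (filename, line number, function name, source line); the str() of that summary is what the new logger.info() calls below write to the log. A small self-contained illustration, assuming nothing beyond the standard library:

    import traceback

    def fail():
        return 1 / 0  # deliberately raises ZeroDivisionError

    try:
        fail()
    except Exception as error_exception:
        frames = traceback.extract_tb(error_exception.__traceback__)
        for frame in frames:
            # each FrameSummary carries filename, lineno, name and the source line
            print(frame.filename, frame.lineno, frame.name, frame.line)
        print(str(frames))  # the string form used by the logging calls in this commit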
@@ -108,7 +111,7 @@ __email__ = 'n0mjs@me.com'
 # AT-D878 - Compressed UDP
 # MD-380 - Unified Data Transport
 hdr_type = ''
-btf = ''
+btf = -1
 ssid = ''
 
 # From dmr_utils3, modified to decode entire packet. Works for 1/2 rate coded data.
@@ -249,16 +252,18 @@ def process_sms(_rf_src, sms):
     elif '@BB' in sms:
         dashboard_bb_write(get_alias(int_id(_rf_src), subscriber_ids), int_id(_rf_src), strftime('%H:%M:%S - %m/%d/%y'), re.sub('@BB|@BB ','',sms))
     elif '@' and ' E-' in sms:
-        email_message = re.sub('.*@|.* E-', '', sms)
-        to_email = re.sub(' E-.*', '', sms)
+        email_message = str(re.sub('.*@|.* E-', '', sms))
+        to_email = str(re.sub(' E-.*', '', sms))
         email_subject = 'New message from ' + str(get_alias(int_id(_rf_src), subscriber_ids))
         logger.info('Email to: ' + to_email)
         logger.info('Message: ' + email_message)
         try:
             send_email(to_email, email_subject, email_message)
             logger.info('Email sent.')
-        except:
+        except Exception as error_exception:
             logger.info('Failed to send email.')
+            logger.info(error_exception)
+            logger.info(str(traceback.extract_tb(error_exception.__traceback__)))
     elif '@MH' in sms:
         grid_square = re.sub('@MH ', '', sms)
         if len(grid_square) < 6:
@@ -313,8 +318,10 @@ def process_sms(_rf_src, sms):
             aprs_send(aprs_loc_packet)
             dashboard_loc_write(str(get_alias(int_id(_rf_src), subscriber_ids)) + '-' + ssid, aprs_lat, aprs_lon, strftime('%H:%M:%S - %m/%d/%y'))
             #logger.info('Sent manual position to APRS')
-        except:
+        except Exception as error_exception:
             logger.info('Exception. Not uploaded')
+            logger.info(error_exception)
+            logger.info(str(traceback.extract_tb(error_exception.__traceback__)))
         packet_assembly = ''
 
 
@@ -334,15 +341,19 @@ def process_sms(_rf_src, sms):
             aprslib.parse(aprs_msg_pkt)
             aprs_send(aprs_msg_pkt)
             #logger.info('Packet sent.')
-        except:
+        except Exception as error_exception:
             logger.info('Error uploading MSG packet.')
+            logger.info(error_exception)
+            logger.info(str(traceback.extract_tb(error_exception.__traceback__)))
         try:
             if sms in cmd_list:
                 logger.info('Executing command/script.')
                 os.popen(cmd_list[sms]).read()
                 packet_assembly = ''
-        except:
+        except Exception as error_exception:
             logger.info('Exception. Command possibly not in list, or other error.')
+            logger.info(error_exception)
+            logger.info(str(traceback.extract_tb(error_exception.__traceback__)))
             packet_assembly = ''
     else:
         pass
@@ -1486,8 +1497,10 @@ class routerHBP(HBSYSTEM):
                     aprs_send(aprs_loc_packet)
                     dashboard_loc_write(str(get_alias(int_id(_rf_src), subscriber_ids)) + '-' + ssid, aprs_lat, aprs_lon, strftime('%H:%M:%S - %m/%d/%y'))
                     #logger.info('Sent APRS packet')
-                except:
+                except Exception as error_exception:
                     logger.info('Error. Failed to send packet. Packet may be malformed.')
+                    logger.info(error_exception)
+                    logger.info(str(traceback.extract_tb(error_exception.__traceback__)))
                 udt_block = 1
                 hdr_type = ''
             else:
@@ -1572,9 +1585,11 @@ class routerHBP(HBSYSTEM):
                    logger.info('User comment: ' + comment)
                    logger.info('User SSID: ' + ssid)
                    logger.info('User icon: ' + icon_table + icon_icon)
-                except:
+                except Exception as error_exception:
                    logger.info('Error or user settings file not found, proceeding with default settings.')
                    aprs_loc_packet = str(get_alias(int_id(_rf_src), subscriber_ids)) + '-' + str(user_ssid) + '>APHBL3,TCPIP*:/' + str(datetime.datetime.utcnow().strftime("%H%M%Sh")) + str(loc.lat[0:7]) + str(loc.lat_dir) + '/' + str(loc.lon[0:8]) + str(loc.lon_dir) + '[' + str(round(loc.true_course)).zfill(3) + '/' + str(round(loc.spd_over_grnd)).zfill(3) + '/' + aprs_comment + ' DMR ID: ' + str(int_id(_rf_src))
+                   logger.info(error_exception)
+                   logger.info(str(traceback.extract_tb(error_exception.__traceback__)))
                 try:
                    # Try parse of APRS packet. If it fails, it will not upload to APRS-IS
                    aprslib.parse(aprs_loc_packet)
@@ -1583,8 +1598,10 @@ class routerHBP(HBSYSTEM):
                    float(loc.lon)
                    aprs_send(aprs_loc_packet)
                    dashboard_loc_write(str(get_alias(int_id(_rf_src), subscriber_ids)) + '-' + ssid, str(loc.lat[0:7]) + str(loc.lat_dir), str(loc.lon[0:8]) + str(loc.lon_dir), strftime('%H:%M:%S - %m/%d/%y'))
-                except:
+                except Exception as error_exception:
                    logger.info('Failed to parse packet. Packet may be deformed. Not uploaded.')
+                   logger.info(error_exception)
+                   logger.info(str(traceback.extract_tb(error_exception.__traceback__)))
                 #final_packet = ''
                 # Get callsign based on DMR ID
                 # End APRS-IS upload
@@ -1812,6 +1829,7 @@ if __name__ == '__main__':
             systems[system] = routerHBP(system, CONFIG, report_server)
             reactor.listenUDP(CONFIG['SYSTEMS'][system]['PORT'], systems[system], interface=CONFIG['SYSTEMS'][system]['IP'])
             logger.debug('(GLOBAL) %s instance created: %s, %s', CONFIG['SYSTEMS'][system]['MODE'], system, systems[system])
+    aprs_upload(CONFIG)
 
     def loopingErrHandle(failure):
         logger.error('(GLOBAL) STOPPING REACTOR TO AVOID MEMORY LEAK: Unhandled error in timed loop.\n %s', failure)
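The same except/logging changes are repeated in gps_data.py below. For comparison only (this commit does not use it), the standard logging module can capture the message and the fully formatted traceback in a single call with Logger.exception(), which logs at ERROR level and must be called from inside an except block; a minimal sketch:

    import logging

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger(__name__)

    try:
        raise RuntimeError('simulated send failure')
    except Exception:
        # appends the formatted traceback to the log record automatically
        logger.exception('Failed to send email.')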
gps_data.py (44 changed lines)
@@ -48,6 +48,7 @@ import const
 # The module needs logging logging, but handlers, etc. are controlled by the parent
 import logging
 logger = logging.getLogger(__name__)
+import traceback
 
 # Other modules we need for data and GPS
 from bitarray import bitarray
@@ -70,12 +71,12 @@ from gps_functions import cmd_list
 # Module for maidenhead grids
 try:
     import maidenhead as mh
-except:
+except Exception as error_exception:
     logger.info('Error importing maidenhead module, make sure it is installed.')
 # Module for sending email
 try:
     import smtplib
-except:
+except Exception as error_exception:
     logger.info('Error importing smtplib module, make sure it is installed.')
 
 #Modules for APRS settings
@@ -92,6 +93,8 @@ __maintainer__ = 'Eric Craw, KF7EEL'
 __email__ = 'kf7eel@qsl.net'
 __status__ = 'pre-alpha'
 
+# Known to work with: AT-D878
+
 # Must have the following at line 1054 in bridge.py to forward group vcsbk, also there is a typo there:
 # self.group_received(_peer_id, _rf_src, _dst_id, _seq, _slot, _frame_type, _dtype_vseq, _stream_id, _data)
 
@@ -101,7 +104,7 @@ __status__ = 'pre-alpha'
 # AT-D878 - Compressed UDP
 # MD-380 - Unified Data Transport
 hdr_type = ''
-btf = ''
+btf = -1
 ssid = ''
 
 # From dmr_utils3, modified to decode entire packet. Works for 1/2 rate coded data.
@@ -242,16 +245,18 @@ def process_sms(_rf_src, sms):
     elif '@BB' in sms:
         dashboard_bb_write(get_alias(int_id(_rf_src), subscriber_ids), int_id(_rf_src), time.strftime('%H:%M:%S - %m/%d/%y'), re.sub('@BB|@BB ','',sms))
     elif '@' and ' E-' in sms:
-        email_message = re.sub('.*@|.* E-', '', sms)
-        to_email = re.sub(' E-.*', '', sms)
+        email_message = str(re.sub('.*@|.* E-', '', sms))
+        to_email = str(re.sub(' E-.*', '', sms))
         email_subject = 'New message from ' + str(get_alias(int_id(_rf_src), subscriber_ids))
         logger.info('Email to: ' + to_email)
         logger.info('Message: ' + email_message)
         try:
             send_email(to_email, email_subject, email_message)
             logger.info('Email sent.')
-        except:
+        except Exception as error_exception:
             logger.info('Failed to send email.')
+            logger.info(error_exception)
+            logger.info(str(traceback.extract_tb(error_exception.__traceback__)))
     elif '@MH' in sms:
         grid_square = re.sub('@MH ', '', sms)
         if len(grid_square) < 6:
@@ -306,8 +311,10 @@ def process_sms(_rf_src, sms):
             aprs_send(aprs_loc_packet)
             dashboard_loc_write(str(get_alias(int_id(_rf_src), subscriber_ids)) + '-' + ssid, aprs_lat, aprs_lon, time.strftime('%H:%M:%S - %m/%d/%y'))
             #logger.info('Sent manual position to APRS')
-        except:
+        except Exception as error_exception:
             logger.info('Exception. Not uploaded')
+            logger.info(error_exception)
+            logger.info(str(traceback.extract_tb(error_exception.__traceback__)))
         packet_assembly = ''
 
 
@@ -327,16 +334,20 @@ def process_sms(_rf_src, sms):
             aprslib.parse(aprs_msg_pkt)
             aprs_send(aprs_msg_pkt)
             #logger.info('Packet sent.')
-        except:
+        except Exception as error_exception:
             logger.info('Error uploading MSG packet.')
+            logger.info(error_exception)
+            logger.info(str(traceback.extract_tb(error_exception.__traceback__)))
         try:
             if sms in cmd_list:
                 logger.info('Executing command/script.')
                 os.popen(cmd_list[sms]).read()
                 packet_assembly = ''
-        except:
+        except Exception as error_exception:
             logger.info('Exception. Command possibly not in list, or other error.')
             packet_assembly = ''
+            logger.info(error_exception)
+            logger.info(str(traceback.extract_tb(error_exception.__traceback__)))
     else:
         pass
 
@@ -434,8 +445,10 @@ class DATA_SYSTEM(HBSYSTEM):
                     aprs_send(aprs_loc_packet)
                    dashboard_loc_write(str(get_alias(int_id(_rf_src), subscriber_ids)) + '-' + ssid, aprs_lat, aprs_lon, time.strftime('%H:%M:%S - %m/%d/%y'))
                    #logger.info('Sent APRS packet')
-                except:
+                except Exception as error_exception:
                    logger.info('Error. Failed to send packet. Packet may be malformed.')
+                   logger.info(error_exception)
+                   logger.info(str(traceback.extract_tb(error_exception.__traceback__)))
                 udt_block = 1
                 hdr_type = ''
             else:
@@ -459,7 +472,7 @@ class DATA_SYSTEM(HBSYSTEM):
                 btf = btf - 1
                 logger.info('Block #: ' + str(btf))
                 #logger.info(_seq)
-                logger.info('Data block from ' + str(get_alias(int_id(_rf_src), subscriber_ids)) + '. DMR ID: ' + str(int_id(_rf_src)) + '. Destination: ' + str(int_id(_dst_id)))
+                logger.info('Data block from ' + str(get_alias(int_id(_rf_src), subscriber_ids)) + '. DMR ID: ' + str(int_id(_rf_src)))
                 logger.info(ahex(bptc_decode(_data)))
                 if _seq == 0:
                     n_packet_assembly = 0
@@ -474,7 +487,6 @@ class DATA_SYSTEM(HBSYSTEM):
                 if btf == 0:
                     final_packet = str(bitarray(re.sub("\)|\(|bitarray|'", '', packet_assembly)).tobytes().decode('utf-8', 'ignore'))
                     sms_hex = str(ba2hx(bitarray(re.sub("\)|\(|bitarray|'", '', packet_assembly))))
-
                     sms_hex_string = re.sub("b'|'", '', str(sms_hex))
                     #NMEA GPS sentence
                     if '$GPRMC' in final_packet or '$GNRMC' in final_packet:
@@ -521,9 +533,11 @@ class DATA_SYSTEM(HBSYSTEM):
                    logger.info('User comment: ' + comment)
                    logger.info('User SSID: ' + ssid)
                    logger.info('User icon: ' + icon_table + icon_icon)
-                except:
+                except Exception as error_exception:
                    logger.info('Error or user settings file not found, proceeding with default settings.')
                    aprs_loc_packet = str(get_alias(int_id(_rf_src), subscriber_ids)) + '-' + str(user_ssid) + '>APHBL3,TCPIP*:/' + str(datetime.datetime.utcnow().strftime("%H%M%Sh")) + str(loc.lat[0:7]) + str(loc.lat_dir) + '/' + str(loc.lon[0:8]) + str(loc.lon_dir) + '[' + str(round(loc.true_course)).zfill(3) + '/' + str(round(loc.spd_over_grnd)).zfill(3) + '/' + aprs_comment + ' DMR ID: ' + str(int_id(_rf_src))
+                   logger.info(error_exception)
+                   logger.info(str(traceback.extract_tb(error_exception.__traceback__)))
                 try:
                    # Try parse of APRS packet. If it fails, it will not upload to APRS-IS
                    aprslib.parse(aprs_loc_packet)
@@ -532,8 +546,10 @@ class DATA_SYSTEM(HBSYSTEM):
                    float(loc.lon)
                    aprs_send(aprs_loc_packet)
                    dashboard_loc_write(str(get_alias(int_id(_rf_src), subscriber_ids)) + '-' + ssid, str(loc.lat[0:7]) + str(loc.lat_dir), str(loc.lon[0:8]) + str(loc.lon_dir), time.strftime('%H:%M:%S - %m/%d/%y'))
-                except:
+                except Exception as error_exception:
                    logger.info('Failed to parse packet. Packet may be deformed. Not uploaded.')
+                   logger.info(error_exception)
+                   logger.info(str(traceback.extract_tb(error_exception.__traceback__)))
                 #final_packet = ''
                 # Get callsign based on DMR ID
                 # End APRS-IS upload
@@ -3,7 +3,8 @@ bitarray>=0.8.1
 Twisted>=16.3.0
 dmr_utils3>=0.1.19
 configparser>=3.0.0
-aprslib
+aprslib>=0.6.42
+traceback
 pynmea2
 maidenhead
 flask
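Two notes on the dependency changes above: aprslib is now pinned to 0.6.42 or newer, and traceback is listed even though it ships with the Python standard library, so importing it needs no separate installation. The installed versions of the third-party packages can be checked at runtime with importlib.metadata (Python 3.8+); a small sketch, which raises PackageNotFoundError for anything not installed:

    # traceback is stdlib and always importable; no requirements entry is needed for it
    import traceback
    from importlib.metadata import version

    for package in ('aprslib', 'pynmea2', 'maidenhead'):
        print(package, version(package))  # aprslib should report >= 0.6.42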