Temporary fix until merge

Steve N4IRS 2017-06-19 06:29:02 -04:00
parent 6aee60c84e
commit bf1b2189a1
3 changed files with 390 additions and 132 deletions

dmrlink.py

@@ -25,11 +25,17 @@
 from __future__ import print_function
-# Full imports
+import ConfigParser
+import argparse
+import sys
+import csv
+import os
 import logging
+import signal
 import cPickle as pickle
-# Function Imports
+from logging.config import dictConfig
 from hmac import new as hmac_new
 from binascii import b2a_hex as ahex
 from binascii import a2b_hex as bhex
@@ -38,18 +44,15 @@ from socket import inet_ntoa as IPAddr
 from socket import inet_aton as IPHexStr
 from time import time
-# Twisted Imports
 from twisted.internet.protocol import DatagramProtocol, Factory, Protocol
 from twisted.protocols.basic import NetstringReceiver
 from twisted.internet import reactor, task
-# Imports files in the dmrlink subdirectory (these things shouldn't change often)
 from ipsc.ipsc_const import *
 from ipsc.ipsc_mask import *
-from ipsc.reporting_const import *
-# Imports from DMR Utilities package
-from dmr_utils.utils import hex_str_2, hex_str_3, hex_str_4, int_id, try_download, mk_id_dict
+from dmrlink_config import build_config
+from dmrlink_log import config_logging
+from dmr_utils.utils import hex_str_2, hex_str_3, hex_str_4, int_id

 __author__ = 'Cortney T. Buffington, N0MJS'
@@ -63,83 +66,37 @@ __email__ = 'n0mjs@me.com'
 # Global variables used whether we are a module or __main__
 systems = {}

 # Timed loop used for reporting IPSC status
 #
 # REPORT BASED ON THE TYPE SELECTED IN THE MAIN CONFIG FILE
-def config_reports(_config, _logger, _factory):
-    if _config['REPORTS']['REPORT_NETWORKS'] == 'PRINT':
+def config_reports(_config):
+    if _config['REPORTS']['REPORT_NETWORKS'] == 'PICKLE':
+        def reporting_loop(_logger):
+            _logger.debug('Periodic Reporting Loop Started (PICKLE)')
+            try:
+                with open(_config['REPORTS']['REPORT_PATH']+'dmrlink_stats.pickle', 'wb') as file:
+                    pickle.dump(_config['SYSTEMS'], file, 2)
+                    file.close()
+            except IOError as detail:
+                _logger.error('I/O Error: %s', detail)
+
+    elif _config['REPORTS']['REPORT_NETWORKS'] == 'PRINT':
         def reporting_loop(_logger):
             _logger.debug('Periodic Reporting Loop Started (PRINT)')
             for system in _config['SYSTEMS']:
                 print_master(_config, system)
                 print_peer_list(_config, system)
-
-        reporting = task.LoopingCall(reporting_loop, _logger)
-        reporting.start(_config['REPORTS']['REPORT_INTERVAL'])
-        report_server = False

     elif _config['REPORTS']['REPORT_NETWORKS'] == 'NETWORK':
         def reporting_loop(_logger, _server):
             _logger.debug('Periodic Reporting Loop Started (NETWORK)')
             _server.send_config()
-
-        _logger.info('DMRlink TCP reporting server starting')
-
-        report_server = _factory(_config, _logger)
-        report_server.clients = []
-        reactor.listenTCP(_config['REPORTS']['REPORT_PORT'], report_server)
-
-        reporting = task.LoopingCall(reporting_loop, _logger, report_server)
-        reporting.start(_config['REPORTS']['REPORT_INTERVAL'])

     else:
         def reporting_loop(_logger):
             _logger.debug('Periodic Reporting Loop Started (NULL)')
-        report_server = False

-    return report_server
+    return reporting_loop

-# ID ALIAS CREATION
-# Download
-def build_aliases(_config, _logger):
-    if _config['ALIASES']['TRY_DOWNLOAD'] == True:
-        # Try updating peer aliases file
-        result = try_download(_config['ALIASES']['PATH'], _config['ALIASES']['PEER_FILE'], _config['ALIASES']['PEER_URL'], _config['ALIASES']['STALE_TIME'])
-        _logger.info(result)
-        # Try updating subscriber aliases file
-        result = try_download(_config['ALIASES']['PATH'], _config['ALIASES']['SUBSCRIBER_FILE'], _config['ALIASES']['SUBSCRIBER_URL'], _config['ALIASES']['STALE_TIME'])
-        _logger.info(result)
-    # Make Dictionaries
-    peer_ids = mk_id_dict(_config['ALIASES']['PATH'], _config['ALIASES']['PEER_FILE'])
-    if peer_ids:
-        _logger.info('ID ALIAS MAPPER: peer_ids dictionary is available')
-    subscriber_ids = mk_id_dict(_config['ALIASES']['PATH'], _config['ALIASES']['SUBSCRIBER_FILE'])
-    if subscriber_ids:
-        _logger.info('ID ALIAS MAPPER: subscriber_ids dictionary is available')
-    talkgroup_ids = mk_id_dict(_config['ALIASES']['PATH'], _config['ALIASES']['TGID_FILE'])
-    if talkgroup_ids:
-        _logger.info('ID ALIAS MAPPER: talkgroup_ids dictionary is available')
-    local_ids = mk_id_dict(_config['ALIASES']['PATH'], _config['ALIASES']['LOCAL_FILE'])
-    if local_ids:
-        _logger.info('ID ALIAS MAPPER: local_ids dictionary is available')
-    return(peer_ids, subscriber_ids, talkgroup_ids, local_ids)

-# Make the IPSC systems from the config and the class used to build them.
-#
-def mk_ipsc_systems(_config, _logger, _systems, _ipsc, _report_server):
-    for system in _config['SYSTEMS']:
-        if _config['SYSTEMS'][system]['LOCAL']['ENABLED']:
-            _systems[system] = _ipsc(system, _config, _logger, _report_server)
-            reactor.listenUDP(_config['SYSTEMS'][system]['LOCAL']['PORT'], _systems[system], interface=_config['SYSTEMS'][system]['LOCAL']['IP'])
-    return _systems

 # Process the MODE byte in registration/peer list packets for determining master and peer capabilities
 #
@@ -279,7 +236,7 @@ def print_master(_config, _network)
 #************************************************
 class IPSC(DatagramProtocol):
-    def __init__(self, _name, _config, _logger, _report):
+    def __init__(self, _name, _config, _logger):
         # Housekeeping: create references to the configuration and status data for this IPSC instance.
         # Some configuration objects that are used frequently and have lengthy names are shortened
@@ -289,9 +246,7 @@ class IPSC(DatagramProtocol):
         self._system = _name
         self._CONFIG = _config
         self._logger = _logger
-        self._report = _report
         self._config = self._CONFIG['SYSTEMS'][self._system]
-        self._rcm = self._CONFIG['REPORTS']['REPORT_RCM'] and self._report
         #
         self._local = self._config['LOCAL']
         self._local_id = self._local['RADIO_ID']
@@ -365,13 +320,7 @@ class IPSC(DatagramProtocol):
         else:
             self._logger.warning('(%s) Peer De-Registration Requested for: %s, but we don\'t have a listing for this peer', self._system, int_id(_peerid))
             pass
-    # De-register ourselves from the IPSC
-    def de_register_self(self):
-        self._logger.info('(%s) De-Registering self from the IPSC system', self._system)
-        de_reg_req_pkt = self.hashed_packet(self._local['AUTH_KEY'], self.DE_REG_REQ_PKT)
-        self.send_to_ipsc(de_reg_req_pkt)

     # Take a received peer list and the network it belongs to, process and populate the
     # data structure in my_ipsc_config with the results, and return a simple list of peers.
     #
@@ -442,23 +391,15 @@ class IPSC(DatagramProtocol):
     #************************************************
     # CALLBACK FUNCTIONS FOR USER PACKET TYPES
     #************************************************
-    # If RCM reporting and reporting is network-based in the global configuration,
-    # send the RCM packet to the monitoring server
     def call_mon_status(self, _data):
         self._logger.debug('(%s) Repeater Call Monitor Origin Packet Received: %s', self._system, ahex(_data))
-        if self._rcm:
-            self._report.send_rcm(self._system + ','+ _data)

     def call_mon_rpt(self, _data):
         self._logger.debug('(%s) Repeater Call Monitor Repeating Packet Received: %s', self._system, ahex(_data))
-        if self._rcm:
-            self._report.send_rcm(self._system + ',' + _data)

     def call_mon_nack(self, _data):
         self._logger.debug('(%s) Repeater Call Monitor NACK Packet Received: %s', self._system, ahex(_data))
-        if self._rcm:
-            self._report.send_rcm(self._system + ',' + _data)

     def xcmp_xnl(self, _data):
         self._logger.debug('(%s) XCMP/XNL Packet Received: %s', self._system, ahex(_data))
@@ -997,51 +938,45 @@ class IPSC(DatagramProtocol):
 # Socket-based reporting section
 #
 class report(NetstringReceiver):
-    def __init__(self, factory):
-        self._factory = factory
+    def __init__(self):
+        pass

     def connectionMade(self):
-        self._factory.clients.append(self)
-        self._factory._logger.info('DMRlink reporting client connected: %s', self.transport.getPeer())
+        report_server.clients.append(self)
+        logger.info('DMRlink reporting client connected: %s', self.transport.getPeer())

     def connectionLost(self, reason):
-        self._factory._logger.info('DMRlink reporting client disconnected: %s', self.transport.getPeer())
-        self._factory.clients.remove(self)
+        logger.info('DMRlink reporting client disconnected: %s', self.transport.getPeer())
+        report_server.clients.remove(self)

     def stringReceived(self, data):
         self.process_message(data)

     def process_message(self, _message):
         opcode = _message[:1]
-        if opcode == REPORT_OPCODES['CONFIG_REQ']:
-            self._factory._logger.info('DMRlink reporting client sent \'CONFIG_REQ\': %s', self.transport.getPeer())
+        if opcode == REP_OPC['CONFIG_REQ']:
+            logger.info('DMRlink reporting client sent \'CONFIG_REQ\': %s', self.transport.getPeer())
             self.send_config()
         else:
             print('got unknown opcode')

 class reportFactory(Factory):
-    def __init__(self, config, logger):
-        self._config = config
-        self._logger = logger
+    def __init__(self):
+        pass

     def buildProtocol(self, addr):
-        if (addr.host) in self._config['REPORTS']['REPORT_CLIENTS'] or '*' in self._config['REPORTS']['REPORT_CLIENTS']:
-            self._logger.debug('Permitting report server connection attempt from: %s:%s', addr.host, addr.port)
-            return report(self)
+        if (addr.host) in CONFIG['REPORTS']['REPORT_CLIENTS']:
+            return report()
         else:
-            self._logger.error('Invalid report server connection attempt from: %s:%s', addr.host, addr.port)
             return None

     def send_clients(self, _message):
-        for client in self.clients:
+        for client in report_server.clients:
             client.sendString(_message)

     def send_config(self):
-        serialized = pickle.dumps(self._config['SYSTEMS'], protocol=pickle.HIGHEST_PROTOCOL)
-        self.send_clients(REPORT_OPCODES['CONFIG_SND']+serialized)
-
-    def send_rcm(self, _data):
-        self.send_clients(REPORT_OPCODES['RCM_SND']+_data)
+        serialized = pickle.dumps(CONFIG['SYSTEMS'], protocol=pickle.HIGHEST_PROTOCOL)
+        self.send_clients(REP_OPC['CONFIG_SND']+serialized)

 #************************************************
@@ -1049,13 +984,6 @@ class reportFactory(Factory):
 #************************************************
 if __name__ == '__main__':
-    import argparse
-    import sys
-    import os
-    import signal
-    from ipsc.dmrlink_config import build_config
-    from ipsc.dmrlink_log import config_logging

     # Change the current directory to the location of the application
     os.chdir(os.path.dirname(os.path.realpath(sys.argv[0])))
@@ -1079,28 +1007,50 @@ if __name__ == '__main__':
     if cli_args.LOG_HANDLERS:
         CONFIG['LOGGER']['LOG_HANDLERS'] = cli_args.LOG_HANDLERS

     logger = config_logging(CONFIG['LOGGER'])
-    logger.info('DMRlink \'dmrlink.py\' (c) 2013 - 2017 N0MJS & the K0USY Group - SYSTEM STARTING...')
-
-    # Set signal handers so that we can gracefully exit if need be
+
+    config_reports(CONFIG)
+
+    logger.info('DMRlink \'dmrlink.py\' (c) 2013 - 2015 N0MJS & the K0USY Group - SYSTEM STARTING...')
+
+    # Shut ourselves down gracefully with the IPSC peers.
     def sig_handler(_signal, _frame):
         logger.info('*** DMRLINK IS TERMINATING WITH SIGNAL %s ***', str(_signal))
-        for system in systems:
-            systems[system].de_register_self()
-        reactor.stop()
+
+        for system in systems:
+            this_ipsc = systems[system]
+            logger.info('De-Registering from IPSC %s', system)
+            de_reg_req_pkt = this_ipsc.hashed_packet(this_ipsc._local['AUTH_KEY'], this_ipsc.DE_REG_REQ_PKT)
+            this_ipsc.send_to_ipsc(de_reg_req_pkt)
+
+        reactor.stop()

+    # Set signal handers so that we can gracefully exit if need be
     for sig in [signal.SIGTERM, signal.SIGINT, signal.SIGQUIT]:
         signal.signal(sig, sig_handler)

-    # INITIALIZE THE REPORTING LOOP
-    report_server = config_reports(CONFIG, logger, reportFactory)
-
-    # Build ID Aliases
-    peer_ids, subscriber_ids, talkgroup_ids, local_ids = build_aliases(CONFIG, logger)
-
-    # INITIALIZE AN IPSC OBJECT (SELF SUSTAINING) FOR EACH CONFIGRUED IPSC
-    systems = mk_ipsc_systems(CONFIG, logger, systems, IPSC, report_server)
-
-    # INITIALIZATION COMPLETE -- START THE REACTOR
+    # INITIALIZE AN IPSC OBJECT (SELF SUSTAINING) FOR EACH CONFIGUED IPSC
+    for system in CONFIG['SYSTEMS']:
+        if CONFIG['SYSTEMS'][system]['LOCAL']['ENABLED']:
+            systems[system] = IPSC(system, CONFIG, logger)
+            reactor.listenUDP(CONFIG['SYSTEMS'][system]['LOCAL']['PORT'], systems[system], interface=CONFIG['SYSTEMS'][system]['LOCAL']['IP'])
+
+    # INITIALIZE THE REPORTING LOOP IF CONFIGURED
+    if CONFIG['REPORTS']['REPORT_NETWORKS'] == 'PRINT' or CONFIG['REPORTS']['REPORT_NETWORKS'] == 'PICKLE':
+        reporting_loop = config_reports(CONFIG)
+        reporting = task.LoopingCall(reporting_loop, logger)
+        reporting.start(CONFIG['REPORTS']['REPORT_INTERVAL'])
+
+    # INITIALIZE THE NETWORK-BASED REPORTING SERVER
+    elif CONFIG['REPORTS']['REPORT_NETWORKS'] == 'NETWORK':
+        logger.info('(confbridge.py) TCP reporting server starting')
+        from ipsc.reporting_const import REPORT_OPCODES as REP_OPC
+        report_server = reportFactory()
+        report_server.clients = []
+        reactor.listenTCP(CONFIG['REPORTS']['REPORT_PORT'], reportFactory())
+        reporting_loop = config_reports(CONFIG)
+        reporting = task.LoopingCall(reporting_loop, logger, report_server)
+        reporting.start(CONFIG['REPORTS']['REPORT_INTERVAL'])

     reactor.run()
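Note on the new 'PICKLE' reporting mode above: its reporting_loop simply re-writes CONFIG['SYSTEMS'] to REPORT_PATH + 'dmrlink_stats.pickle' every REPORT_INTERVAL seconds. A minimal sketch of an external consumer of that file follows; the file path and the 30-second poll interval are assumptions, not part of this commit.

#!/usr/bin/env python
# Minimal sketch: poll the stats file written by the PICKLE reporting loop.
# Assumes REPORT_PATH was './' in dmrlink.cfg; adjust the path to match your config.
import cPickle as pickle
import time

STATS_FILE = './dmrlink_stats.pickle'   # assumption: REPORT_PATH + 'dmrlink_stats.pickle'

while True:
    try:
        with open(STATS_FILE, 'rb') as f:
            systems = pickle.load(f)
        for name in systems:
            master = systems[name]['MASTER']['STATUS']
            print('%s: master connected=%s, peers=%s' % (
                name, master['CONNECTED'], systems[name]['LOCAL']['NUM_PEERS']))
    except IOError as detail:
        print('stats file not readable yet: %s' % detail)
    time.sleep(30)   # assumption: poll every 30 seconds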
dmrlink_config.py (new executable file, 221 additions)

@@ -0,0 +1,221 @@
#!/usr/bin/env python
#
###############################################################################
# Copyright (C) 2016 Cortney T. Buffington, N0MJS <n0mjs@me.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
###############################################################################
import ConfigParser
import sys
from socket import gethostbyname
# Does anybody read this stuff? There's a PEP somewhere that says I should do this.
__author__ = 'Cortney T. Buffington, N0MJS'
__copyright__ = 'Copyright (c) 2016 Cortney T. Buffington, N0MJS and the K0USY Group'
__license__ = 'GNU GPLv3'
__maintainer__ = 'Cort Buffington, N0MJS'
__email__ = 'n0mjs@me.com'
def build_config(_config_file):
    config = ConfigParser.ConfigParser()

    if not config.read(_config_file):
        sys.exit('Configuration file \''+_config_file+'\' is not a valid configuration file! Exiting...')

    CONFIG = {}
    CONFIG['GLOBAL'] = {}
    CONFIG['REPORTS'] = {}
    CONFIG['LOGGER'] = {}
    CONFIG['ALIASES'] = {}
    CONFIG['SYSTEMS'] = {}

    try:
        for section in config.sections():
            if section == 'GLOBAL':
                CONFIG['GLOBAL'].update({
                    'PATH': config.get(section, 'PATH')
                })

            elif section == 'REPORTS':
                CONFIG['REPORTS'].update({
                    'REPORT_NETWORKS': config.get(section, 'REPORT_NETWORKS'),
                    'REPORT_INTERVAL': config.getint(section, 'REPORT_INTERVAL'),
                    'REPORT_PATH': config.get(section, 'REPORT_PATH'),
                    'REPORT_PORT': config.get(section, 'REPORT_PORT'),
                    'REPORT_CLIENTS': config.get(section, 'REPORT_CLIENTS').split(','),
                    'PRINT_PEERS_INC_MODE': config.getboolean(section, 'PRINT_PEERS_INC_MODE'),
                    'PRINT_PEERS_INC_FLAGS': config.getboolean(section, 'PRINT_PEERS_INC_FLAGS')
                })
                if CONFIG['REPORTS']['REPORT_PORT']:
                    CONFIG['REPORTS']['REPORT_PORT'] = int(CONFIG['REPORTS']['REPORT_PORT'])

            elif section == 'LOGGER':
                CONFIG['LOGGER'].update({
                    'LOG_FILE': config.get(section, 'LOG_FILE'),
                    'LOG_HANDLERS': config.get(section, 'LOG_HANDLERS'),
                    'LOG_LEVEL': config.get(section, 'LOG_LEVEL'),
                    'LOG_NAME': config.get(section, 'LOG_NAME')
                })

            elif section == 'ALIASES':
                CONFIG['ALIASES'].update({
                    'TRY_DOWNLOAD': config.getboolean(section, 'TRY_DOWNLOAD'),
                    'PATH': config.get(section, 'PATH'),
                    'PEER_FILE': config.get(section, 'PEER_FILE'),
                    'SUBSCRIBER_FILE': config.get(section, 'SUBSCRIBER_FILE'),
                    'TGID_FILE': config.get(section, 'TGID_FILE'),
                    'PEER_URL': config.get(section, 'PEER_URL'),
                    'SUBSCRIBER_URL': config.get(section, 'SUBSCRIBER_URL'),
                    'STALE_TIME': config.getint(section, 'STALE_DAYS') * 86400,
                })

            elif config.getboolean(section, 'ENABLED'):
                CONFIG['SYSTEMS'].update({section: {'LOCAL': {}, 'MASTER': {}, 'PEERS': {}}})

                CONFIG['SYSTEMS'][section]['LOCAL'].update({
                    # In case we want to keep config, but not actually connect to the network
                    'ENABLED': config.getboolean(section, 'ENABLED'),

                    # These items are used to create the MODE byte
                    'PEER_OPER': config.getboolean(section, 'PEER_OPER'),
                    'IPSC_MODE': config.get(section, 'IPSC_MODE'),
                    'TS1_LINK': config.getboolean(section, 'TS1_LINK'),
                    'TS2_LINK': config.getboolean(section, 'TS2_LINK'),
                    'MODE': '',

                    # These items are used to create the multi-byte FLAGS field
                    'AUTH_ENABLED': config.getboolean(section, 'AUTH_ENABLED'),
                    'CSBK_CALL': config.getboolean(section, 'CSBK_CALL'),
                    'RCM': config.getboolean(section, 'RCM'),
                    'CON_APP': config.getboolean(section, 'CON_APP'),
                    'XNL_CALL': config.getboolean(section, 'XNL_CALL'),
                    'XNL_MASTER': config.getboolean(section, 'XNL_MASTER'),
                    'DATA_CALL': config.getboolean(section, 'DATA_CALL'),
                    'VOICE_CALL': config.getboolean(section, 'VOICE_CALL'),
                    'MASTER_PEER': config.getboolean(section, 'MASTER_PEER'),
                    'FLAGS': '',

                    # Things we need to know to connect and be a peer in this IPSC
                    'RADIO_ID': hex(int(config.get(section, 'RADIO_ID')))[2:].rjust(8,'0').decode('hex'),
                    'IP': gethostbyname(config.get(section, 'IP')),
                    'PORT': config.getint(section, 'PORT'),
                    'ALIVE_TIMER': config.getint(section, 'ALIVE_TIMER'),
                    'MAX_MISSED': config.getint(section, 'MAX_MISSED'),
                    'AUTH_KEY': (config.get(section, 'AUTH_KEY').rjust(40,'0')).decode('hex'),
                    'GROUP_HANGTIME': config.getint(section, 'GROUP_HANGTIME'),
                    'NUM_PEERS': 0,
                })

                # Master means things we need to know about the master peer of the network
                CONFIG['SYSTEMS'][section]['MASTER'].update({
                    'RADIO_ID': '\x00\x00\x00\x00',
                    'MODE': '\x00',
                    'MODE_DECODE': '',
                    'FLAGS': '\x00\x00\x00\x00',
                    'FLAGS_DECODE': '',
                    'STATUS': {
                        'CONNECTED': False,
                        'PEER_LIST': False,
                        'KEEP_ALIVES_SENT': 0,
                        'KEEP_ALIVES_MISSED': 0,
                        'KEEP_ALIVES_OUTSTANDING': 0,
                        'KEEP_ALIVES_RECEIVED': 0,
                        'KEEP_ALIVE_RX_TIME': 0
                    },
                    'IP': '',
                    'PORT': ''
                })
                if not CONFIG['SYSTEMS'][section]['LOCAL']['MASTER_PEER']:
                    CONFIG['SYSTEMS'][section]['MASTER'].update({
                        'IP': gethostbyname(config.get(section, 'MASTER_IP')),
                        'PORT': config.getint(section, 'MASTER_PORT')
                    })

                # Temporary locations for building MODE and FLAG data
                MODE_BYTE = 0
                FLAG_1 = 0
                FLAG_2 = 0

                # Construct and store the MODE field
                if CONFIG['SYSTEMS'][section]['LOCAL']['PEER_OPER']:
                    MODE_BYTE |= 1 << 6
                if CONFIG['SYSTEMS'][section]['LOCAL']['IPSC_MODE'] == 'ANALOG':
                    MODE_BYTE |= 1 << 4
                elif CONFIG['SYSTEMS'][section]['LOCAL']['IPSC_MODE'] == 'DIGITAL':
                    MODE_BYTE |= 1 << 5
                if CONFIG['SYSTEMS'][section]['LOCAL']['TS1_LINK']:
                    MODE_BYTE |= 1 << 3
                else:
                    MODE_BYTE |= 1 << 2
                if CONFIG['SYSTEMS'][section]['LOCAL']['TS2_LINK']:
                    MODE_BYTE |= 1 << 1
                else:
                    MODE_BYTE |= 1 << 0
                CONFIG['SYSTEMS'][section]['LOCAL']['MODE'] = chr(MODE_BYTE)

                # Construct and store the FLAGS field
                if CONFIG['SYSTEMS'][section]['LOCAL']['CSBK_CALL']:
                    FLAG_1 |= 1 << 7
                if CONFIG['SYSTEMS'][section]['LOCAL']['RCM']:
                    FLAG_1 |= 1 << 6
                if CONFIG['SYSTEMS'][section]['LOCAL']['CON_APP']:
                    FLAG_1 |= 1 << 5

                if CONFIG['SYSTEMS'][section]['LOCAL']['XNL_CALL']:
                    FLAG_2 |= 1 << 7
                if CONFIG['SYSTEMS'][section]['LOCAL']['XNL_CALL'] and CONFIG['SYSTEMS'][section]['LOCAL']['XNL_MASTER']:
                    FLAG_2 |= 1 << 6
                elif CONFIG['SYSTEMS'][section]['LOCAL']['XNL_CALL'] and not CONFIG['SYSTEMS'][section]['LOCAL']['XNL_MASTER']:
                    FLAG_2 |= 1 << 5
                if CONFIG['SYSTEMS'][section]['LOCAL']['AUTH_ENABLED']:
                    FLAG_2 |= 1 << 4
                if CONFIG['SYSTEMS'][section]['LOCAL']['DATA_CALL']:
                    FLAG_2 |= 1 << 3
                if CONFIG['SYSTEMS'][section]['LOCAL']['VOICE_CALL']:
                    FLAG_2 |= 1 << 2
                if CONFIG['SYSTEMS'][section]['LOCAL']['MASTER_PEER']:
                    FLAG_2 |= 1 << 0
                CONFIG['SYSTEMS'][section]['LOCAL']['FLAGS'] = '\x00\x00'+chr(FLAG_1)+chr(FLAG_2)

    except ConfigParser.Error, err:
        sys.exit('Could not parse configuration file, exiting...')

    return CONFIG
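For reference, the MODE byte assembled above packs PEER_OPER into bit 6, DIGITAL/ANALOG into bits 5/4, and the TS1/TS2 link state into bits 3/2 and 1/0. A small illustrative decoder follows; decode_mode is hypothetical and not part of dmrlink_config.py, but the bit positions match the shifts used above.

# Illustrative helper (not part of DMRlink): unpack the MODE byte that
# build_config assembles above. Bit numbering matches the shifts used there.
def decode_mode(_mode_byte):
    mode = ord(_mode_byte)
    return {
        'PEER_OPER': bool(mode & (1 << 6)),   # peer operational
        'DIGITAL':   bool(mode & (1 << 5)),   # IPSC_MODE == 'DIGITAL'
        'ANALOG':    bool(mode & (1 << 4)),   # IPSC_MODE == 'ANALOG'
        'TS1_LINK':  bool(mode & (1 << 3)),   # bit 2 set means TS1 unlinked
        'TS2_LINK':  bool(mode & (1 << 1)),   # bit 0 set means TS2 unlinked
    }

# Example: PEER_OPER, DIGITAL, both timeslots linked -> 0b01101010 (0x6A)
print(decode_mode(chr(0b01101010)))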
# Used to run this file direclty and print the config,
# which might be useful for debugging
if __name__ == '__main__':
    import sys
    import os
    import argparse
    from pprint import pprint

    # Change the current directory to the location of the application
    os.chdir(os.path.dirname(os.path.realpath(sys.argv[0])))

    # CLI argument parser - handles picking up the config file from the command line, and sending a "help" message
    parser = argparse.ArgumentParser()
    parser.add_argument('-c', '--config', action='store', dest='CONFIG_FILE', help='/full/path/to/config.file (usually dmrlink.cfg)')
    cli_args = parser.parse_args()

    # Ensure we have a path for the config file, if one wasn't specified, then use the execution directory
    if not cli_args.CONFIG_FILE:
        cli_args.CONFIG_FILE = os.path.dirname(os.path.abspath(__file__))+'/dmrlink.cfg'

    pprint(build_config(cli_args.CONFIG_FILE))
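The RADIO_ID and AUTH_KEY entries built above are stored as packed byte strings using Python 2's str.decode('hex'). A quick worked example of that idiom; the radio ID 312000 and key '1234' are example values only, not defaults from this repository.

# Worked example of the packing used in build_config (Python 2 only:
# str.decode('hex') does not exist in Python 3).
radio_id = 312000                                   # example value
packed = hex(radio_id)[2:].rjust(8, '0').decode('hex')
print(repr(packed))                                 # '\x00\x04\xc2\xc0'

auth_key = '1234'                                   # example AUTH_KEY from dmrlink.cfg
packed_key = auth_key.rjust(40, '0').decode('hex')  # 20-byte key, left-padded with zeros
print(len(packed_key))                              # 20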
dmrlink_log.py (new executable file, 87 additions)

@@ -0,0 +1,87 @@
#!/usr/bin/env python
#
###############################################################################
# Copyright (C) 2016 Cortney T. Buffington, N0MJS <n0mjs@me.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
###############################################################################
import logging
from logging.config import dictConfig
# Does anybody read this stuff? There's a PEP somewhere that says I should do this.
__author__ = 'Cortney T. Buffington, N0MJS'
__copyright__ = 'Copyright (c) 2016 Cortney T. Buffington, N0MJS and the K0USY Group'
__license__ = 'GNU GPLv3'
__maintainer__ = 'Cort Buffington, N0MJS'
__email__ = 'n0mjs@me.com'
def config_logging(_logger):
    dictConfig({
        'version': 1,
        'disable_existing_loggers': False,
        'filters': {
        },
        'formatters': {
            'verbose': {
                'format': '%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s'
            },
            'timed': {
                'format': '%(levelname)s %(asctime)s %(message)s'
            },
            'simple': {
                'format': '%(levelname)s %(message)s'
            },
            'syslog': {
                'format': '%(name)s (%(process)d): %(levelname)s %(message)s'
            }
        },
        'handlers': {
            'null': {
                'class': 'logging.NullHandler'
            },
            'console': {
                'class': 'logging.StreamHandler',
                'formatter': 'simple'
            },
            'console-timed': {
                'class': 'logging.StreamHandler',
                'formatter': 'timed'
            },
            'file': {
                'class': 'logging.FileHandler',
                'formatter': 'simple',
                'filename': _logger['LOG_FILE'],
            },
            'file-timed': {
                'class': 'logging.FileHandler',
                'formatter': 'timed',
                'filename': _logger['LOG_FILE'],
            },
            'syslog': {
                'class': 'logging.handlers.SysLogHandler',
                'formatter': 'syslog',
            }
        },
        'loggers': {
            _logger['LOG_NAME']: {
                'handlers': _logger['LOG_HANDLERS'].split(','),
                'level': _logger['LOG_LEVEL'],
                'propagate': True,
            }
        }
    })
    return logging.getLogger(_logger['LOG_NAME'])
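A minimal usage sketch for config_logging, assuming a LOGGER dict shaped like the one build_config() produces from the [LOGGER] section of dmrlink.cfg; the log file path and logger name below are placeholders.

# Minimal sketch (assumed values): hand config_logging() a dict with the same
# keys that build_config() reads from the [LOGGER] section of dmrlink.cfg.
from dmrlink_log import config_logging

LOGGER = {
    'LOG_FILE': '/tmp/dmrlink.log',        # placeholder path
    'LOG_HANDLERS': 'console-timed,file',  # comma-separated; split by config_logging
    'LOG_LEVEL': 'DEBUG',
    'LOG_NAME': 'DMRlink'                  # placeholder logger name
}

logger = config_logging(LOGGER)
logger.info('logging configured with handlers: %s', LOGGER['LOG_HANDLERS'])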