Merge branch 'master' into extrastats

extrastats
Simon 3 years ago
commit f7d4b816f1

@ -38,7 +38,7 @@ build-extrastats: # This job runs in the build stage, which runs first.
script:
- echo "Compiling the code..."
- cd docker-configs
- docker buildx build --no-cache -f Dockerfile-proxy -t hacknix/freedmr:extrastats-development-latest -t gitlab.hacknix.net:5050/hacknix/freedmr:extrastats-development-latest --platform linux/amd64 --push .
- docker buildx build --no-cache -f Dockerfile-proxy -t hacknix/freedmr:extrastats-development-latest -t gitlab.hacknix.net:5050/hacknix/freedmr:extrastats-development-latest --platform linux/arm64 --push .

@ -112,7 +112,10 @@ PEER_URL: https://www.radioid.net/static/rptrs.json
SUBSCRIBER_URL: https://www.radioid.net/static/users.json
TGID_URL: http://downloads.freedmr.uk/downloads/talkgroup_ids.json
LOCAL_SUBSCRIBER_FILE: local_subcriber_ids.json
STALE_DAYS: 7
STALE_DAYS: 1
SERVER_ID_URL: http://downloads.freedmr.uk/downloads/FreeDMR_Hosts.csv
SERVER_ID_FILE: server_ids.tsv
#Control server shared allstar instance via dial / AMI
[ALLSTAR]

@ -36,8 +36,11 @@ PEER_URL: https://www.radioid.net/static/rptrs.json
SUBSCRIBER_URL: https://www.radioid.net/static/users.json
TGID_URL: http://downloads.freedmr.uk/downloads/talkgroup_ids.json
LOCAL_SUBSCRIBER_FILE: local_subcriber_ids.json
STALE_DAYS: 7
STALE_DAYS: 1
SUB_MAP_FILE:
SERVER_ID_URL: http://downloads.freedmr.uk/downloads/FreeDMR_Hosts.csv
SERVER_ID_FILE: server_ids.tsv
#Control server shared allstar instance via dial / AMI
[ALLSTAR]

@ -941,7 +941,16 @@ if __name__ == '__main__':
signal.signal(sig, sig_handler)
# Create the name-number mapping dictionaries
peer_ids, subscriber_ids, talkgroup_ids, local_subscriber_ids = mk_aliases(CONFIG)
peer_ids, subscriber_ids, talkgroup_ids, local_subscriber_ids,server_ids = mk_aliases(CONFIG)
#Add special IDs to DB
subscriber_ids[900999] = 'D-APRS'
subscriber_ids[4294967295] = 'SC'
CONFIG['_SUB_IDS'] = subscriber_ids
CONFIG['_PEER_IDS'] = peer_ids
CONFIG['_LOCAL_SUBSCRIBER_IDS'] = local_subscriber_ids
CONFIG['_SERVER_IDS'] = server_ids
# Import the ruiles file as a module, and create BRIDGES from it
spec = importlib.util.spec_from_file_location("module.name", cli_args.RULES_FILE)

@ -39,6 +39,7 @@ import importlib.util
import re
import copy
from setproctitle import setproctitle
from collections import deque
#from crccheck.crc import Crc32
from hashlib import blake2b
@ -64,10 +65,6 @@ from read_ambe import readAMBE
#Remap some words for certain languages
from i8n_voice_map import voiceMap
#MySQL
from mysql_config import useMYSQL
# Stuff for socket reporting
import pickle
# REMOVE LATER from datetime import datetime
@ -83,12 +80,9 @@ from binascii import b2a_hex as ahex
from AMI import AMI
##from hmac import new as hmac_new, compare_digest
##from hashlib import sha256, hash
# Does anybody read this stuff? There's a PEP somewhere that says I should do this.
__author__ = 'Cortney T. Buffington, N0MJS, Forked by Simon Adlem - G7RZU'
__copyright__ = 'Copyright (c) 2016-2019 Cortney T. Buffington, N0MJS and the K0USY Group, Simon Adlem, G7RZU 2020,2021'
__copyright__ = 'Copyright (c) 2016-2019 Cortney T. Buffington, N0MJS and the K0USY Group, Simon Adlem, G7RZU 2020,2021, 2022'
__credits__ = 'Colin Durbridge, G4EML, Steve Zingman, N4IRS; Mike Zingman, N4IRR; Jonathan Naylor, G4KLX; Hans Barthen, DL5DI; Torsten Shultze, DG1HT; Jon Lee, G4TSN; Norman Williams, M6NBP, Eric Craw KF7EEL'
__license__ = 'GNU GPLv3'
__maintainer__ = 'Simon Adlem G7RZU'
@ -230,12 +224,12 @@ def make_stat_bridge(_tgid):
def make_default_reflector(reflector,_tmout,system):
bridge = '#'+str(reflector)
bridge = ''.join(['#',str(reflector)])
#_tmout = CONFIG['SYSTEMS'][system]['DEFAULT_UA_TIMER']
if bridge not in BRIDGES:
BRIDGES[bridge] = []
make_single_reflector(bytes_3(reflector),_tmout, system)
bridgetemp = []
bridgetemp = deque()
for bridgesystem in BRIDGES[bridge]:
if bridgesystem['SYSTEM'] == system and bridgesystem['TS'] == 2:
bridgetemp.append({'SYSTEM': system, 'TS': 2, 'TGID': bytes_3(9),'ACTIVE': True,'TIMEOUT': _tmout * 60,'TO_TYPE': 'OFF','OFF': [],'ON': [bytes_3(reflector),],'RESET': [], 'TIMER': time() + (_tmout * 60)})
@ -248,7 +242,7 @@ def make_static_tg(tg,ts,_tmout,system):
#_tmout = CONFIG['SYSTEMS'][system]['DEFAULT_UA_TIMER']
if str(tg) not in BRIDGES:
make_single_bridge(bytes_3(tg),system,ts,_tmout)
bridgetemp = []
bridgetemp = deque()
for bridgesystem in BRIDGES[str(tg)]:
if bridgesystem['SYSTEM'] == system and bridgesystem['TS'] == ts:
bridgetemp.append({'SYSTEM': system, 'TS': ts, 'TGID': bytes_3(tg),'ACTIVE': True,'TIMEOUT': _tmout * 60,'TO_TYPE': 'OFF','OFF': [],'ON': [bytes_3(tg),],'RESET': [], 'TIMER': time() + (_tmout * 60)})
@ -259,7 +253,7 @@ def make_static_tg(tg,ts,_tmout,system):
def reset_static_tg(tg,ts,_tmout,system):
#_tmout = CONFIG['SYSTEMS'][system]['DEFAULT_UA_TIMER']
bridgetemp = []
bridgetemp = deque()
try:
for bridgesystem in BRIDGES[str(tg)]:
if bridgesystem['SYSTEM'] == system and bridgesystem['TS'] == ts:
@ -273,12 +267,12 @@ def reset_static_tg(tg,ts,_tmout,system):
return
def reset_default_reflector(reflector,_tmout,system):
bridge = '#'+str(reflector)
bridge = ''.join(['#',str(reflector)])
#_tmout = CONFIG['SYSTEMS'][system]['DEFAULT_UA_TIMER']
if bridge not in BRIDGES:
BRIDGES[bridge] = []
make_single_reflector(bytes_3(reflector),_tmout, system)
bridgetemp = []
bridgetemp = deque()
for bridgesystem in BRIDGES[bridge]:
if bridgesystem['SYSTEM'] == system and bridgesystem['TS'] == 2:
bridgetemp.append({'SYSTEM': system, 'TS': 2, 'TGID': bytes_3(9),'ACTIVE': False,'TIMEOUT': _tmout * 60,'TO_TYPE': 'ON','OFF': [],'ON': [bytes_3(reflector),],'RESET': [], 'TIMER': time() + (_tmout * 60)})
@ -288,7 +282,7 @@ def reset_default_reflector(reflector,_tmout,system):
def make_single_reflector(_tgid,_tmout,_sourcesystem):
_tgid_s = str(int_id(_tgid))
_bridge = '#' + _tgid_s
_bridge = ''.join(['#',_tgid_s])
#1 min timeout for echo
if _tgid_s == '9990':
_tmout = 1
@ -320,7 +314,7 @@ def remove_bridge_system(system):
def rule_timer_loop():
logger.debug('(ROUTER) routerHBP Rule timer loop started')
_now = time()
_remove_bridges = []
_remove_bridges = deque()
for _bridge in BRIDGES:
_bridge_used = False
for _system in BRIDGES[_bridge]:
@ -370,7 +364,7 @@ def rule_timer_loop():
def statTrimmer():
logger.debug('(ROUTER) STAT trimmer loop started')
_remove_bridges = []
_remove_bridges = deque()
for _bridge in BRIDGES:
_bridge_stat = False
_in_use = False
@ -402,18 +396,19 @@ def kaReporting():
#Write SUB_MAP to disk
def subMapWrite():
try:
_fh = open(CONFIG['ALIASES']['SUB_MAP_FILE'],'wb')
_fh = open(CONFIG['ALIASES']['PATH'] + CONFIG['ALIASES']['SUB_MAP_FILE'],'wb')
pickle.dump(SUB_MAP,_fh)
_fh.close()
logger.info('(SUBSCRIBER) Writing SUB_MAP to disk')
except:
logger.warning('(SUBSCRIBER) Cannot write SUB_MAP to file')
#Subscriber Map trimmer loop
def SubMapTrimmer():
logger.debug('(SUBSCRIBER) Subscriber Map trimmer loop started')
_sub_time = time()
_remove_list = []
_remove_list = deque()
for _subscriber in SUB_MAP:
if SUB_MAP[_subscriber][2] < (_sub_time - 86400):
_remove_list.append(_subscriber)
@ -423,8 +418,6 @@ def SubMapTrimmer():
if CONFIG['ALIASES']['SUB_MAP_FILE']:
subMapWrite()
# run this every 10 seconds to trim stream ids
def stream_trimmer_loop():
@ -467,8 +460,8 @@ def stream_trimmer_loop():
# OBP systems
# We can't delete items from a dicationry that's being iterated, so we have to make a temporarly list of entrys to remove later
if CONFIG['SYSTEMS'][system]['MODE'] == 'OPENBRIDGE':
remove_list = []
fin_list = []
remove_list = deque()
fin_list = deque()
for stream_id in systems[system].STATUS:
#if stream already marked as finished, just remove it
@ -531,7 +524,7 @@ def stream_trimmer_loop():
removed = systems[system].STATUS.pop(stream_id)
try:
_bcsq_remove = []
_bcsq_remove = deque()
for tgid in _sysconfig['_bcsq']:
if _sysconfig['_bcsq'][tgid] == stream_id:
_bcsq_remove.append(tgid)
@ -627,7 +620,7 @@ def playFileOnRequest(self,fileNumber):
sleep(1)
_say = []
try:
_say.append(AMBEobj.readSingleFile('/'+_lang+'/ondemand/'+str(fileNumber)+'.ambe'))
_say.append(AMBEobj.readSingleFile(''.join(['/',_lang,'/ondemand/',str(fileNumber),'.ambe'])))
except IOError:
logger.warning('(%s) cannot read file for number %s',system,fileNumber)
return
@ -652,20 +645,16 @@ def threadIdent():
logger.debug('(IDENT) starting ident thread')
reactor.callInThread(ident)
def threadedMysql():
logger.debug('(MYSQL) Starting MySQL thread')
reactor.callInThread(mysqlGetConfig)
def threadAlias():
logger.debug('(ALIAS) starting alias thread')
reactor.callInThread(aliasb)
def setAlias(_peer_ids,_subscriber_ids, _talkgroup_ids, _local_subscriber_ids):
peer_ids, subscriber_ids, talkgroup_ids,local_subscriber_ids = _peer_ids, _subscriber_ids, _talkgroup_ids, _local_subscriber_ids
def setAlias(_peer_ids,_subscriber_ids, _talkgroup_ids, _local_subscriber_ids, _server_ids):
peer_ids, subscriber_ids, talkgroup_ids,local_subscriber_ids,server_ids = _peer_ids, _subscriber_ids, _talkgroup_ids, _local_subscriber_ids,_server_ids
def aliasb():
_peer_ids, _subscriber_ids, _talkgroup_ids, _local_subscriber_ids = mk_aliases(CONFIG)
reactor.callFromThread(setAlias,_peer_ids, _subscriber_ids, _talkgroup_ids, _local_subscriber_ids)
_peer_ids, _subscriber_ids, _talkgroup_ids, _local_subscriber_ids, _server_ids = mk_aliases(CONFIG)
reactor.callFromThread(setAlias,_peer_ids, _subscriber_ids, _talkgroup_ids, _local_subscriber_ids, _server_ids)
def ident():
for system in systems:
@ -684,11 +673,18 @@ def ident():
logger.debug("(IDENT) %s System has no peers or no recorded callsign (%s), skipping",system,_callsign)
continue
_slot = systems[system].STATUS[2]
#If slot is idle for RX and TX
#print("RX:"+str(_slot['RX_TYPE'])+" TX:"+str(_slot['TX_TYPE'])+" TIME:"+str(time() - _slot['TX_TIME']))
if (_slot['RX_TYPE'] == HBPF_SLT_VTERM) and (_slot['TX_TYPE'] == HBPF_SLT_VTERM) and (time() - _slot['TX_TIME'] > CONFIG['SYSTEMS'][system]['GROUP_HANGTIME']):
#_stream_id = hex_str_4(1234567)
logger.info('(%s) System idle. Sending voice ident',system)
#If slot is idle for RX and TX for over 30 seconds
if (_slot['RX_TYPE'] == HBPF_SLT_VTERM) and (_slot['TX_TYPE'] == HBPF_SLT_VTERM) and (time() - _slot['TX_TIME'] > 30 and time() - _slot['RX_TIME'] > 30):
_all_call = bytes_3(16777215)
_source_id= bytes_3(5000)
_dst_id = b''
if 'OVERRIDE_IDENT_TG' in CONFIG['SYSTEMS'][system] and CONFIG['SYSTEMS'][system]['OVERRIDE_IDENT_TG'] and int(CONFIG['SYSTEMS'][system]['OVERRIDE_IDENT_TG']) > 0 and int(CONFIG['SYSTEMS'][system]['OVERRIDE_IDENT_TG'] < 16777215):
_dst_id = bytes_3(CONFIG['SYSTEMS'][system]['OVERRIDE_IDENT_TG'])
else:
_dst_id = _all_call
logger.info('(%s) %s System idle. Sending voice ident to TG %s',system,_callsign,get_alias(_dst_id,talkgroup_ids))
_say = [words[_lang]['silence']]
_say.append(words[_lang]['silence'])
_say.append(words[_lang]['silence'])
@ -717,9 +713,9 @@ def ident():
#test
#_say.append(AMBEobj.readSingleFile('alpha.ambe'))
_all_call = bytes_3(16777215)
_source_id= bytes_3(5000)
speech = pkt_gen(_source_id, _all_call, bytes_4(16777215), 1, _say)
_peer_id = CONFIG['GLOBAL']['SERVER_ID']
speech = pkt_gen(_source_id, _dst_id, _peer_id, 1, _say)
sleep(1)
_slot = systems[system].STATUS[2]
@ -733,11 +729,15 @@ def ident():
_stream_id = pkt[16:20]
_pkt_time = time()
reactor.callFromThread(sendVoicePacket,systems[system],pkt,_source_id,_all_call,_slot)
reactor.callFromThread(sendVoicePacket,systems[system],pkt,_source_id,_dst_id,_slot)
def options_config():
logger.debug('(OPTIONS) Running options parser')
for _system in CONFIG['SYSTEMS']:
if '_reset' in CONFIG['SYSTEMS'][_system] and CONFIG['SYSTEMS'][_system]['_reset']:
logger.debug('(OPTIONS) Bridge reset for %s - no peers',_system)
remove_bridge_system(_system)
CONFIG['SYSTEMS'][_system]['_reset'] = False
try:
if CONFIG['SYSTEMS'][_system]['MODE'] != 'MASTER':
continue
@ -765,7 +765,13 @@ def options_config():
_options['TS1_STATIC'] = _options.pop('TS1')
if 'TS2' in _options:
_options['TS2_STATIC'] = _options.pop('TS2')
if 'IDENTTG' in _options:
_options['OVERRIDE_IDENT_TG'] = _options.pop('IDENTTG')
elif 'VOICETG' in _options:
_options['OVERRIDE_IDENT_TG'] = _options.pop('VOICETG')
if 'IDENT' in _options:
_options['VOICE'] = _options.pop('IDENT')
#DMR+ style options
if 'StartRef' in _options:
_options['DEFAULT_REFLECTOR'] = _options.pop('StartRef')
@ -774,39 +780,39 @@ def options_config():
if 'TS1_1' in _options:
_options['TS1_STATIC'] = _options.pop('TS1_1')
if 'TS1_2' in _options:
_options['TS1_STATIC'] = _options['TS1_STATIC'] + ',' + _options.pop('TS1_2')
_options['TS1_STATIC'] = ''.join([_options['TS1_STATIC'],',',_options.pop('TS1_2')])
if 'TS1_3' in _options:
_options['TS1_STATIC'] = _options['TS1_STATIC'] + ',' + _options.pop('TS1_3')
_options['TS1_STATIC'] = ''.join([_options['TS1_STATIC'],',',_options.pop('TS1_3')])
if 'TS1_4' in _options:
_options['TS1_STATIC'] = _options['TS1_STATIC'] + ',' + _options.pop('TS1_4')
_options['TS1_STATIC'] = ''.join([_options['TS1_STATIC'],',',_options.pop('TS1_4')])
if 'TS1_5' in _options:
_options['TS1_STATIC'] = _options['TS1_STATIC'] + ',' + _options.pop('TS1_5')
_options['TS1_STATIC'] = ''.join([_options['TS1_STATIC'],',',_options.pop('TS1_5')])
if 'TS1_6' in _options:
_options['TS1_STATIC'] = _options['TS1_STATIC'] + ',' + _options.pop('TS1_6')
_options['TS1_STATIC'] = ''.join([_options['TS1_STATIC'],',',_options.pop('TS1_6')])
if 'TS1_7' in _options:
_options['TS1_STATIC'] = _options['TS1_STATIC'] + ',' + _options.pop('TS1_7')
_options['TS1_STATIC'] = ''.join([_options['TS1_STATIC'],',',_options.pop('TS1_7')])
if 'TS1_8' in _options:
_options['TS1_STATIC'] = _options['TS1_STATIC'] + ',' + _options.pop('TS1_8')
_options['TS1_STATIC'] = ''.join([_options['TS1_STATIC'],',',_options.pop('TS1_8')])
if 'TS1_9' in _options:
_options['TS1_STATIC'] = _options['TS1_STATIC'] + ',' + _options.pop('TS1_9')
_options['TS1_STATIC'] = ''.join([_options['TS1_STATIC'],',',_options.pop('TS1_9')])
if 'TS2_1' in _options:
_options['TS2_STATIC'] = _options.pop('TS2_1')
if 'TS2_2' in _options:
_options['TS2_STATIC'] = _options['TS2_STATIC'] + ',' + _options.pop('TS2_2')
_options['TS2_STATIC'] = ''.join([_options['TS2_STATIC'],',', _options.pop('TS2_2')])
if 'TS2_3' in _options:
_options['TS2_STATIC'] = _options['TS2_STATIC'] + ',' + _options.pop('TS2_3')
_options['TS2_STATIC'] = ''.join([_options['TS2_STATIC'],',',_options.pop('TS2_3')])
if 'TS2_4' in _options:
_options['TS2_STATIC'] = _options['TS2_STATIC'] + ',' + _options.pop('TS2_4')
_options['TS2_STATIC'] = ''.join([_options['TS2_STATIC'],',',_options.pop('TS2_4')])
if 'TS2_5' in _options:
_options['TS2_STATIC'] = _options['TS2_STATIC'] + ',' + _options.pop('TS2_5')
_options['TS2_STATIC'] = ''.join([_options['TS2_STATIC'],',',_options.pop('TS2_5')])
if 'TS2_6' in _options:
_options['TS2_STATIC'] = _options['TS2_STATIC'] + ',' + _options.pop('TS2_6')
_options['TS2_STATIC'] = ''.join([_options['TS2_STATIC'],',',_options.pop('TS2_6')])
if 'TS2_7' in _options:
_options['TS2_STATIC'] = _options['TS2_STATIC'] + ',' + _options.pop('TS2_7')
_options['TS2_STATIC'] = ''.join([_options['TS2_STATIC'],',',_options.pop('TS2_7')])
if 'TS2_8' in _options:
_options['TS2_STATIC'] = _options['TS2_STATIC'] + ',' + _options.pop('TS2_8')
_options['TS2_STATIC'] = ''.join([_options['TS2_STATIC'],',',_options.pop('TS2_8')])
if 'TS2_9' in _options:
_options['TS2_STATIC'] = _options['TS2_STATIC'] + ',' + _options.pop('TS2_9')
_options['TS2_STATIC'] = ''.join([_options['TS2_STATIC'],',',_options.pop('TS2_9')])
if 'UserLink' in _options:
_options.pop('UserLink')
@ -819,6 +825,9 @@ def options_config():
if 'DEFAULT_REFLECTOR' not in _options:
_options['DEFAULT_REFLECTOR'] = 0
if 'OVERRIDE_IDENT_TG' not in _options:
_options['OVERRIDE_IDENT_TG'] = False
if 'DEFAULT_UA_TIMER' not in _options:
_options['DEFAULT_UA_TIMER'] = CONFIG['SYSTEMS'][_system]['DEFAULT_UA_TIMER']
@ -827,10 +836,15 @@ def options_config():
CONFIG['SYSTEMS'][_system]['VOICE_IDENT'] = bool(int(_options['VOICE']))
logger.debug("(OPTIONS) %s - Setting voice ident to %s",_system,CONFIG['SYSTEMS'][_system]['VOICE_IDENT'])
if 'OVERRIDE_IDENT_TG' in _options and _options['OVERRIDE_IDENT_TG'] and (CONFIG['SYSTEMS'][_system]['OVERRIDE_IDENT_TG'] != int(_options['OVERRIDE_IDENT_TG'])):
CONFIG['SYSTEMS'][_system]['OVERRIDE_IDENT_TG'] = int(_options['OVERRIDE_IDENT_TG'])
logger.debug("(OPTIONS) %s - Setting OVERRIDE_IDENT_TG to %s",_system,CONFIG['SYSTEMS'][_system]['OVERRIDE_IDENT_TG'])
if 'LANG' in _options and _options['LANG'] in words and _options['LANG'] != CONFIG['SYSTEMS'][_system]['ANNOUNCEMENT_LANGUAGE'] :
CONFIG['SYSTEMS'][_system]['ANNOUNCEMENT_LANGUAGE'] = _options['LANG']
logger.debug("(OPTIONS) %s - Setting voice language to %s",_system,CONFIG['SYSTEMS'][_system]['ANNOUNCEMENT_LANGUAGE'])
if 'SINGLE' in _options and (CONFIG['SYSTEMS'][_system]['SINGLE_MODE'] != bool(int(_options['SINGLE']))):
CONFIG['SYSTEMS'][_system]['SINGLE_MODE'] = bool(int(_options['SINGLE']))
logger.debug("(OPTIONS) %s - Setting SINGLE_MODE to %s",_system,CONFIG['SYSTEMS'][_system]['SINGLE_MODE'])
@ -857,9 +871,14 @@ def options_config():
continue
if isinstance(_options['DEFAULT_REFLECTOR'], str) and not _options['DEFAULT_REFLECTOR'].isdigit():
logger.debug('(OPTIONS) %s - DEFAULT_UA_TIMER is not an integer, ignoring',_system)
logger.debug('(OPTIONS) %s - DEFAULT_REFLECTOR is not an integer, ignoring',_system)
continue
if isinstance(_options['OVERRIDE_IDENT_TG'], str) and not _options['OVERRIDE_IDENT_TG'].isdigit():
logger.debug('(OPTIONS) %s - OVERRIDE_IDENT_TG is not an integer, ignoring',_system)
continue
if isinstance(_options['DEFAULT_UA_TIMER'], str) and not _options['DEFAULT_UA_TIMER'].isdigit():
logger.debug('(OPTIONS) %s - DEFAULT_REFLECTOR is not an integer, ignoring',_system)
continue
@ -894,7 +913,8 @@ def options_config():
else:
logger.debug('(OPTIONS) %s default reflector disabled, updating',_system)
reset_default_reflector(int(_options['DEFAULT_REFLECTOR']),_tmout,_system)
ts1 = []
if _options['TS1_STATIC'] != CONFIG['SYSTEMS'][_system]['TS1_STATIC']:
_tmout = int(_options['DEFAULT_UA_TIMER'])
logger.debug('(OPTIONS) %s TS1 static TGs changed, updating',_system)
@ -906,23 +926,23 @@ def options_config():
continue
tg = int(tg)
reset_static_tg(tg,1,_tmout,_system)
ts1 = []
if _options['TS1_STATIC']:
ts1 = _options['TS1_STATIC'].split(',')
for tg in ts1:
if not tg:
if not tg or int(tg) == 0 or int(tg) >= 16777215 or tg == _options['DEFAULT_REFLECTOR']:
logger.debug('(OPTIONS) %s not setting TS1 Static %s. Bad TG or conflict with DIAL',_system,tg)
continue
tg = int(tg)
make_static_tg(tg,1,_tmout,_system)
ts2 = []
if _options['TS2_STATIC'] != CONFIG['SYSTEMS'][_system]['TS2_STATIC']:
_tmout = int(_options['DEFAULT_UA_TIMER'])
logger.debug('(OPTIONS) %s TS2 static TGs changed, updating',_system)
ts2 = []
if CONFIG['SYSTEMS'][_system]['TS2_STATIC']:
ts2 = CONFIG['SYSTEMS'][_system]['TS2_STATIC'].split(',')
for tg in ts2:
if not tg:
if not tg or int(tg) == 0 or int(tg) >= 16777215 or tg == _options['DEFAULT_REFLECTOR'] or (tg and ts1 and tg in ts1):
logger.debug('(OPTIONS) %s not setting TS2 Static %s. Bad TG or conflict with DIAL or TS1',_system,tg)
continue
tg = int(tg)
reset_static_tg(tg,2,_tmout,_system)
@ -930,7 +950,7 @@ def options_config():
if _options['TS2_STATIC']:
ts2 = _options['TS2_STATIC'].split(',')
for tg in ts2:
if not tg:
if not tg or int(tg) == 0 or int(tg) >= 16777215:
continue
tg = int(tg)
make_static_tg(tg,2,_tmout,_system)
@ -939,328 +959,10 @@ def options_config():
CONFIG['SYSTEMS'][_system]['TS2_STATIC'] = _options['TS2_STATIC']
CONFIG['SYSTEMS'][_system]['DEFAULT_REFLECTOR'] = int(_options['DEFAULT_REFLECTOR'])
CONFIG['SYSTEMS'][_system]['DEFAULT_UA_TIMER'] = int(_options['DEFAULT_UA_TIMER'])
except Exception:
logger.exception('(OPTIONS) caught exception:')
except Exception as e:
logger.exception('(OPTIONS) caught exception: %s',e)
continue
def mysqlGetConfig():
logger.debug('(MYSQL) Periodic config check')
SQLGETCONFIG = {}
if sql.con():
logger.debug('(MYSQL) reading config from database')
try:
SQLGETCONFIG = sql.getConfig()
except:
logger.debug('(MYSQL) problem with SQL query, aborting')
sql.close()
return
else:
logger.debug('(MYSQL) problem connecting to SQL server, aborting')
sql.close()
return
sql.close()
reactor.callFromThread(mysql_config_check,SQLGETCONFIG)
def mysql_config_check(SQLGETCONFIG):
SQLCONFIG = SQLGETCONFIG
for system in SQLGETCONFIG:
if system not in CONFIG['SYSTEMS']:
if SQLCONFIG[system]['ENABLED']:
logger.debug('(MYSQL) new enabled system %s, starting HBP listener',system)
CONFIG['SYSTEMS'][system] = SQLCONFIG[system]
systems[system] = routerHBP(system, CONFIG, report_server)
listeningPorts[system] = reactor.listenUDP(CONFIG['SYSTEMS'][system]['PORT'], systems[system], interface=CONFIG['SYSTEMS'][system]['IP'])
else:
logger.debug('(MYSQL) new disabled system %s',system)
_tmout = SQLCONFIG[system]['DEFAULT_UA_TIMER']
#Do ACL processing
# Subscriber and TGID ACLs
logger.debug('(MYSQL) building ACLs')
# Registration ACLs
SQLCONFIG[system]['REG_ACL'] = acl_build(SQLCONFIG[system]['REG_ACL'], PEER_MAX)
for acl in ['SUB_ACL', 'TG1_ACL', 'TG2_ACL']:
SQLCONFIG[system][acl] = acl_build(SQLCONFIG[system][acl], ID_MAX)
#Add system to bridges
if SQLCONFIG[system]['ENABLED']:
logger.debug('(MYSQL) adding new system to static bridges')
for _bridge in BRIDGES:
ts1 = False
ts2 = False
for i,e in enumerate(BRIDGES[_bridge]):
if e['SYSTEM'] == system and e['TS'] == 1:
ts1 = True
if e['SYSTEM'] == system and e['TS'] == 2:
ts2 = True
if _bridge[0:1] != '#':
if ts1 == False:
BRIDGES[_bridge].append({'SYSTEM': system, 'TS': 1, 'TGID': bytes_3(int(_bridge)),'ACTIVE': False,'TIMEOUT': _tmout * 60,'TO_TYPE': 'ON','OFF': [],'ON': [bytes_3(int(_bridge)),],'RESET': [], 'TIMER': time()})
if ts2 == False:
BRIDGES[_bridge].append({'SYSTEM': system, 'TS': 2, 'TGID': bytes_3(int(_bridge)),'ACTIVE': False,'TIMEOUT': _tmout * 60,'TO_TYPE': 'ON','OFF': [],'ON': [bytes_3(int(_bridge)),],'RESET': [], 'TIMER': time()})
else:
if ts2 == False:
BRIDGES[_bridge].append({'SYSTEM': system, 'TS': 2, 'TGID': bytes_3(9),'ACTIVE': False,'TIMEOUT': _tmout * 60,'TO_TYPE': 'ON','OFF': [bytes_3(4000)],'ON': [],'RESET': [], 'TIMER': time()})
if SQLCONFIG[system]['DEFAULT_REFLECTOR'] > 0:
logger.debug('(MYSQL) %s setting default reflector',system)
make_default_reflector(SQLCONFIG[system]['DEFAULT_REFLECTOR'],_tmout,system)
if SQLCONFIG[system]['TS1_STATIC']:
logger.debug('(MYSQL) %s setting static TGs on TS1',system)
ts1 = SQLCONFIG[system]['TS1_STATIC'].split(',')
for tg in ts1:
if not tg:
continue
tg = int(tg)
make_static_tg(tg,1,_tmout,system)
if SQLCONFIG[system]['TS2_STATIC']:
logger.debug('(MYSQL) %s setting static TGs on TS2',system)
ts2 = SQLCONFIG[system]['TS2_STATIC'].split(',')
for tg in ts2:
if not tg:
continue
tg = int(tg)
make_static_tg(tg,2,_tmout,system)
continue
#Preserve options line
if 'OPTIONS' in CONFIG['SYSTEMS'][system]:
SQLCONFIG[system]['OPTIONS'] = CONFIG['SYSTEMS'][system]['OPTIONS']
SQLCONFIG[system]['TS1_STATIC'] = CONFIG['SYSTEMS'][system]['TS1_STATIC']
SQLCONFIG[system]['TS2_STATIC'] = CONFIG['SYSTEMS'][system]['TS2_STATIC']
SQLCONFIG[system]['DEFAULT_UA_TIMER'] = CONFIG['SYSTEMS'][system]['DEFAULT_UA_TIMER']
SQLCONFIG[system]['DEFAULT_REFLECTOR'] = CONFIG['SYSTEMS'][system]['DEFAULT_REFLECTOR']
#logger.debug('(MYSQL) %s has HBP Options line - skipping',system)
#continue
if SQLCONFIG[system]['ENABLED'] == False and CONFIG['SYSTEMS'][system]['ENABLED'] == True:
logger.debug('(MYSQL) %s changed from enabled to disabled, killing HBP listener and removing from bridges',system)
systems[system].master_dereg()
if systems[system]._system_maintenance is not None and systems[system]._system_maintenance.running == True:
systems[system]._system_maintenance.stop()
systems[system]._system_maintenance = None
remove_bridge_system(system)
listeningPorts[system].stopListening()
if CONFIG['SYSTEMS'][system]['ENABLED'] == False and SQLCONFIG[system]['ENABLED'] == True:
logger.debug('(MYSQL) %s changed from disabled to enabled, starting HBP listener',system)
systems[system] = routerHBP(system, CONFIG, report_server)
listeningPorts[system] = reactor.listenUDP(CONFIG['SYSTEMS'][system]['PORT'], systems[system], interface=CONFIG['SYSTEMS'][system]['IP'])
logger.debug('(GLOBAL) %s instance created: %s, %s', CONFIG['SYSTEMS'][system]['MODE'], system, systems[system])
logger.debug('(MYSQL) adding new system to static bridges')
_tmout = SQLCONFIG[system]['DEFAULT_UA_TIMER']
for _bridge in BRIDGES:
ts1 = False
ts2 = False
for i,e in enumerate(BRIDGES[_bridge]):
if e['SYSTEM'] == system and e['TS'] == 1:
ts1 = True
if e['SYSTEM'] == system and e['TS'] == 2:
ts2 = True
if _bridge[0:1] != '#':
if ts1 == False:
BRIDGES[_bridge].append({'SYSTEM': system, 'TS': 1, 'TGID': bytes_3(int(_bridge)),'ACTIVE': False,'TIMEOUT': _tmout * 60,'TO_TYPE': 'ON','OFF': [],'ON': [bytes_3(int(_bridge)),],'RESET': [], 'TIMER': time()})
if ts2 == False:
BRIDGES[_bridge].append({'SYSTEM': system, 'TS': 2, 'TGID': bytes_3(int(_bridge)),'ACTIVE': False,'TIMEOUT': _tmout * 60,'TO_TYPE': 'ON','OFF': [],'ON': [bytes_3(int(_bridge)),],'RESET': [], 'TIMER': time()})
else:
if ts2 == False:
BRIDGES[_bridge].append({'SYSTEM': system, 'TS': 2, 'TGID': bytes_3(9),'ACTIVE': False,'TIMEOUT': _tmout * 60,'TO_TYPE': 'ON','OFF': [bytes_3(4000)],'ON': [],'RESET': [], 'TIMER': time()})
if SQLCONFIG[system]['DEFAULT_REFLECTOR'] > 0:
if 'OPTIONS' not in SQLCONFIG[system]:
logger.debug('(MYSQL) %s setting default reflector',system)
make_default_reflector(SQLCONFIG[system]['DEFAULT_REFLECTOR'],_tmout,system)
if SQLCONFIG[system]['TS1_STATIC']:
if 'OPTIONS' not in SQLCONFIG[system]:
logger.debug('(MYSQL) %s setting static TGs on TS1',system)
ts1 = SQLCONFIG[system]['TS1_STATIC'].split(',')
for tg in ts1:
if not tg:
continue
tg = int(tg)
make_static_tg(tg,1,_tmout,system)
if SQLCONFIG[system]['TS2_STATIC']:
logger.debug('(MYSQL) %s setting static TGs on TS2',system)
ts2 = SQLCONFIG[system]['TS2_STATIC'].split(',')
for tg in ts2:
if not tg:
continue
tg = int(tg)
make_static_tg(tg,2,_tmout,system)
if SQLCONFIG[system]['DEFAULT_UA_TIMER'] != CONFIG['SYSTEMS'][system]['DEFAULT_UA_TIMER']:
if 'OPTIONS' not in CONFIG['SYSTEMS'][system]:
logger.debug('(MYSQL) %s DEFAULT_UA_TIMER changed. Updating bridges.',system)
remove_bridge_system(system)
for _bridge in BRIDGES:
ts1 = False
ts2 = False
_tmout = CONFIG['SYSTEMS'][system][DEFAULT_UA_TIMER]
for i,e in enumerate(BRIDGES[_bridge]):
if e['SYSTEM'] == system and e['TS'] == 1:
ts1 = True
if e['SYSTEM'] == system and e['TS'] == 2:
ts2 = True
if _bridge[0:1] != '#':
if ts1 == False:
BRIDGES[_bridge].append({'SYSTEM': system, 'TS': 1, 'TGID': bytes_3(int(_bridge)),'ACTIVE': False,'TIMEOUT': _tmout * 60,'TO_TYPE': 'ON','OFF': [],'ON': [bytes_3(int(_bridge)),],'RESET': [], 'TIMER': time()})
if ts2 == False:
BRIDGES[_bridge].append({'SYSTEM': system, 'TS': 2, 'TGID': bytes_3(int(_bridge)),'ACTIVE': False,'TIMEOUT': _tmout * 60,'TO_TYPE': 'ON','OFF': [],'ON': [bytes_3(int(_bridge)),],'RESET': [], 'TIMER': time()})
else:
if ts2 == False:
BRIDGES[_bridge].append({'SYSTEM': system, 'TS': 2, 'TGID': bytes_3(9),'ACTIVE': False,'TIMEOUT': _tmout * 60,'TO_TYPE': 'ON','OFF': [bytes_3(4000)],'ON': [],'RESET': [], 'TIMER': time()})
if SQLCONFIG[system]['DEFAULT_REFLECTOR'] > 0:
# if 'OPTIONS' not in SQLCONFIG[system]:
logger.debug('(MYSQL) %s setting default reflector',system)
make_default_reflector(SQLCONFIG[system]['DEFAULT_REFLECTOR'],_tmout,system)
if SQLCONFIG[system]['TS1_STATIC']:
# if 'OPTIONS' not in SQLCONFIG[system]:
logger.debug('(MYSQL) %s setting static TGs on TS1',system)
ts1 = SQLCONFIG[system]['TS1_STATIC'].split(',')
for tg in ts1:
if not tg:
continue
tg = int(tg)
make_static_tg(tg,1,_tmout,system)
if SQLCONFIG[system]['TS2_STATIC']:
logger.debug('(MYSQL) %s setting static TGs on TS2',system)
ts2 = SQLCONFIG[system]['TS2_STATIC'].split(',')
for tg in ts2:
if not tg:
continue
tg = int(tg)
make_static_tg(tg,2,_tmout,system)
if SQLCONFIG[system]['IP'] != CONFIG['SYSTEMS'][system]['IP'] and CONFIG['SYSTEMS'][system]['ENABLED'] == True:
logger.debug('(MYSQL) %s IP binding changed on enabled system, killing HBP listener. Will restart in 1 minute',system)
systems[system].master_dereg()
if systems[system]._system_maintenance is not None and systems[system]._system_maintenance.running == True:
systems[system]._system_maintenance.stop()
systems[system]._system_maintenance = None
listeningPorts[system].stopListening()
SQLCONFIG[system]['ENABLED'] = False
if SQLCONFIG[system]['PORT'] != CONFIG['SYSTEMS'][system]['PORT'] and CONFIG['SYSTEMS'][system]['ENABLED'] == True:
logger.debug('(MYSQL) %s Port binding changed on enabled system, killing HBP listener. Will restart in 1 minute',system)
systems[system].master_dereg()
if systems[system]._system_maintenance is not None and systems[system]._system_maintenance.running == True:
systems[system]._system_maintenance.stop()
systems[system]._system_maintenance = None
listeningPorts[system].stopListening()
SQLCONFIG[system]['ENABLED'] = False
if SQLCONFIG[system]['MAX_PEERS'] != CONFIG['SYSTEMS'][system]['MAX_PEERS'] and CONFIG['SYSTEMS'][system]['ENABLED'] == True:
logger.debug('(MYSQL) %s MAX_PEERS changed on enabled system, killing HBP listener. Will restart in 1 minute',system)
systems[system].master_dereg()
if systems[system]._system_maintenance is not None and systems[system]._system_maintenance.running == True:
systems[system]._system_maintenance.stop()
systems[system]._system_maintenance = None
listeningPorts[system].stopListening()
SQLCONFIG[system]['ENABLED'] = False
if SQLCONFIG[system]['PASSPHRASE'] != CONFIG['SYSTEMS'][system]['PASSPHRASE'] and CONFIG['SYSTEMS'][system]['ENABLED'] == True:
logger.debug('(MYSQL) %s Passphrase changed on enabled system. Kicking peers',system)
systems[system].master_dereg()
if SQLCONFIG[system]['DEFAULT_REFLECTOR'] != CONFIG['SYSTEMS'][system]['DEFAULT_REFLECTOR']:
if 'OPTIONS' not in SQLCONFIG[system]:
_tmout = SQLCONFIG[system]['DEFAULT_UA_TIMER']
if SQLCONFIG[system]['DEFAULT_REFLECTOR'] > 0:
logger.debug('(MYSQL) %s default reflector changed, updating',system)
reset_default_reflector(CONFIG['SYSTEMS'][system]['DEFAULT_REFLECTOR'],_tmout,system)
make_default_reflector(SQLCONFIG[system]['DEFAULT_REFLECTOR'],_tmout,system)
else:
logger.debug('(MYSQL) %s default reflector disabled, updating',system)
reset_default_reflector(CONFIG['SYSTEMS'][system]['DEFAULT_REFLECTOR'],_tmout,system)
if SQLCONFIG[system]['TS1_STATIC'] != CONFIG['SYSTEMS'][system]['TS1_STATIC']:
if 'OPTIONS' not in CONFIG['SYSTEMS'][system]:
_tmout = SQLCONFIG[system]['DEFAULT_UA_TIMER']
logger.debug('(MYSQL) %s TS1 static TGs changed, updating',system)
ts1 = []
if CONFIG['SYSTEMS'][system]['TS1_STATIC']:
ts1 = CONFIG['SYSTEMS'][system]['TS1_STATIC'].split(',')
for tg in ts1:
if not tg:
continue
tg = int(tg)
reset_static_tg(tg,1,_tmout,system)
ts1 = []
if SQLCONFIG[system]['TS1_STATIC']:
ts1 = SQLCONFIG[system]['TS1_STATIC'].split(',')
for tg in ts1:
if not tg:
continue
tg = int(tg)
make_static_tg(tg,1,_tmout,system)
if SQLCONFIG[system]['TS2_STATIC'] != CONFIG['SYSTEMS'][system]['TS2_STATIC']:
if 'OPTIONS' not in CONFIG['SYSTEMS'][system]:
_tmout = SQLCONFIG[system]['DEFAULT_UA_TIMER']
logger.debug('(MYSQL) %s TS2 static TGs changed, updating',system)
ts2 = []
if CONFIG['SYSTEMS'][system]['TS2_STATIC']:
ts2 = CONFIG['SYSTEMS'][system]['TS2_STATIC'].split(',')
for tg in ts2:
if not tg:
continue
tg = int(tg)
reset_static_tg(tg,2,_tmout,system)
ts2 = []
if SQLCONFIG[system]['TS2_STATIC']:
ts2 = SQLCONFIG[system]['TS2_STATIC'].split(',')
for tg in ts2:
if not tg:
continue
tg = int(tg)
make_static_tg(tg,2,_tmout,system)
if SQLCONFIG[system]['ANNOUNCEMENT_LANGUAGE'] != CONFIG['SYSTEMS'][system]['ANNOUNCEMENT_LANGUAGE']:
logger.debug('(MYSQL) %s announcement language changed to %s',system, SQLCONFIG[system]['ANNOUNCEMENT_LANGUAGE'])
#Rebuild ACLs
SQLCONFIG[system]['REG_ACL'] = acl_build(SQLCONFIG[system]['REG_ACL'], PEER_MAX)
SQLCONFIG[system]['SUB_ACL'] = acl_build(SQLCONFIG[system]['SUB_ACL'], ID_MAX)
SQLCONFIG[system]['TG1_ACL'] = acl_build(SQLCONFIG[system]['TG1_ACL'], ID_MAX)
SQLCONFIG[system]['TG2_ACL'] = acl_build(SQLCONFIG[system]['TG2_ACL'], ID_MAX)
if SQLCONFIG[system]['REG_ACL'] != CONFIG['SYSTEMS'][system]['REG_ACL']:
logger.debug('(MYSQL) registration ACL changed')
if SQLCONFIG[system]['SUB_ACL'] != CONFIG['SYSTEMS'][system]['SUB_ACL']:
logger.debug('(MYSQL) subscriber ACL changed')
if SQLCONFIG[system]['TG1_ACL'] != CONFIG['SYSTEMS'][system]['TG1_ACL']:
logger.debug('(MYSQL) TG1 ACL changed')
if SQLCONFIG[system]['TG2_ACL'] != CONFIG['SYSTEMS'][system]['TG2_ACL']:
logger.debug('(MYSQL) TG2 ACL changed')
#Preserve peers list
if system in CONFIG['SYSTEMS'] and CONFIG['SYSTEMS'][system]['ENABLED'] and 'PEERS' in CONFIG['SYSTEMS'][system] :
SQLCONFIG[system]['PEERS'] = CONFIG['SYSTEMS'][system]['PEERS']
CONFIG['SYSTEMS'][system].update(SQLCONFIG[system])
else:
CONFIG['SYSTEMS'][system].update(SQLCONFIG[system])
#Add MySQL config data to config dict
#CONFIG['SYSTEMS'].update(SQLCONFIG)
SQLCONFIG = {}
class routerOBP(OPENBRIDGE):
@ -1597,28 +1299,28 @@ class routerOBP(OPENBRIDGE):
if _dtype_vseq == 3:
logger.info('(%s) *UNIT CSBK* STREAM ID: %s, RPTR: %s SUB: %s (%s) PEER: %s (%s) DST_ID %s (%s), TS %s, SRC: %s', \
self._system, int_id(_stream_id), self.get_rptr(_source_rptr), get_alias(_rf_src, subscriber_ids), int_id(_rf_src), get_alias(_peer_id, peer_ids), int_id(_peer_id), get_alias(_dst_id, talkgroup_ids), int_id(_dst_id), _slot, int_id(_source_server))
logger.info('(%s) *UNIT CSBK* STREAM ID: %s, RPTR: %s SUB: %s (%s) PEER: %s (%s) DST_ID %s (%s), TS %s, SRC: %s, RPTR: %s', \
self._system, int_id(_stream_id), self.get_rptr(_source_rptr), get_alias(_rf_src, subscriber_ids), int_id(_rf_src), get_alias(_peer_id, peer_ids), int_id(_peer_id), get_alias(_dst_id, talkgroup_ids), int_id(_dst_id), _slot, int_id(_source_server),int_id(_source_rptr))
if CONFIG['REPORTS']['REPORT']:
self._report.send_bridgeEvent('UNIT CSBK,DATA,RX,{},{},{},{},{},{},{},{}'.format(self._system, int_id(_stream_id), int_id(_peer_id), int_id(_rf_src), _slot, int_id(_dst_id), int_id(_source_server), int_id(_source_rptr)).encode(encoding='utf-8', errors='ignore'))
elif _dtype_vseq == 6:
logger.info('(%s) *UNIT DATA HEADER* STREAM ID: %s, RPTR: %s SUB: %s (%s) PEER: %s (%s) DST_ID %s (%s), TS %s, SRC: %s', \
self._system, int_id(_stream_id),self.get_rptr(_source_rptr), get_alias(_rf_src, subscriber_ids), int_id(_rf_src), get_alias(_peer_id, peer_ids), int_id(_peer_id), get_alias(_dst_id, talkgroup_ids), int_id(_dst_id), _slot,int_id(_source_server))
logger.info('(%s) *UNIT DATA HEADER* STREAM ID: %s, RPTR: %s SUB: %s (%s) PEER: %s (%s) DST_ID %s (%s), TS %s, SRC: %s, RPTR: %s', \
self._system, int_id(_stream_id),self.get_rptr(_source_rptr), get_alias(_rf_src, subscriber_ids), int_id(_rf_src), get_alias(_peer_id, peer_ids), int_id(_peer_id), get_alias(_dst_id, talkgroup_ids), int_id(_dst_id), _slot,int_id(_source_server),int_id(_source_rptr))
if CONFIG['REPORTS']['REPORT']:
self._report.send_bridgeEvent('UNIT DATA HEADER,DATA,RX,{},{},{},{},{},{},{},{}'.format(self._system, int_id(_stream_id), int_id(_peer_id), int_id(_rf_src), _slot, int_id(_dst_id), int_id(_source_server), int_id(_source_rptr)).encode(encoding='utf-8', errors='ignore'))
elif _dtype_vseq == 7:
logger.info('(%s) *UNIT VCSBK 1/2 DATA BLOCK * STREAM ID: %s, RPTR: %s SUB: %s (%s) PEER: %s (%s) TGID %s (%s), TS %s, SRC: %s', \
self._system, int_id(_stream_id), self.get_rptr(_source_rptr), get_alias(_rf_src, subscriber_ids), int_id(_rf_src), get_alias(_peer_id, peer_ids), int_id(_peer_id), get_alias(_dst_id, talkgroup_ids), int_id(_dst_id), _slot, int_id(_source_server))
logger.info('(%s) *UNIT VCSBK 1/2 DATA BLOCK * STREAM ID: %s, RPTR: %s SUB: %s (%s) PEER: %s (%s) TGID %s (%s), TS %s, SRC: %s, RPTR: %s', \
self._system, int_id(_stream_id), self.get_rptr(_source_rptr), get_alias(_rf_src, subscriber_ids), int_id(_rf_src), get_alias(_peer_id, peer_ids), int_id(_peer_id), get_alias(_dst_id, talkgroup_ids), int_id(_dst_id), _slot, int_id(_source_server),int_id(_source_rptr))
if CONFIG['REPORTS']['REPORT']:
self._report.send_bridgeEvent('UNIT VCSBK 1/2 DATA BLOCK,DATA,RX,{},{},{},{},{},{},{},{}'.format(self._system, int_id(_stream_id), int_id(_peer_id), int_id(_rf_src), _slot, int_id(_dst_id), int_id(_source_server), int_id(_source_rptr)).encode(encoding='utf-8', errors='ignore'))
elif _dtype_vseq == 8:
logger.info('(%s) *UNIT VCSBK 3/4 DATA BLOCK * STREAM ID: %s, RPTR: %s, SUB: %s (%s) PEER: %s (%s) TGID %s (%s), TS %s, SRC: %s', \
self._system, int_id(_stream_id), self.get_rptr(_source_rptr), get_alias(_rf_src, subscriber_ids), int_id(_rf_src), get_alias(_peer_id, peer_ids), int_id(_peer_id), get_alias(_dst_id, talkgroup_ids), int_id(_dst_id), _slot,int_id(_source_server))
logger.info('(%s) *UNIT VCSBK 3/4 DATA BLOCK * STREAM ID: %s, RPTR: %s, SUB: %s (%s) PEER: %s (%s) TGID %s (%s), TS %s, SRC: %s, RPTR: %s', \
self._system, int_id(_stream_id), self.get_rptr(_source_rptr), get_alias(_rf_src, subscriber_ids), int_id(_rf_src), get_alias(_peer_id, peer_ids), int_id(_peer_id), get_alias(_dst_id, talkgroup_ids), int_id(_dst_id), _slot,int_id(_source_server),int_id(_source_rptr))
if CONFIG['REPORTS']['REPORT']:
self._report.send_bridgeEvent('UNIT VCSBK 3/4 DATA BLOCK,DATA,RX,{},{},{},{},{},{},{},{}'.format(self._system, int_id(_stream_id), int_id(_peer_id), int_id(_rf_src), _slot, int_id(_dst_id), int_id(_source_server), int_id(_source_rptr)).encode(encoding='utf-8', errors='ignore'))
else:
logger.info('(%s) *UNKNOWN DATA TYPE* STREAM ID: %s, RPTR: %s, SUB: %s (%s) PEER: %s (%s) TGID %s (%s), TS %s, SRC: %s', \
self._system, int_id(_stream_id), self.get_rptr(_source_rptr), get_alias(_rf_src, subscriber_ids), int_id(_rf_src), get_alias(_peer_id, peer_ids), int_id(_peer_id), get_alias(_dst_id, talkgroup_ids), int_id(_dst_id), _slot,int_id(_source_server))
logger.info('(%s) *UNKNOWN DATA TYPE* STREAM ID: %s, RPTR: %s, SUB: %s (%s) PEER: %s (%s) TGID %s (%s), TS %s, SRC: %s, RPTR: %s', \
self._system, int_id(_stream_id), self.get_rptr(_source_rptr), get_alias(_rf_src, subscriber_ids), int_id(_rf_src), get_alias(_peer_id, peer_ids), int_id(_peer_id), get_alias(_dst_id, talkgroup_ids), int_id(_dst_id), _slot,int_id(_source_server),int_id(_source_rptr))
#Send all data to DATA-GATEWAY if enabled and valid
if CONFIG['GLOBAL']['DATA_GATEWAY'] and 'DATA-GATEWAY' in CONFIG['SYSTEMS'] and CONFIG['SYSTEMS']['DATA-GATEWAY']['MODE'] == 'OPENBRIDGE' and CONFIG['SYSTEMS']['DATA-GATEWAY']['ENABLED']:
@ -1712,7 +1414,7 @@ class routerOBP(OPENBRIDGE):
# If we don't have a voice header then don't wait to decode the Embedded LC
# just make a new one from the HBP header. This is good enough, and it saves lots of time
else:
self.STATUS[_stream_id]['LC'] = LC_OPT + _dst_id + _rf_src
self.STATUS[_stream_id]['LC'] = b''.join([LC_OPT,_dst_id,_rf_src])
_inthops = 0
if _hops:
@ -1768,7 +1470,7 @@ class routerOBP(OPENBRIDGE):
hr_times = None
if not fi:
logger.warning("(%s) OBP *LoopControl* fi is empty for some reason : %s, STREAM ID: %s, TG: %s, TS: %s",self._system, int_id(_stream_id), int_id(_dst_id),_sysslot)
logger.warning("(%s) OBP *LoopControl* fi is empty for some reason : STREAM ID: %s, TG: %s, TS: %s",self._system, int_id(_stream_id), int_id(_dst_id),_sysslot)
return
if self._system != fi:
@ -1837,7 +1539,7 @@ class routerOBP(OPENBRIDGE):
logger.debug('(%s) Bridge for STAT TG %s does not exist. Creating',self._system, int_id(_dst_id))
make_stat_bridge(_dst_id)
_sysIgnore = []
_sysIgnore = deque()
for _bridge in BRIDGES:
for _system in BRIDGES[_bridge]:
@ -2060,7 +1762,7 @@ class routerHBP(HBSYSTEM):
_target_status[_target['TS']]['source_server'] = _source_server
_target_status[_target['TS']]['source_rptr'] = _source_rptr
# Generate LCs (full and EMB) for the TX stream
dst_lc = self.STATUS[_slot]['RX_LC'][0:3] + _target['TGID'] + _rf_src
dst_lc = b''.join([self.STATUS[_slot]['RX_LC'][0:3],_target['TGID'],_rf_src])
_target_status[_target['TS']]['TX_H_LC'] = bptc.encode_header_lc(dst_lc)
_target_status[_target['TS']]['TX_T_LC'] = bptc.encode_terminator_lc(dst_lc)
_target_status[_target['TS']]['TX_EMB_LC'] = bptc.encode_emblc(dst_lc)
@ -2126,6 +1828,8 @@ class routerHBP(HBSYSTEM):
def sendDataToOBP(self,_target,_data,dmrpkt,pkt_time,_stream_id,_dst_id,_peer_id,_rf_src,_bits,_slot,_hops = b'',_ber = b'\x00', _rssi = b'\x00',_source_server = b'\x00\x00\x00\x00', _source_rptr = b'\x00\x00\x00\x00'):
# _sysIgnore = sysIgnore
_source_server = self._CONFIG['GLOBAL']['SERVER_ID']
_source_rptr = _peer_id
_int_dst_id = int_id(_dst_id)
_target_status = systems[_target].STATUS
_target_system = self._CONFIG['SYSTEMS'][_target]
@ -2368,7 +2072,7 @@ class routerHBP(HBSYSTEM):
logger.info('(%s) Reflector: Private call from %s to %s',self._system, int_id(_rf_src), _int_dst_id)
if _int_dst_id >= 5 and _int_dst_id != 8 and _int_dst_id != 9 and _int_dst_id <= 999999:
_bridgename = '#'+ str(_int_dst_id)
_bridgename = ''.join(['#',str(_int_dst_id)])
if _bridgename not in BRIDGES and not (_int_dst_id >= 4000 and _int_dst_id <= 5000) and not (_int_dst_id >=9991 and _int_dst_id <= 9999):
logger.info('(%s) [A] Reflector for TG %s does not exist. Creating as User Activated. Timeout: %s',self._system, _int_dst_id,CONFIG['SYSTEMS'][self._system]['DEFAULT_UA_TIMER'])
make_single_reflector(_dst_id,CONFIG['SYSTEMS'][self._system]['DEFAULT_UA_TIMER'],self._system)
@ -2567,7 +2271,7 @@ class routerHBP(HBSYSTEM):
# If we don't have a voice header then don't wait to decode it from the Embedded LC
# just make a new one from the HBP header. This is good enough, and it saves lots of time
else:
self.STATUS[_slot]['RX_LC'] = LC_OPT + _dst_id + _rf_src
self.STATUS[_slot]['RX_LC'] = b''.join([LC_OPT,_dst_id,_rf_src])
#Create default bridge for unknown TG
if int_id(_dst_id) >= 5 and int_id(_dst_id) != 9 and int_id(_dst_id) != 4000 and int_id(_dst_id) != 5000 and (str(int_id(_dst_id)) not in BRIDGES):
@ -2622,7 +2326,7 @@ class routerHBP(HBSYSTEM):
self.STATUS[_slot]['LOOPLOG'] = True
self.STATUS[_slot]['LAST'] = pkt_time
if CONFIG['SYSTEMS'][self._system]['ENHANCED_OBP'] and '_bcsq' not in self.STATUS[_slot]:
if 'ENHANCED_OBP' in CONFIG['SYSTEMS'][self._system] and CONFIG['SYSTEMS'][self._system]['ENHANCED_OBP'] and '_bcsq' not in self.STATUS[_slot]:
systems[self._system].send_bcsq(_dst_id,_stream_id)
self.STATUS[_slot]['_bcsq'] = True
return
@ -2658,7 +2362,7 @@ class routerHBP(HBSYSTEM):
#Save this packet
self.STATUS[_slot]['lastData'] = _data
_sysIgnore = []
_sysIgnore = deque()
for _bridge in BRIDGES:
#if _bridge[0:1] != '#':
if True:
@ -2670,7 +2374,7 @@ class routerHBP(HBSYSTEM):
if _bridge[0:1] == '#':
_bridge = _bridge[1:]
else:
_bridge = '#'+_bridge
_bridge = ''.join(['#',_bridge])
if _bridge in BRIDGES:
_sysIgnore = self.to_target(_peer_id, _rf_src, _dst_id, _seq, _slot, _call_type, _frame_type, _dtype_vseq, _stream_id, _data, pkt_time, dmrpkt, _bits,_bridge,_system,False,_sysIgnore,_source_server,_ber,_rssi,_source_rptr)
@ -2794,12 +2498,12 @@ class bridgeReportFactory(reportFactory):
def send_bridge(self):
serialized = pickle.dumps(BRIDGES, protocol=2) #.decode("utf-8", errors='ignore')
self.send_clients(REPORT_OPCODES['BRIDGE_SND']+serialized)
self.send_clients(b''.join([REPORT_OPCODES['BRIDGE_SND'],serialized]))
def send_bridgeEvent(self, _data):
if isinstance(_data, str):
_data = _data.encode('utf-8', errors='ignore')
self.send_clients(REPORT_OPCODES['BRDG_EVENT']+_data)
self.send_clients(b''.join([REPORT_OPCODES['BRDG_EVENT'],_data]))
#************************************************
@ -2835,7 +2539,22 @@ if __name__ == '__main__':
if not cli_args.CONFIG_FILE:
cli_args.CONFIG_FILE = os.path.dirname(os.path.abspath(__file__))+'/hblink.cfg'
# Call the external routine to build the configuration dictionary
#configP = False
#if os.path.isfile('config.pkl'):
#if os.path.getmtime('config.pkl') > (time() - 25):
#try:
#with open('config.pkl','rb') as _fh:
#CONFIG = pickle.load(_fh)
#print('(CONFIG) loaded config .pkl from previous shutdown')
#configP = True
#except:
#print('(CONFIG) Cannot load config.pkl file')
#CONFIG = config.build_config(cli_args.CONFIG_FILE)
#else:
#os.unlink("config.pkl")
#else:
CONFIG = config.build_config(cli_args.CONFIG_FILE)
# Ensure we have a path for the rules file, if one wasn't specified, then use the default (top of file)
@ -2846,40 +2565,15 @@ if __name__ == '__main__':
if cli_args.LOG_LEVEL:
CONFIG['LOGGER']['LOG_LEVEL'] = cli_args.LOG_LEVEL
logger = log.config_logging(CONFIG['LOGGER'])
logger.info('\n\nCopyright (c) 2020, 2021 Simon G7RZU simon@gb7fr.org.uk')
logger.info('\n\nCopyright (c) 2020, 2021, 2022 Simon G7RZU simon@gb7fr.org.uk')
logger.info('Copyright (c) 2013, 2014, 2015, 2016, 2018, 2019\n\tThe Regents of the K0USY Group. All rights reserved.\n')
logger.debug('(GLOBAL) Logging system started, anything from here on gets logged')
#If MySQL is enabled, read master config from MySQL too
if CONFIG['MYSQL']['USE_MYSQL'] == True:
logger.info('(MYSQL) MySQL config enabled')
SQLCONFIG = {}
sql = useMYSQL(CONFIG['MYSQL']['SERVER'], CONFIG['MYSQL']['USER'], CONFIG['MYSQL']['PASS'], CONFIG['MYSQL']['DB'],CONFIG['MYSQL']['TABLE'],logger)
#Run it once immediately
if sql.con():
logger.info('(MYSQL) reading config from database')
try:
SQLCONFIG = sql.getConfig()
#Add MySQL config data to config dict
except:
logger.warning('(MYSQL) problem with SQL query, aborting')
sql.close()
logger.debug('(MYSQL) building ACLs')
# Build ACLs
for system in SQLCONFIG:
SQLCONFIG[system]['REG_ACL'] = acl_build(SQLCONFIG[system]['REG_ACL'], PEER_MAX)
for acl in ['SUB_ACL', 'TG1_ACL', 'TG2_ACL']:
SQLCONFIG[system][acl] = acl_build(SQLCONFIG[system][acl], ID_MAX)
CONFIG['SYSTEMS'].update(SQLCONFIG)
else:
logger.warning('(MYSQL) problem connecting to SQL server, aborting')
if CONFIG['ALLSTAR']['ENABLED']:
logger.info('(AMI) Setting up AMI: Server: %s, Port: %s, User: %s, Pass: %s, Node: %s',CONFIG['ALLSTAR']['SERVER'],CONFIG['ALLSTAR']['PORT'],CONFIG['ALLSTAR']['USER'],CONFIG['ALLSTAR']['PASS'],CONFIG['ALLSTAR']['NODE'])
AMIOBJ = AMI(CONFIG['ALLSTAR']['SERVER'],CONFIG['ALLSTAR']['PORT'],CONFIG['ALLSTAR']['USER'],CONFIG['ALLSTAR']['PASS'],CONFIG['ALLSTAR']['NODE'])
if CONFIG['ALLSTAR']['ENABLED']:
logger.info('(AMI) Setting up AMI: Server: %s, Port: %s, User: %s, Pass: %s, Node: %s',CONFIG['ALLSTAR']['SERVER'],CONFIG['ALLSTAR']['PORT'],CONFIG['ALLSTAR']['USER'],CONFIG['ALLSTAR']['PASS'],CONFIG['ALLSTAR']['NODE'])
AMIOBJ = AMI(CONFIG['ALLSTAR']['SERVER'],CONFIG['ALLSTAR']['PORT'],CONFIG['ALLSTAR']['USER'],CONFIG['ALLSTAR']['PASS'],CONFIG['ALLSTAR']['NODE'])
# Set up the signal handler
@ -2896,7 +2590,7 @@ if __name__ == '__main__':
signal.signal(sig, sig_handler)
# Create the name-number mapping dictionaries
peer_ids, subscriber_ids, talkgroup_ids, local_subscriber_ids = mk_aliases(CONFIG)
peer_ids, subscriber_ids, talkgroup_ids, local_subscriber_ids, server_ids = mk_aliases(CONFIG)
#Add special IDs to DB
subscriber_ids[900999] = 'D-APRS'
@ -2905,6 +2599,7 @@ if __name__ == '__main__':
CONFIG['_SUB_IDS'] = subscriber_ids
CONFIG['_PEER_IDS'] = peer_ids
CONFIG['_LOCAL_SUBSCRIBER_IDS'] = local_subscriber_ids
CONFIG['_SERVER_IDS'] = server_ids
@ -2917,8 +2612,22 @@ if __name__ == '__main__':
except (ImportError, FileNotFoundError):
sys.exit('(ROUTER) TERMINATING: Routing bridges file not found or invalid: {}'.format(cli_args.RULES_FILE))
# Build the routing rules file
BRIDGES = make_bridges(rules_module.BRIDGES)
#Load pickle of bridges if it's less than 25 seconds old
#if os.path.isfile('bridge.pkl'):
#if os.path.getmtime('config.pkl') > (time() - 25):
#try:
#with open('bridge.pkl','rb') as _fh:
#BRIDGES = pickle.load(_fh)
#logger.info('(BRIDGE) loaded bridge.pkl from previous shutdown')
#except:
#logger.warning('(BRIDGE) Cannot load bridge.pkl file')
#BRIDGES = make_bridges(rules_module.BRIDGES)
#else:
#BRIDGES = make_bridges(rules_module.BRIDGES)
#os.unlink("bridge.pkl")
#else:
BRIDGES = make_bridges(rules_module.BRIDGES)
#Subscriber map for unit calls - complete with test entry
#SUB_MAP = {bytes_3(73578):('REP-1',1,time())}
@ -2927,7 +2636,7 @@ if __name__ == '__main__':
if CONFIG['ALIASES']['SUB_MAP_FILE']:
try:
with open(CONFIG['ALIASES']['SUB_MAP_FILE'],'rb') as _fh:
with open(CONFIG['ALIASES']['PATH'] + CONFIG['ALIASES']['SUB_MAP_FILE'],'rb') as _fh:
SUB_MAP = pickle.load(_fh)
except:
logger.warning('(SUBSCRIBER) Cannot load SUB_MAP file')
@ -2939,12 +2648,12 @@ if __name__ == '__main__':
#Generator
generator = {}
systemdelete = []
systemdelete = deque()
for system in CONFIG['SYSTEMS']:
if CONFIG['SYSTEMS'][system]['ENABLED']:
if CONFIG['SYSTEMS'][system]['MODE'] == 'MASTER' and (CONFIG['SYSTEMS'][system]['GENERATOR'] > 1):
for count in range(CONFIG['SYSTEMS'][system]['GENERATOR']):
_systemname = system+'-'+str(count)
_systemname = ''.join([system,'-',str(count)])
generator[_systemname] = copy.deepcopy(CONFIG['SYSTEMS'][system])
generator[_systemname]['PORT'] = generator[_systemname]['PORT'] + count
generator[_systemname]['_default_options'] = "TS1_STATIC={};TS2_STATIC={};SINGLE={};DEFAULT_UA_TIMER={};DEFAULT_REFLECTOR={};VOICE={};LANG={}".format(generator[_systemname]['TS1_STATIC'],generator[_systemname]['TS2_STATIC'],int(generator[_systemname]['SINGLE_MODE']),generator[_systemname]['DEFAULT_UA_TIMER'],generator[_systemname]['DEFAULT_REFLECTOR'],int(generator[_systemname]['VOICE_IDENT']), generator[_systemname]['ANNOUNCEMENT_LANGUAGE'])
@ -3065,13 +2774,6 @@ if __name__ == '__main__':
options_task = task.LoopingCall(options_config)
options = options_task.start(26)
options.addErrback(loopingErrHandle)
#Mysql config checker
#This runs in a thread so as not to block the reactor
if CONFIG['MYSQL']['USE_MYSQL'] == True:
mysql_task = task.LoopingCall(threadedMysql)
mysql = mysql_task.start(33)
mysql.addErrback(loopingErrHandle)
#STAT trimmer - once every hour (roughly - shifted so all timed tasks don't run at once
if CONFIG['GLOBAL']['GEN_STAT_BRIDGES']:

@ -128,7 +128,6 @@ def build_config(_config_file):
CONFIG['LOGGER'] = {}
CONFIG['ALIASES'] = {}
CONFIG['SYSTEMS'] = {}
CONFIG['MYSQL'] = {}
CONFIG['ALLSTAR'] = {}
try:
@ -147,7 +146,8 @@ def build_config(_config_file):
'ALLOW_NULL_PASSPHRASE': config.getboolean(section, 'ALLOW_NULL_PASSPHRASE'),
'ANNOUNCEMENT_LANGUAGES': config.get(section, 'ANNOUNCEMENT_LANGUAGES'),
'SERVER_ID': config.getint(section, 'SERVER_ID').to_bytes(4, 'big'),
'DATA_GATEWAY': config.getboolean(section, 'DATA_GATEWAY')
'DATA_GATEWAY': config.getboolean(section, 'DATA_GATEWAY'),
'VALIDATE_SERVER_IDS': config.getboolean(section, 'VALIDATE_SERVER_IDS')
})
if not CONFIG['GLOBAL']['ANNOUNCEMENT_LANGUAGES']:
@ -183,19 +183,12 @@ def build_config(_config_file):
'TGID_URL': config.get(section, 'TGID_URL'),
'STALE_TIME': config.getint(section, 'STALE_DAYS') * 86400,
'SUB_MAP_FILE': config.get(section, 'SUB_MAP_FILE'),
'LOCAL_SUBSCRIBER_FILE': config.get(section, 'LOCAL_SUBSCRIBER_FILE')
'LOCAL_SUBSCRIBER_FILE': config.get(section, 'LOCAL_SUBSCRIBER_FILE'),
'SERVER_ID_URL': config.get(section, 'SERVER_ID_URL'),
'SERVER_ID_FILE': config.get(section, 'SERVER_ID_FILE')
})
elif section == 'MYSQL':
CONFIG['MYSQL'].update({
'USE_MYSQL': config.getboolean(section, 'USE_MYSQL'),
'USER': config.get(section, 'USER'),
'PASS': config.get(section, 'PASS'),
'DB': config.get(section, 'DB'),
'SERVER': config.get(section, 'SERVER'),
'PORT': config.getint(section,'PORT'),
'TABLE': config.get(section, 'TABLE')
})
elif section == 'ALLSTAR':
CONFIG['ALLSTAR'].update({
@ -209,8 +202,6 @@ def build_config(_config_file):
elif section == 'PROXY':
pass
elif config.getboolean(section, 'ENABLED'):
if config.get(section, 'MODE') == 'PEER':
@ -332,7 +323,8 @@ def build_config(_config_file):
'GENERATOR': config.getint(section, 'GENERATOR'),
'ANNOUNCEMENT_LANGUAGE': config.get(section, 'ANNOUNCEMENT_LANGUAGE'),
'ALLOW_UNREG_ID': config.getboolean(section,'ALLOW_UNREG_ID'),
'PROXY_CONTROL' : config.getboolean(section,'PROXY_CONTROL')
'PROXY_CONTROL' : config.getboolean(section,'PROXY_CONTROL'),
'OVERRIDE_IDENT_TG': config.get(section, 'OVERRIDE_IDENT_TG')
}})
CONFIG['SYSTEMS'][section].update({'PEERS': {}})

@ -24,13 +24,11 @@ services:
mem_reservation: 600m
volumes:
- '/etc/freedmr/freedmr.cfg:/opt/freedmr/freedmr.cfg'
- '/var/log/freedmr/freedmr.log:/opt/freedmr/freedmr.log'
- '/var/log/freedmr/:/opt/freedmr/log/'
- '/etc/freedmr/rules.py:/opt/freedmr/rules.py'
#Write JSON files outside of container
- '/etc/freedmr/json/talkgroup_ids.json:/opt/freedmr/talkgroup_ids.json'
- '/etc/freedmr/json/subscriber_ids.json:/opt/freedmr/subscriber_ids.json'
- '/etc/freedmr/json/peer_ids.json:/opt/freedmr/peer_ids.json'
- '/etc/freedmr/json/sub_map.pkl:/opt/freedmr/sub_map.pkl'
- '/etc/freedmr/json/:/opt/freedmr/json/'
ports:
- '62031:62031/udp'
#Change the below to inlude ports used for your OBP(s)
@ -62,12 +60,9 @@ services:
volumes:
#This should be kept to a manageable size from
#cron or logrotate outisde of the container.
- '/var/log/FreeDMRmonitor/lastheard.log:/opt/FreeDMRmonitor/log/lastheard.log'
- '/var/log/FreeDMRmonitor/hbmon.log:/opt/FreeDMRmonitor/log/hbmon.log'
- '/var/log/FreeDMRmonitor/:/opt/FreeDMRmonitor/log/'
#Write JSON files outside of container
- '/etc/freedmr/json/talkgroup_ids.json:/opt/FreeDMRmonitor/talkgroup_ids.json'
- '/etc/freedmr/json/subscriber_ids.json:/opt/FreeDMRmonitor/subscriber_ids.json'
- '/etc/freedmr/json/peer_ids.json:/opt/FreeDMRmonitor/peer_ids.json'
- '/etc/freedmr/json/:/opt/FreeDMRmonitor/json/'
#Override config file
# - '/etc/freedmr/config.py:/opt/FreeDMRmonitor/config.py'

@ -37,14 +37,7 @@ chmod 755 /etc/freedmr &&
echo make json directory...
mkdir -p /etc/freedmr/json &&
echo get json files...
cd /etc/freedmr/json &&
curl http://downloads.freedmr.uk/downloads/local_subscriber_ids.json -o subscriber_ids.json &&
curl http://downloads.freedmr.uk/downloads/talkgroup_ids.json -o talkgroup_ids.json &&
curl https://www.radioid.net/static/rptrs.json -o peer_ids.json &&
touch /etc/freedmr/json/sub_map.pkl &&
chmod -R 777 /etc/freedmr/json &&
chown 54000:54000 /etc/freedmr/json &&
echo Install /etc/freedmr/freedmr.cfg ...
cat << EOF > /etc/freedmr/freedmr.cfg
@ -70,21 +63,21 @@ REPORT_PORT: 4321
REPORT_CLIENTS: *
[LOGGER]
LOG_FILE: freedmr.log
LOG_FILE: log/freedmr.log
LOG_HANDLERS: file-timed
LOG_LEVEL: INFO
LOG_NAME: FreeDMR
[ALIASES]
TRY_DOWNLOAD: False
PATH: ./
TRY_DOWNLOAD: True
PATH: ./json/
PEER_FILE: peer_ids.json
SUBSCRIBER_FILE: subscriber_ids.json
TGID_FILE: talkgroup_ids.json
PEER_URL: https://www.radioid.net/static/rptrs.json
SUBSCRIBER_URL: http://downloads.freedmr.uk/downloads/local_subscriber_ids.json
TGID_URL: http://downloads.freedmr.uk/downloads/talkgroup_ids.json
STALE_DAYS: 7
TGID_URL: https://freedmr.cymru/talkgroups/talkgroup_ids_json.php
STALE_DAYS: 1
LOCAL_SUBSCRIBER_FILE: local_subcriber_ids.json
SUB_MAP_FILE: sub_map.pkl

@ -33,8 +33,9 @@ TGID_TS1_ACL: PERMIT:ALL
TGID_TS2_ACL: PERMIT:ALL
GEN_STAT_BRIDGES: True
ALLOW_NULL_PASSPHRASE: True
ANNOUNCEMENT_LANGUAGES: en_GB,en_GB_2,en_US,es_ES,es_ES_2,fr_FR,de_DE,dk_DK,it_IT,no_NO,pl_PL,se_SE
ANNOUNCEMENT_LANGUAGES:
SERVER_ID: 0
DATA_GATEWAY: False
[REPORTS]
REPORT: True
@ -57,7 +58,10 @@ TGID_FILE: talkgroup_ids.json
PEER_URL: https://www.radioid.net/static/rptrs.json
SUBSCRIBER_URL: https://www.radioid.net/static/users.json
TGID_URL: http://downloads.freedmr.uk/downloads/talkgroup_ids.json
STALE_DAYS: 7
STALE_DAYS: 1
SERVER_ID_URL: http://downloads.freedmr.uk/downloads/FreeDMR_Hosts.csv
SERVER_ID_FILE: server_ids.tsv
#Control server shared allstar instance via dial / AMI
[ALLSTAR]

@ -62,6 +62,8 @@ import ssl
from os.path import isfile, getmtime
from urllib.request import urlopen
import csv
logging.TRACE = 5
logging.addLevelName(logging.TRACE, 'TRACE')
@ -70,7 +72,7 @@ logging.trace = partial(logging.log, logging.TRACE)
# Does anybody read this stuff? There's a PEP somewhere that says I should do this.
__author__ = 'Cortney T. Buffington, N0MJS, Forked by Simon Adlem - G7RZU'
__copyright__ = 'Copyright (c) 2016-2019 Cortney T. Buffington, N0MJS and the K0USY Group, Simon Adlem, G7RZU 2020,2021'
__copyright__ = 'Copyright (c) 2016-2019 Cortney T. Buffington, N0MJS and the K0USY Group, Simon Adlem, G7RZU 2020,2021,2022'
__credits__ = 'Colin Durbridge, G4EML, Steve Zingman, N4IRS; Mike Zingman, N4IRR; Jonathan Naylor, G4KLX; Hans Barthen, DL5DI; Torsten Shultze, DG1HT; Jon Lee, G4TSN; Norman Williams, M6NBP'
__license__ = 'GNU GPLv3'
__maintainer__ = 'Simon Adlem G7RZU'
@ -89,7 +91,7 @@ def config_reports(_config, _factory):
logger.info('(GLOBAL) HBlink TCP reporting server configured')
report_server = _factory(_config)
report_server.clients = []
report_server.clients = deque()
reactor.listenTCP(_config['REPORTS']['REPORT_PORT'], report_server)
reporting = task.LoopingCall(reporting_loop, logger, report_server)
@ -254,14 +256,12 @@ class OPENBRIDGE(DatagramProtocol):
def send_bcve(self):
if self._config['ENHANCED_OBP'] and self._config['TARGET_IP']:
_packet = BCVE + VER.to_bytes(1,'big')
_packet = b''.join([BCVE,VER.to_bytes(1,'big')])
_packet = b''.join([_packet, (hmac_new(self._config['PASSPHRASE'],_packet[4:5],sha1).digest())])
self.transport.write(_packet, (self._config['TARGET_IP'], self._config['TARGET_PORT']))
logger.trace('(%s) *BridgeControl* sent BCVE. Ver: %s',self._system,VER)
else:
logger.trace('(%s) *BridgeControl* not sending BCVE, TARGET_IP currently not known',self._system)
def dmrd_received(self, _peer_id, _rf_src, _dst_id, _seq, _slot, _call_type, _frame_type, _dtype_vseq, _stream_id, _data,_hash,_hops = b'', _source_server = b'\x00\x00\x00\x00', _ber = b'\x00', _rssi = b'\x00', _source_rptr = b'\x00\x00\x00\x00'):
pass
@ -445,16 +445,25 @@ class OPENBRIDGE(DatagramProtocol):
#Discard old packets
if (int.from_bytes(_timestamp,'big')/1000000000) < (time() - 5):
logger.warning('(%s) Packet more than 5s old!, discarding', self._system)
if _stream_id not in self._laststrid:
logger.warning('(%s) Packet from server %s more than 5s old!, discarding', self._system,int.from_bytes(_source_server,'big'))
self.send_bcsq(_dst_id,_stream_id)
self._laststrid.append(_stream_id)
return
#Discard bad source server
if ((len(str(int.from_bytes(_source_server,'big'))) < 4) or (len(str(int.from_bytes(_source_server,'big'))) > 7)) and int.from_bytes(_source_server,'big') > 0:
if ((len(str(int.from_bytes(_source_server,'big'))) < 4) or (len(str(int.from_bytes(_source_server,'big'))) > 7)):
if _stream_id not in self._laststrid:
logger.warning('(%s) Source Server should be between 4 and 7 digits, discarding Src: %s', self._system, int.from_bytes(_source_server,'big'))
self.send_bcsq(_dst_id,_stream_id)
self._laststrid.append(_stream_id)
return
elif self._CONFIG['GLOBAL']['VALIDATE_SERVER_IDS'] and (len(str(int.from_bytes(_source_server,'big'))) == 4 or (len(str(int.from_bytes(_source_server,'big'))) == 5)) and ((str(int.from_bytes(_source_server,'big'))[:4]) not in self._CONFIG['_SERVER_IDS'] ):
if _stream_id not in self._laststrid:
logger.warning('(%s) Source Server ID is 4 or 5 digits but not in list: %s', self._system, int.from_bytes(_source_server,'big'))
self.send_bcsq(_dst_id,_stream_id)
self._laststrid.append(_stream_id)
return
elif len(str(int.from_bytes(_source_server,'big'))) > 5 and not self.validate_id(_source_server):
if _stream_id not in self._laststrid:
logger.warning('(%s) Source Server 6 or 7 digits but not a valid DMR ID, discarding Src: %s', self._system, int.from_bytes(_source_server,'big'))
@ -466,7 +475,7 @@ class OPENBRIDGE(DatagramProtocol):
_inthops = _hops +1
if _inthops > 10:
logger.warning('(%s) MAX HOPS exceed, dropping. Hops: %s, DST: %s', self._system, _inthops, _int_dst_id)
logger.warning('(%s) MAX HOPS exceed, dropping. Hops: %s, DST: %s, SRC: %s', self._system, _inthops, _int_dst_id, int.from_bytes(_source_server,'big'))
self.send_bcsq(_dst_id,_stream_id)
return
@ -753,7 +762,7 @@ class HBSYSTEM(DatagramProtocol):
# Aliased in __init__ to maintenance_loop if system is a master
def master_maintenance_loop(self):
logger.debug('(%s) Master maintenance loop started', self._system)
remove_list = []
remove_list = deque()
for peer in self._peers:
_this_peer = self._peers[peer]
# Check to see if any of the peers have been quiet (no ping) longer than allowed
@ -761,6 +770,8 @@ class HBSYSTEM(DatagramProtocol):
remove_list.append(peer)
for peer in remove_list:
logger.info('(%s) Peer %s (%s) has timed out and is being removed', self._system, self._peers[peer]['CALLSIGN'], self._peers[peer]['RADIO_ID'])
#First, MSTCL the peer
self.transport.write(b''.join([MSTCL, peer]),self._CONFIG['SYSTEMS'][self._system]['PEERS'][peer]['SOCKADDR'])
# Remove any timed out peers from the configuration
del self._CONFIG['SYSTEMS'][self._system]['PEERS'][peer]
if 'PEERS' not in self._CONFIG['SYSTEMS'][self._system] and 'OPTIONS' in self._CONFIG['SYSTEMS'][self._system]:
@ -768,8 +779,10 @@ class HBSYSTEM(DatagramProtocol):
if '_default_options' in self._CONFIG['SYSTEMS'][self._system]:
logger.info('(%s) Setting default Options: %s',self._system, self._CONFIG['SYSTEMS'][self._system]['_default_options'])
self._CONFIG['SYSTEMS'][self._system]['OPTIONS'] = self._CONFIG['SYSTEMS'][self._system]['_default_options']
self._CONFIG['SYSTEMS'][self._system]['_reset'] = True
else:
del self._CONFIG['SYSTEMS'][self._system]['OPTIONS']
logger.info('(%s) Deleting HBP Options',self._system)
# Aliased in __init__ to maintenance_loop if system is a peer
@ -859,11 +872,11 @@ class HBSYSTEM(DatagramProtocol):
def master_dereg(self):
for _peer in self._peers:
self.send_peer(_peer, MSTCL + _peer)
self.send_peer(_peer, b''.join([MSTCL,_peer]))
logger.info('(%s) De-Registration sent to Peer: %s (%s)', self._system, self._peers[_peer]['CALLSIGN'], self._peers[_peer]['RADIO_ID'])
def peer_dereg(self):
self.send_master(RPTCL + self._config['RADIO_ID'])
self.send_master(b''.join([RPTCL,self._config['RADIO_ID']]))
logger.info('(%s) De-Registration sent to Master: %s:%s', self._system, self._config['MASTER_SOCKADDR'][0], self._config['MASTER_SOCKADDR'][1])
def proxy_IPBlackList(self,peer_id,sockaddr):
@ -1066,9 +1079,11 @@ class HBSYSTEM(DatagramProtocol):
if '_default_options' in self._CONFIG['SYSTEMS'][self._system]:
self._CONFIG['SYSTEMS'][self._system]['OPTIONS'] = self._CONFIG['SYSTEMS'][self._system]['_default_options']
logger.info('(%s) Setting default Options: %s',self._system, self._CONFIG['SYSTEMS'][self._system]['_default_options'])
self._CONFIG['SYSTEMS'][self._system]['_reset'] = True
else:
logger.info('(%s) Deleting HBP Options',self._system)
del self._CONFIG['SYSTEMS'][self._system]['OPTIONS']
self._CONFIG['SYSTEMS'][self._system]['_reset'] = True
else:
_peer_id = _data[4:8] # Configure Command
@ -1361,9 +1376,9 @@ class reportFactory(Factory):
def try_download(_path, _file, _url, _stale,):
no_verify = ssl._create_unverified_context()
now = time()
file_exists = isfile(_path+_file) == True
file_exists = isfile(''.join([_path,_file])) == True
if file_exists:
file_old = (getmtime(_path+_file) + _stale) < now
file_old = (getmtime(''.join([_path,_file])) + _stale) < now
if not file_exists or (file_exists and file_old):
try:
with urlopen(_url, context=no_verify) as response:
@ -1373,21 +1388,44 @@ def try_download(_path, _file, _url, _stale,):
result = 'ID ALIAS MAPPER: \'{}\' successfully downloaded'.format(_file)
except IOError:
result = 'ID ALIAS MAPPER: \'{}\' could not be downloaded due to an IOError'.format(_file)
try:
with open(_path+_file, 'wb') as outfile:
outfile.write(data)
outfile.close()
except IOError:
result = 'ID ALIAS mapper \'{}\' file could not be written due to an IOError'.format(_file)
else:
if data and (data != b'{}'):
try:
with open(''.join([_path,_file]), 'wb') as outfile:
outfile.write(data)
outfile.close()
except IOError:
result = 'ID ALIAS mapper \'{}\' file could not be written due to an IOError'.format(_file)
else:
result = 'ID ALIAS mapper \'{}\' file not written because downloaded data is empty for some reason'.format(_file)
else:
result = 'ID ALIAS MAPPER: \'{}\' is current, not downloaded'.format(_file)
return result
#Read the known-server list from a tab-separated values (TSV) host file
#and map each server's 'OPB Net ID' column to its 'Country' column.
def mk_server_dict(path,filename):
    """Build the server-ID alias dictionary from a TSV host file.

    Args:
        path: directory prefix; the filename is appended with plain string
              concatenation, so it must already end with a path separator.
        filename: name of the TSV file (e.g. FreeDMR_Hosts download).

    Returns:
        dict mapping 'OPB Net ID' (str) to 'Country' (str) for each row;
        an empty dict if the file could not be read.
    """
    server_ids = {}
    try:
        with open(''.join([path,filename]),newline='') as csvfile:
            # File is tab separated despite the .csv heritage - use the
            # stdlib excel-tab dialect.
            reader = csv.DictReader(csvfile,dialect='excel-tab')
            for _row in reader:
                server_ids[_row['OPB Net ID']] = _row['Country']
        return(server_ids)
    except IOError as err:
        logger.warning('ID ALIAS MAPPER: %s could not be read due to IOError: %s',filename,err)
        # Return an empty dict rather than False so the function always
        # returns a dict; still falsy, so existing truthiness checks in
        # callers behave exactly as before.
        return(server_ids)
# ID ALIAS CREATION
# Download
def mk_aliases(_config):
peer_ids = {}
subscriber_ids = {}
local_subscriber_ids = {}
talkgroup_ids = {}
server_ids = {}
if _config['ALIASES']['TRY_DOWNLOAD'] == True:
# Try updating peer aliases file
result = try_download(_config['ALIASES']['PATH'], _config['ALIASES']['PEER_FILE'], _config['ALIASES']['PEER_URL'], _config['ALIASES']['STALE_TIME'])
@ -1398,32 +1436,53 @@ def mk_aliases(_config):
#Try updating tgid aliases file
result = try_download(_config['ALIASES']['PATH'], _config['ALIASES']['TGID_FILE'], _config['ALIASES']['TGID_URL'], _config['ALIASES']['STALE_TIME'])
logger.info('(ALIAS) %s', result)
#Try updating server ids file
result = try_download(_config['ALIASES']['PATH'], _config['ALIASES']['SERVER_ID_FILE'], _config['ALIASES']['SERVER_ID_URL'], _config['ALIASES']['STALE_TIME'])
logger.info('(ALIAS) %s', result)
# Make Dictionaries
peer_ids = mk_id_dict(_config['ALIASES']['PATH'], _config['ALIASES']['PEER_FILE'])
if peer_ids:
logger.info('(ALIAS) ID ALIAS MAPPER: peer_ids dictionary is available')
subscriber_ids = mk_id_dict(_config['ALIASES']['PATH'], _config['ALIASES']['SUBSCRIBER_FILE'])
#Add special IDs to DB
subscriber_ids[900999] = 'D-APRS'
subscriber_ids[4294967295] = 'SC'
if subscriber_ids:
logger.info('(ALIAS) ID ALIAS MAPPER: subscriber_ids dictionary is available')
try:
peer_ids = mk_id_dict(_config['ALIASES']['PATH'], _config['ALIASES']['PEER_FILE'])
except Exception as e:
logger.error('(ALIAS) ID ALIAS MAPPER: problem with data in peer_ids dictionary, not updating: %s',e)
else:
if peer_ids:
logger.info('(ALIAS) ID ALIAS MAPPER: peer_ids dictionary is available')
talkgroup_ids = mk_id_dict(_config['ALIASES']['PATH'], _config['ALIASES']['TGID_FILE'])
if talkgroup_ids:
logger.info('(ALIAS) ID ALIAS MAPPER: talkgroup_ids dictionary is available')
local_subscriber_ids = mk_id_dict(_config['ALIASES']['PATH'], _config['ALIASES']['LOCAL_SUBSCRIBER_FILE'])
if subscriber_ids:
logger.info('(ALIAS) ID ALIAS MAPPER: local_subscriber_ids dictionary is available')
try:
subscriber_ids = mk_id_dict(_config['ALIASES']['PATH'], _config['ALIASES']['SUBSCRIBER_FILE'])
except Exception as e:
logger.info('(ALIAS) ID ALIAS MAPPER: problem with data in subscriber_ids dictionary, not updating: %s',e)
else:
#Add special IDs to DB
subscriber_ids[900999] = 'D-APRS'
subscriber_ids[4294967295] = 'SC'
if subscriber_ids:
logger.info('(ALIAS) ID ALIAS MAPPER: subscriber_ids dictionary is available')
try:
talkgroup_ids = mk_id_dict(_config['ALIASES']['PATH'], _config['ALIASES']['TGID_FILE'])
except Exception as e:
logger.info('(ALIAS) ID ALIAS MAPPER: problem with data in talkgroup_ids dictionary, not updating: %s',e)
else:
if talkgroup_ids:
logger.info('(ALIAS) ID ALIAS MAPPER: talkgroup_ids dictionary is available')
try:
local_subscriber_ids = mk_id_dict(_config['ALIASES']['PATH'], _config['ALIASES']['LOCAL_SUBSCRIBER_FILE'])
except Exception as e:
logger.info('(ALIAS) ID ALIAS MAPPER: problem with data in local_subscriber_ids dictionary, not updating: %s',e)
else:
if subscriber_ids:
logger.info('(ALIAS) ID ALIAS MAPPER: local_subscriber_ids dictionary is available')
try:
server_ids = mk_server_dict(_config['ALIASES']['PATH'], _config['ALIASES']['SERVER_ID_FILE'])
except Exception as e:
logger.info('(ALIAS) ID ALIAS MAPPER: problem with data in server_ids dictionary, not updating: %s',e)
if server_ids:
logger.info('(ALIAS) ID ALIAS MAPPER: server_ids dictionary is available')
return peer_ids, subscriber_ids, talkgroup_ids, local_subscriber_ids
return peer_ids, subscriber_ids, talkgroup_ids, local_subscriber_ids, server_ids
#************************************************

@ -71,6 +71,10 @@ class Proxy(DatagramProtocol):
if self.clientinfo and _peer_id != b'\xff\xff\xff\xff':
print(f"{datetime.now().replace(microsecond=0)} Client: ID:{str(int_id(_peer_id)).rjust(9)} IP:{self.peerTrack[_peer_id]['shost'].rjust(15)} Port:{self.peerTrack[_peer_id]['sport']} Removed.")
self.transport.write(b'RPTCL'+_peer_id, (self.master,self.peerTrack[_peer_id]['dport']))
#Tell client we have closed the session - 3 times, in case they are on a lossy network
self.transport.write(b'MSTCL',(self.peerTrack[_peer_id]['shost'],self.peerTrack[_peer_id]['sport']))
self.transport.write(b'MSTCL',(self.peerTrack[_peer_id]['shost'],self.peerTrack[_peer_id]['sport']))
self.transport.write(b'MSTCL',(self.peerTrack[_peer_id]['shost'],self.peerTrack[_peer_id]['sport']))
self.connTrack[self.peerTrack[_peer_id]['dport']] = False
del self.peerTrack[_peer_id]

@ -50,6 +50,7 @@ ALLOW_NULL_PASSPHRASE: False
ANNOUNCEMENT_LANGUAGES: es_ES
SERVER_ID: 9990
DATA_GATEWAY: False
VALIDATE_SERVER_IDS: False
@ -113,16 +114,9 @@ TGID_URL: http://downloads.freedmr.uk/downloads/talkgroup_ids.json
LOCAL_SUBSCRIBER_FILE: local_subscriber_ids.json
STALE_DAYS: 7
SUB_MAP_FILE:
SERVER_ID_URL: http://downloads.freedmr.uk/downloads/FreeDMR_Hosts.csv
SERVER_ID_FILE: server_ids.tsv
#Read further repeater configs from MySQL
[MYSQL]
USE_MYSQL: False
USER: hblink
PASS: mypassword
DB: hblink
SERVER: 127.0.0.1
PORT: 3306
TABLE: repeaters
# OPENBRIDGE INSTANCES - DUPLICATE SECTION FOR MULTIPLE CONNECTIONS
# OpenBridge is a protocol originally created by DMR+ for connection between an
@ -193,4 +187,5 @@ GENERATOR: 1
ANNOUNCEMENT_LANGUAGE:es_ES
ALLOW_UNREG_ID: True
PROXY_CONTROL: False
OVERRIDE_IDENT_TG:

@ -1,107 +0,0 @@
import mysql.connector
from mysql.connector import errorcode
#import mysql.connector.pooling
# Does anybody read this stuff? There's a PEP somewhere that says I should do this.
__author__ = 'Simon Adlem - G7RZU'
__copyright__ = 'Copyright (c) Simon Adlem, G7RZU 2020,2021'
__credits__ = 'Colin Durbridge, G4EML, Steve Zingman, N4IRS; Mike Zingman, N4IRR; Jonathan Naylor, G4KLX; Hans Barthen, DL5DI; Torsten Shultze, DG1HT; Jon Lee, G4TSN; Norman Williams, M6NBP'
__license__ = 'GNU GPLv3'
__maintainer__ = 'Simon Adlem G7RZU'
__email__ = 'simon@gb7fr.org.uk'
class useMYSQL:
    """Wrapper around mysql.connector for reading FreeDMR repeater
    (MODE='MASTER') system definitions from a MySQL table.

    Typical use: construct, call con(), then getConfig(), then close().
    """

    #Init new object
    def __init__(self, server,user,password,database,table,logger):
        """Store connection parameters; no connection is opened here."""
        self.server = server
        self.user = user
        self.password = password
        self.database = database
        self.table = table
        self.logger = logger

    #Connect
    def con(self):
        """Open the database connection.

        Returns:
            True on success; False on any connection error (the specific
            failure is reported via the logger supplied at construction).
        """
        logger = self.logger
        try:
            self.db = mysql.connector.connect(
                host=self.server,
                user=self.user,
                password=self.password,
                database=self.database,
            )
        except mysql.connector.Error as err:
            if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
                logger.info('(MYSQL) username or password error')
                return (False)
            elif err.errno == errorcode.ER_BAD_DB_ERROR:
                logger.info('(MYSQL) DB Error')
                return (False)
            else:
                logger.info('(MYSQL) error: %s',err)
                return(False)
        return(True)

    #Close DB connection
    def close(self):
        """Close the database connection opened by con()."""
        self.db.close()

    #Get config from DB
    def getConfig(self):
        """Read all MODE='MASTER' rows and return them as a SYSTEMS dict.

        Returns:
            dict keyed by callsign; each value is a dict of system settings
            in the shape expected by the main configuration loader.

        Raises:
            Exception: if the SELECT itself fails (after logging).
        """
        # Bug fix: the original referenced a bare 'logger' name in this
        # method, which is undefined in this module and raised NameError on
        # every error path - bind the instance logger locally instead.
        logger = self.logger
        CONFIG = {}
        CONFIG['SYSTEMS'] = {}
        _cursor = self.db.cursor()
        try:
            _cursor.execute("select * from {} where MODE='MASTER'".format(self.table))
        except mysql.connector.Error as err:
            _cursor.close()
            logger.info('(MYSQL) error, problem with cursor execute')
            raise Exception('Problem with cursor execute') from err
        for (callsign, mode, enabled, _repeat, max_peers, export_ambe, ip, port, passphrase, group_hangtime, use_acl, reg_acl, sub_acl, tgid_ts1_acl, tgid_ts2_acl, default_ua_timer, single_mode, voice_ident,ts1_static,ts2_static,default_reflector, announce_lang) in _cursor:
            try:
                CONFIG['SYSTEMS'].update({callsign: {
                    'MODE': mode,
                    'ENABLED': bool(enabled),
                    'REPEAT': bool(_repeat),
                    'MAX_PEERS': int(max_peers),
                    'IP': ip,
                    'PORT': int(port),
                    'PASSPHRASE': bytes(passphrase, 'utf-8'),
                    'GROUP_HANGTIME': int(group_hangtime),
                    'USE_ACL': bool(use_acl),
                    'REG_ACL': reg_acl,
                    'SUB_ACL': sub_acl,
                    'TG1_ACL': tgid_ts1_acl,
                    'TG2_ACL': tgid_ts2_acl,
                    'DEFAULT_UA_TIMER': int(default_ua_timer),
                    'SINGLE_MODE': bool(single_mode),
                    'VOICE_IDENT': bool(voice_ident),
                    'TS1_STATIC': ts1_static,
                    'TS2_STATIC': ts2_static,
                    'DEFAULT_REFLECTOR': int(default_reflector),
                    'GENERATOR': int(1),
                    'ANNOUNCEMENT_LANGUAGE': announce_lang
                    }})
                CONFIG['SYSTEMS'][callsign].update({'PEERS': {}})
            except TypeError:
                # Skip malformed rows rather than aborting the whole load.
                logger.info('(MYSQL) Problem with data from MySQL - TypeError, carrying on to next row')
        # Close the cursor on the success path too - the original leaked it.
        _cursor.close()
        return(CONFIG['SYSTEMS'])
#For testing: run this module directly to open a connection with
#placeholder credentials and dump the resulting SYSTEMS config.
if __name__ == '__main__':
    import logging
    logging.basicConfig(level=logging.INFO)
    # Bug fix: the original called useMYSQL with only four arguments, but
    # __init__ requires six (server, user, password, database, table,
    # logger) - it raised TypeError before ever touching the database.
    sql = useMYSQL("ip","user","pass","db","repeaters",logging.getLogger(__name__))
    # Only query if the connection actually succeeded; con() returns False
    # on failure and self.db would not exist.
    if sql.con():
        print( sql.getConfig())

@ -50,6 +50,7 @@ ALLOW_NULL_PASSPHRASE: False
ANNOUNCEMENT_LANGUAGES: es_ES
SERVER_ID: 9990
DATA_GATEWAY: False
VALIDATE_SERVER_IDS: False
@ -113,6 +114,8 @@ TGID_URL: http://downloads.freedmr.uk/downloads/talkgroup_ids.json
LOCAL_SUBSCRIBER_FILE: local_subscriber_ids.json
STALE_DAYS: 7
SUB_MAP_FILE:
SERVER_ID_FILE:
SERVER_ID_URL:
#Read further repeater configs from MySQL
[MYSQL]

@ -76,8 +76,8 @@ def playFileOnRequest(system,fileName,dstTG,subid):
_say.append(AMBEobj.readSingleFile(fileName))
_say.append(SILENCE)
_say.append(SILENCE)
except IOError:
logger.warning('(%s) cannot read file %s',system,fileName)
except IOError as err:
logger.warning('(%s) cannot read file %s: %s',system,fileName,err)
return
speech = pkt_gen(_source_id, _dst_id, bytes_4(5000), 0, _say)
sleep(1)

@ -27,6 +27,8 @@ from twisted.protocols.basic import NetstringReceiver
from reporting_const import *
from pprint import pprint
class reportClient(NetstringReceiver):
def stringReceived(self, data):
@ -34,9 +36,11 @@ class reportClient(NetstringReceiver):
if data[:1] == REPORT_OPCODES['BRDG_EVENT']:
self.bridgeEvent(data[1:].decode('UTF-8'))
elif data[:1] == REPORT_OPCODES['CONFIG_SND']:
self.configSend(data[1:])
if cli_args.CONFIG:
self.configSend(data[1:])
elif data[:1] == REPORT_OPCODES['BRIDGE_SND']:
self.bridgeSend(data[1:])
if cli_args.BRIDGES:
self.bridgeSend(data[1:])
elif data == b'bridge updated':
pass
else:
@ -60,15 +64,25 @@ class reportClient(NetstringReceiver):
if len(datalist) > 9:
event['duration'] = datalist[9]
print(event)
if cli_args.EVENTS:
pprint(event, compact=True)
def bridgeSend(self,data):
self.BRIDGES = pickle.loads(data)
print(self.BRIDGES)
if cli_args.STATS:
print('There are currently {} active bridges in the bridge table:\n'.format(len(self.BRIDGES)))
for _bridge in self.BRIDGES.keys():
print('{},'.format({str(_bridge)}))
else:
if cli_args.WATCH and cli_args.WATCH in self.BRIDGES:
pprint(self.BRIDGES[cli_args.WATCH], compact=True)
else:
pprint(self.BRIDGES, compact=True, indent=4)
def configSend(self,data):
self.CONFIG = pickle.loads(data)
print(self.CONFIG)
pprint(self.CONFIG, compact=True)
@ -100,6 +114,7 @@ if __name__ == '__main__':
import signal
import sys
import os
import argparse
#Set process title early
setproctitle(__file__)
@ -110,10 +125,22 @@ if __name__ == '__main__':
def sig_handler(_signal, _frame):
print('SHUTDOWN: TERMINATING WITH SIGNAL {}'.format(str(_signal)))
reactor.stop()
# CLI argument parser - handles picking up the config file from the command line, and sending a "help" message
parser = argparse.ArgumentParser()
parser.add_argument('-e', '--events', action='store', dest='EVENTS', help='print events [0|1]')
parser.add_argument('-c', '--config', action='store', dest='CONFIG', help='print config [0|1]')
parser.add_argument('-b', '--bridges', action='store', dest='BRIDGES', help='print bridges [0|1]')
parser.add_argument('-w', '--watch', action='store', dest='WATCH', help='watch bridge <name>')
parser.add_argument('-o', '--host', action='store', dest='HOST', help='host to connect to <ip address>')
parser.add_argument('-p', '--port', action='store', dest='PORT', help='port to connect to <port>')
parser.add_argument('-s', '--stats', action='store', dest='STATS', help='print stats only')
cli_args = parser.parse_args()
    # Set signal handlers so that we can gracefully exit if need be
for sig in [signal.SIGINT, signal.SIGTERM]:
signal.signal(sig, sig_handler)
reactor.connectTCP(sys.argv[1],int(sys.argv[2]), reportClientFactory(reportClient))
reactor.connectTCP(cli_args.HOST,int(cli_args.PORT), reportClientFactory(reportClient))
reactor.run()

@ -3,6 +3,5 @@ bitarray>=0.8.1
Twisted>=16.3.0
dmr_utils3>=0.1.19
configparser>=3.0.0
mysql-connector
resettabletimer>=0.7.0
setproctitle

Loading…
Cancel
Save

Powered by TurnKey Linux.