Better handling of downloaded files - including checksum checking.

testing
Simon 3 years ago
parent ab56871eea
commit 3fcd75e711

@ -39,11 +39,8 @@ build-extrastats: # This job runs in the build stage, which runs first.
- buildx
script:
- echo "Compiling the code..."
- cd docker-configs
- docker buildx build --no-cache -f Dockerfile-proxy -t hacknix/freedmr:extrastats-development-latest -t gitlab.hacknix.net:5050/hacknix/freedmr:extrastats-development-latest --platform linux/arm64 --push .
- docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
- docker buildx build --no-cache -f Dockerfile-ci -t hacknix/freedmr:extrastats-development-latest -t gitlab.hacknix.net:5050/hacknix/freedmr:extrastats-development-latest --platform linux/amd64 --push .
- echo "Compile complete."
only:
- extrastats2
@ -54,8 +51,8 @@ build-testing: # This job runs in the build stage, which runs first.
- buildx
script:
- echo "Compiling the code..."
- cd docker-configs
- docker buildx build --no-cache -f Dockerfile-proxy -t gitlab.hacknix.net:5050/hacknix/freedmr:testing --platform linux/amd64 --push .
- docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
- docker buildx build --no-cache -f Dockerfile-ci -t gitlab.hacknix.net:5050/hacknix/freedmr:testing --platform linux/amd64 --push .
only:
- testing
@ -65,8 +62,8 @@ build-debug: # This job runs in the build stage, which runs first.
- buildx
script:
- echo "Compiling the code..."
- cd docker-configs
- docker buildx build --no-cache -f Dockerfile-proxy -t gitlab.hacknix.net:5050/hacknix/freedmr:debug --platform linux/amd64 --push .
- docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
- docker buildx build --no-cache -f Dockerfile-ci -t gitlab.hacknix.net:5050/hacknix/freedmr:debug --platform linux/amd64 --push .
only:
- debug

@ -33,14 +33,16 @@ PATH: ./
PEER_FILE: peer_ids.json
SUBSCRIBER_FILE: subscriber_ids.json
TGID_FILE: talkgroup_ids.json
PEER_URL: https://www.radioid.net/static/rptrs.json
SUBSCRIBER_URL: https://www.radioid.net/static/users.json
TGID_URL: http://downloads.freedmr.uk/downloads/talkgroup_ids.json
PEER_URL: https://freedmr-lh.gb7fr.org.uk/json/peer_ids.json
SUBSCRIBER_URL: https://freedmr-lh.gb7fr.org.uk/json/subscriber_ids.json
TGID_URL: https://freedmr-lh.gb7fr.org.uk/json/talkgroup_ids.json
LOCAL_SUBSCRIBER_FILE: local_subscriber_ids.json
STALE_DAYS: 1
SUB_MAP_FILE:
SERVER_ID_URL: http://downloads.freedmr.uk/downloads/FreeDMR_Hosts.csv
SERVER_ID_URL: https://freedmr-lh.gb7fr.org.uk/json/server_ids.tsv
SERVER_ID_FILE: server_ids.tsv
CHECKSUM_URL: https://freedmr-lh.gb7fr.org.uk/file_checksums.json
CHECKSUM_FILE: file_checksums.json
#Control server shared allstar instance via dial / AMI

@ -941,7 +941,7 @@ if __name__ == '__main__':
signal.signal(sig, sig_handler)
# Create the name-number mapping dictionaries
peer_ids, subscriber_ids, talkgroup_ids, local_subscriber_ids,server_ids = mk_aliases(CONFIG)
peer_ids, subscriber_ids, talkgroup_ids, local_subscriber_ids,_server_ids,_checksums = mk_aliases(CONFIG)
#Add special IDs to DB
subscriber_ids[900999] = 'D-APRS'

@ -648,12 +648,12 @@ def threadAlias():
logger.debug('(ALIAS) starting alias thread')
reactor.callInThread(aliasb)
def setAlias(_peer_ids,_subscriber_ids, _talkgroup_ids, _local_subscriber_ids, _server_ids):
peer_ids, subscriber_ids, talkgroup_ids,local_subscriber_ids,server_ids = _peer_ids, _subscriber_ids, _talkgroup_ids, _local_subscriber_ids,_server_ids
def setAlias(_peer_ids, _subscriber_ids, _talkgroup_ids, _local_subscriber_ids, _server_ids, _checksums):
    """Install freshly built alias dictionaries as the module-level copies.

    Invoked via reactor.callFromThread() from the alias-refresh thread so
    the swap happens on the reactor thread.
    NOTE(review): no ``global`` statement was visible in the diff hunk; as
    written the assignments would only bind locals and the refreshed data
    would be silently discarded, so the declaration is added here — confirm
    these names are module-level globals in the full file.
    """
    global peer_ids, subscriber_ids, talkgroup_ids, local_subscriber_ids, server_ids, checksums
    peer_ids, subscriber_ids, talkgroup_ids, local_subscriber_ids, server_ids, checksums = (
        _peer_ids, _subscriber_ids, _talkgroup_ids, _local_subscriber_ids, _server_ids, _checksums)
def aliasb():
_peer_ids, _subscriber_ids, _talkgroup_ids, _local_subscriber_ids, _server_ids = mk_aliases(CONFIG)
reactor.callFromThread(setAlias,_peer_ids, _subscriber_ids, _talkgroup_ids, _local_subscriber_ids, _server_ids)
_peer_ids, _subscriber_ids, _talkgroup_ids, _local_subscriber_ids, _server_ids, _checksums = mk_aliases(CONFIG)
reactor.callFromThread(setAlias,_peer_ids, _subscriber_ids, _talkgroup_ids, _local_subscriber_ids, _server_ids, _checksums)
def ident():
for system in systems:
@ -2560,7 +2560,7 @@ if __name__ == '__main__':
signal.signal(sig, sig_handler)
# Create the name-number mapping dictionaries
peer_ids, subscriber_ids, talkgroup_ids, local_subscriber_ids, server_ids = mk_aliases(CONFIG)
peer_ids, subscriber_ids, talkgroup_ids, local_subscriber_ids, server_ids, checksums = mk_aliases(CONFIG)
#Add special IDs to DB
subscriber_ids[900999] = 'D-APRS'
@ -2570,6 +2570,7 @@ if __name__ == '__main__':
CONFIG['_PEER_IDS'] = peer_ids
CONFIG['_LOCAL_SUBSCRIBER_IDS'] = local_subscriber_ids
CONFIG['_SERVER_IDS'] = server_ids
CONFIG['CHECKSUMS'] = checksums

@ -185,7 +185,10 @@ def build_config(_config_file):
'SUB_MAP_FILE': config.get(section, 'SUB_MAP_FILE'),
'LOCAL_SUBSCRIBER_FILE': config.get(section, 'LOCAL_SUBSCRIBER_FILE'),
'SERVER_ID_URL': config.get(section, 'SERVER_ID_URL'),
'SERVER_ID_FILE': config.get(section, 'SERVER_ID_FILE')
'SERVER_ID_FILE': config.get(section, 'SERVER_ID_FILE'),
'CHECKSUM_URL': config.get(section, 'CHECKSUM_URL'),
'CHECKSUM_FILE': config.get(section, 'CHECKSUM_FILE')
})

@ -105,16 +105,16 @@ PATH: ./json/
PEER_FILE: peer_ids.json
SUBSCRIBER_FILE: subscriber_ids.json
TGID_FILE: talkgroup_ids.json
PEER_URL: http://freedmr-lh.gb7fr.org.uk/json/peer_ids.json
SUBSCRIBER_URL: http://freedmr-lh.gb7fr.org.uk/json/subscriber_ids.json
TGID_URL: http://freedmr-lh.gb7fr.org.uk/json/talkgroup_ids.json
LOCAL_SUBSCRIBER_FILE: local_subscriber_ids.json
PEER_URL: https://freedmr-lh.gb7fr.org.uk/json/peer_ids.json
SUBSCRIBER_URL: https://freedmr-lh.gb7fr.org.uk/json/subscriber_ids.json
TGID_URL: https://freedmr-lh.gb7fr.org.uk/json/talkgroup_ids.json
LOCAL_SUBSCRIBER_FILE: local_subscriber_ids.json
STALE_DAYS: 1
SUB_MAP_FILE: sub_map.pkl
SERVER_ID_URL: http://freedmr-lh.gb7fr.org.uk/json/server_ids.tsv
SERVER_ID_URL: https://freedmr-lh.gb7fr.org.uk/json/server_ids.tsv
SERVER_ID_FILE: server_ids.tsv
TOPO_FILE: topography.json
CHECKSUM_URL: https://freedmr-lh.gb7fr.org.uk/file_checksums.json
CHECKSUM_FILE: file_checksums.json
#Control server shared allstar instance via dial / AMI
[ALLSTAR]

@ -45,7 +45,7 @@ from twisted.internet import reactor, task
import log
import config
from const import *
from utils import mk_id_dict, try_download
from utils import mk_id_dict, try_download,load_json,blake2bsum
from dmr_utils3.utils import int_id, bytes_4
# Imports for the reporting server
@ -1391,9 +1391,9 @@ def mk_server_dict(path,filename):
for _row in reader:
server_ids[_row['OPB Net ID']] = _row['Country']
return(server_ids)
except IOError as err:
logger.warning('ID ALIAS MAPPER: %s could not be read due to IOError: %s',filename,err)
return(False)
except Exception as err:
logger.warning('ID ALIAS MAPPER: %s could not be read: %s',filename,err)
raise(err)
# ID ALIAS CREATION
@ -1404,7 +1404,19 @@ def mk_aliases(_config):
local_subscriber_ids = {}
talkgroup_ids = {}
server_ids = {}
checksums = {}
if _config['ALIASES']['TRY_DOWNLOAD'] == True:
#Try updating checksum file
if _config['ALIASES']['CHECKSUM_FILE'] and _config['ALIASES']['CHECKSUM_URL']:
result = try_download(_config['ALIASES']['PATH'], _config['ALIASES']['CHECKSUM_FILE'], _config['ALIASES']['CHECKSUM_URL'], _config['ALIASES']['STALE_TIME'])
logger.info('(ALIAS) %s', result)
try:
checksums = load_json(''.join([_config['ALIASES']['PATH'], _config['ALIASES']['CHECKSUM_FILE']]))
except Exception as e:
logger.error('(ALIAS) ID ALIAS MAPPER: Cannot load checksums: %s',e)
# Try updating peer aliases file
result = try_download(_config['ALIASES']['PATH'], _config['ALIASES']['PEER_FILE'], _config['ALIASES']['PEER_URL'], _config['ALIASES']['STALE_TIME'])
logger.info('(ALIAS) %s', result)
@ -1418,29 +1430,47 @@ def mk_aliases(_config):
result = try_download(_config['ALIASES']['PATH'], _config['ALIASES']['SERVER_ID_FILE'], _config['ALIASES']['SERVER_ID_URL'], _config['ALIASES']['STALE_TIME'])
logger.info('(ALIAS) %s', result)
# Make Dictionaries
#Peer IDs
try:
peer_ids = mk_id_dict(_config['ALIASES']['PATH'], _config['ALIASES']['PEER_FILE'])
if exists(_config['ALIASES']['PATH'] + _config['ALIASES']['PEER_FILE'] + '.bak') and (getsize(_config['ALIASES']['PATH'] + _config['ALIASES']['PEER_FILE'] + '.bak') > getsize(_config['ALIASES']['PATH'] + _config['ALIASES']['PEER_FILE'])):
raise Exception('backup peer_ids file is larger than new file')
try:
if blake2bsum(''.join([_config['ALIASES']['PATH'], _config['ALIASES']['PEER_FILE']])) != checksums['peer_ids']:
raise(Exception('bad checksum'))
except Exception as e:
logger.error('(ALIAS) ID ALIAS MAPPER: problem with blake2bsum of peer_ids file. not updating.: %s',e)
else:
peer_ids = mk_id_dict(_config['ALIASES']['PATH'], _config['ALIASES']['PEER_FILE'])
except Exception as e:
logger.error('(ALIAS) ID ALIAS MAPPER: problem with data in peer_ids dictionary, not updating: %s',e)
logger.info('(ALIAS) ID ALIAS MAPPER: problem with data in peer_ids dictionary, not updating: %s',e)
try:
peer_ids = mk_id_dict(_config['ALIASES']['PATH'], _config['ALIASES']['PEER_FILE'] + '.bak')
except Exception as f:
logger.error('(ALIAS) ID ALIAS MAPPER: Tried backup peer_ids file, but couldn\'t load that either: %s',f)
else:
if peer_ids:
logger.info('(ALIAS) ID ALIAS MAPPER: peer_ids dictionary is available')
try:
shutil.copy(_config['ALIASES']['PATH'] + _config['ALIASES']['PEER_FILE'],_config['ALIASES']['PATH'] + _config['ALIASES']['PEER_FILE'] + '.bak')
except IOError as g:
try:
shutil.copy(_config['ALIASES']['PATH'] + _config['ALIASES']['PEER_FILE'],_config['ALIASES']['PATH'] + _config['ALIASES']['PEER_FILE'] + '.bak')
except IOError as g:
logger.info('(ALIAS) ID ALIAS MAPPER: couldn\'t make backup copy of peer_ids file %s',g)
#Subscriber IDs
try:
if exists(_config['ALIASES']['PATH'] + _config['ALIASES']['SUBSCRIBER_FILE'] + '.bak') and (getsize(_config['ALIASES']['PATH'] + _config['ALIASES']['SUBSCRIBER_FILE'] + '.bak') > getsize(_config['ALIASES']['PATH'] + _config['ALIASES']['SUBSCRIBER_FILE'])):
raise Exception('backup subscriber_ids file is larger than new file')
subscriber_ids = mk_id_dict(_config['ALIASES']['PATH'], _config['ALIASES']['SUBSCRIBER_FILE'])
try:
if blake2bsum(''.join([_config['ALIASES']['PATH'], _config['ALIASES']['SUBSCRIBER_FILE']])) != checksums['subscriber_ids']:
raise(Exception('bad checksum'))
except Exception as e:
logger.error('(ALIAS) ID ALIAS MAPPER: problem with blake2bsum of subscriber_ids file. not updating.: %s',e)
else:
subscriber_ids = mk_id_dict(_config['ALIASES']['PATH'], _config['ALIASES']['SUBSCRIBER_FILE'])
except Exception as e:
logger.info('(ALIAS) ID ALIAS MAPPER: problem with data in subscriber_ids dictionary, not updating: %s',e)
try:
@ -1461,22 +1491,34 @@ def mk_aliases(_config):
except IOError as g:
logger.info('(ALIAS) ID ALIAS MAPPER: couldn\'t make backup copy of subscriber_ids file %s',g)
#Talkgroup IDs
try:
talkgroup_ids = mk_id_dict(_config['ALIASES']['PATH'], _config['ALIASES']['TGID_FILE'])
if exists(_config['ALIASES']['PATH'] + _config['ALIASES']['TGID_FILE'] + '.bak') and (getsize(_config['ALIASES']['PATH'] + _config['ALIASES']['TGID_FILE'] + '.bak') > getsize(_config['ALIASES']['PATH'] + _config['ALIASES']['TGID_FILE'])):
raise Exception('backup talkgroup_ids file is larger than new file')
try:
if blake2bsum(''.join([_config['ALIASES']['PATH'], _config['ALIASES']['TGID_FILE']])) != checksums['talkgroup_ids']:
raise(Exception('bad checksum'))
except Exception as e:
logger.error('(ALIAS) ID ALIAS MAPPER: problem with blake2bsum of talkgroup_ids file. not updating.: %s',e)
else:
talkgroup_ids = mk_id_dict(_config['ALIASES']['PATH'], _config['ALIASES']['TGID_FILE'])
except Exception as e:
logger.info('(ALIAS) ID ALIAS MAPPER: problem with data in talkgroup_ids dictionary, not updating: %s',e)
try:
talkgroup_ids = mk_id_dict(_config['ALIASES']['PATH'], _config['ALIASES']['PEER_FILE'] + '.bak')
talkgroup_ids = mk_id_dict(_config['ALIASES']['PATH'], _config['ALIASES']['TGID_FILE'] + '.bak')
except Exception as f:
logger.error('(ALIAS) ID ALIAS MAPPER: Tried backup talkgroup_ids file, but couldn\'t load that either: %s',f)
else:
if talkgroup_ids:
logger.info('(ALIAS) ID ALIAS MAPPER: talkgroup_ids dictionary is available')
try:
shutil.copy(_config['ALIASES']['PATH'] + _config['ALIASES']['TGID_FILE'],_config['ALIASES']['PATH'] + _config['ALIASES']['TGID_FILE'] + '.bak')
except IOError as g:
try:
shutil.copy(_config['ALIASES']['PATH'] + _config['ALIASES']['TGID_FILE'],_config['ALIASES']['PATH'] + _config['ALIASES']['TGID_FILE'] + '.bak')
except IOError as g:
logger.info('(ALIAS) ID ALIAS MAPPER: couldn\'t make backup copy of talkgroup_ids file %s',g)
#Local subscriber override
if exists(_config['ALIASES']['PATH'] + _config['ALIASES']['LOCAL_SUBSCRIBER_FILE']):
try:
local_subscriber_ids = mk_id_dict(_config['ALIASES']['PATH'], _config['ALIASES']['LOCAL_SUBSCRIBER_FILE'])
@ -1496,8 +1538,16 @@ def mk_aliases(_config):
else:
logger.info('(ALIAS) ID ALIAS MAPPER: local subscriber file does not exist and is optional, skipping. ')
#Server IDs
try:
server_ids = mk_server_dict(_config['ALIASES']['PATH'], _config['ALIASES']['SERVER_ID_FILE'])
try:
if blake2bsum(''.join([_config['ALIASES']['PATH'], _config['ALIASES']['SERVER_ID_FILE']])) != checksums['server_ids']:
raise(Exception('bad checksum'))
except Exception as e:
logger.error('(ALIAS) ID ALIAS MAPPER: problem with blake2bsum of server_ids file: %s',e)
raise(e)
else:
server_ids = mk_server_dict(_config['ALIASES']['PATH'], _config['ALIASES']['SERVER_ID_FILE'])
except Exception as e:
logger.info('(ALIAS) ID ALIAS MAPPER: problem with data in server_ids dictionary, not updating: %s',e)
try:
@ -1513,7 +1563,7 @@ def mk_aliases(_config):
logger.info('(ALIAS) ID ALIAS MAPPER: couldn\'t make backup copy of server_ids file %s',g)
return peer_ids, subscriber_ids, talkgroup_ids, local_subscriber_ids, server_ids
return peer_ids, subscriber_ids, talkgroup_ids, local_subscriber_ids, server_ids, checksums
#************************************************

@ -27,6 +27,7 @@ from time import time
from os.path import isfile, getmtime
from urllib.request import urlopen
from json import load as jload
import hashlib
@ -81,4 +82,24 @@ def mk_id_dict(_path, _file):
except:
raise
#Read JSON from file
def load_json(filename):
    """Read *filename* and return the parsed JSON object.

    Errors propagate to the caller: OSError if the file cannot be opened,
    json.JSONDecodeError if the contents are not valid JSON.  (The previous
    bare ``except: raise`` wrapper was a no-op and has been dropped.)
    """
    with open(filename) as f:
        return jload(f)
# Calculate the BLAKE2b checksum of a file.
def blake2bsum(filename):
    """Return the hexadecimal BLAKE2b digest of *filename*.

    The file is read in 4 KiB chunks so arbitrarily large downloads can be
    checksummed without loading them fully into memory.  Errors from
    open()/read() propagate to the caller (the previous bare
    ``except: raise`` was a no-op and has been dropped).
    """
    digest = hashlib.blake2b()
    with open(filename, "rb") as f:
        for chunk in iter(lambda: f.read(4096), b""):
            digest.update(chunk)
    return digest.hexdigest()

Loading…
Cancel
Save

Powered by TurnKey Linux.