core implementation
code bomb implementing the bulk of the osmo-gsm-tester Change-Id: I53610becbf643ed51b90cfd9debc6992fe211ec9
This commit is contained in:
parent
dae3d3c479
commit
3531a192ae
|
@ -4,3 +4,6 @@ pid
|
|||
version
|
||||
_version.py
|
||||
tags
|
||||
set_pythonpath
|
||||
test_work
|
||||
state
|
||||
|
|
2
Makefile
2
Makefile
|
@ -9,7 +9,7 @@ version:
|
|||
./update_version.sh
|
||||
|
||||
check:
|
||||
$(MAKE) -C test check
|
||||
$(MAKE) -C selftest check
|
||||
@echo "make check: success"
|
||||
|
||||
# vim: noexpandtab tabstop=8 shiftwidth=8
|
||||
|
|
|
@ -22,5 +22,6 @@ import tempfile
|
|||
import time
|
||||
import traceback
|
||||
import yaml
|
||||
import pydbus
|
||||
|
||||
print('ok')
|
||||
print('dependencies ok')
|
||||
|
|
|
@ -18,9 +18,9 @@ osmo_gsm_tester_host=root@10.9.1.190
|
|||
osmo_gsm_tester_dir="/var/tmp/osmo-gsm-tester"
|
||||
tmp_dir="/var/tmp/prep-osmo-gsm-tester"
|
||||
arch="x86_64"
|
||||
archive_name="openbsc-$arch-build-$BUILD_NUMBER"
|
||||
archive_name="openbsc.$arch-build-$BUILD_NUMBER"
|
||||
archive="$archive_name.tgz"
|
||||
manifest="manifest.txt"
|
||||
manifest="checksums.md5"
|
||||
test_report="test-report.xml"
|
||||
test_timeout_sec=120
|
||||
|
||||
|
@ -78,7 +78,9 @@ rm -rf "$local_ts_base" || true
|
|||
mkdir -p "$local_ts_dir"
|
||||
|
||||
# create archive of openbsc build
|
||||
tar czf "$local_ts_dir/$archive" "$prefix_dirname"/*
|
||||
cd "$prefix_dirname"
|
||||
tar czf "../$local_ts_dir/$archive" *
|
||||
cd ..
|
||||
# move archived bts builds into test session directory
|
||||
mv $WORKSPACE/osmo-bts-*.tgz "$local_ts_dir"
|
||||
cd "$local_ts_dir"
|
||||
|
|
|
@ -91,4 +91,4 @@ main "$octbts_repos"
|
|||
|
||||
# build the archive that is going to be copied to the tester and then to the BTS
|
||||
rm -f $WORKSPACE/osmo-bts-octphy*.tgz
|
||||
tar czf $WORKSPACE/osmo-bts-octphy-build-$BUILD_NUMBER.tgz inst-osmo-bts-octphy
|
||||
tar czf $WORKSPACE/osmo-bts-octphy.build-$BUILD_NUMBER.tgz inst-osmo-bts-octphy
|
||||
|
|
|
@ -60,9 +60,6 @@ for dep in $deps; do
|
|||
done
|
||||
|
||||
# build the archive that is going to be copied to the tester and then to the BTS
|
||||
tar_name="osmo-bts-sysmo-build-"
|
||||
if ls "$base/$tar_name"* ; then
|
||||
rm -f "$base/$tar_name"*
|
||||
fi
|
||||
rm -f "$base/osmo-bts-sysmo.*.tgz"
|
||||
cd "$prefix_base_real"
|
||||
tar cvzf "$base/$tar_name${BUILD_NUMBER}.tgz" *
|
||||
tar cvzf "$base/osmo-bts-sysmo.build-${BUILD_NUMBER}.tgz" *
|
||||
|
|
|
@ -56,6 +56,6 @@ for dep in $deps; do
|
|||
done
|
||||
|
||||
# build the archive that is going to be copied to the tester
|
||||
rm -f "$base/osmo-bts-trx*.tgz"
|
||||
cd "$base"
|
||||
rm -f osmo-bts-trx*.tgz
|
||||
tar czf "osmo-bts-trx-build-${BUILD_NUMBER}.tgz" "$inst"
|
||||
tar czf "osmo-bts-trx.build-${BUILD_NUMBER}.tgz" "$inst"
|
||||
|
|
|
@ -89,4 +89,4 @@ LAUNCHING A TEST RUN
|
|||
|
||||
osmo-gsm-tester watches /var/tmp/osmo-gsm-tester for instructions to launch
|
||||
test runs. A test run is triggered by a subdirectory containing binaries and a
|
||||
manifest file, typically created by jenkins using the enclosed scripts.
|
||||
checksums file, typically created by jenkins using the scripts in contrib/.
|
||||
|
|
|
@ -1,9 +1,12 @@
|
|||
.PHONY: check update
|
||||
|
||||
check:
|
||||
check: set_pythonpath
|
||||
./all_tests.py
|
||||
|
||||
update:
|
||||
./all_tests.py -u
|
||||
|
||||
set_pythonpath:
|
||||
echo "export PYTHONPATH=\"$(PWD)/../src\"" > set_pythonpath
|
||||
|
||||
# vim: noexpandtab tabstop=8 shiftwidth=8
|
|
@ -10,6 +10,7 @@ sys.path.append(src_dir)
|
|||
from osmo_gsm_tester import log
|
||||
|
||||
log.targets = [ log.TestsTarget() ]
|
||||
log.set_all_levels(log.L_DBG)
|
||||
|
||||
if '-v' in sys.argv:
|
||||
log.style_change(trace=True)
|
|
@ -6,6 +6,7 @@ import subprocess
|
|||
import time
|
||||
import difflib
|
||||
import argparse
|
||||
import re
|
||||
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('testdir_or_test', nargs='*',
|
||||
|
@ -37,6 +38,21 @@ def udiff(expect, got, expect_path):
|
|||
|
||||
def verify_output(got, expect_file, update=False):
|
||||
if os.path.isfile(expect_file):
|
||||
ign_file = expect_file + '.ign'
|
||||
if os.path.isfile(ign_file):
|
||||
with open(ign_file, 'r') as f:
|
||||
ign_rules = f.readlines()
|
||||
for ign_rule in ign_rules:
|
||||
if not ign_rule:
|
||||
continue
|
||||
if '\t' in ign_rule:
|
||||
ign_rule, repl = ign_rule.split('\t')
|
||||
repl = repl.strip()
|
||||
else:
|
||||
repl = '*'
|
||||
ir = re.compile(ign_rule)
|
||||
got = repl.join(ir.split(got))
|
||||
|
||||
if update:
|
||||
with open(expect_file, 'w') as f:
|
||||
f.write(got)
|
||||
|
@ -44,6 +60,7 @@ def verify_output(got, expect_file, update=False):
|
|||
|
||||
with open(expect_file, 'r') as f:
|
||||
expect = f.read()
|
||||
|
||||
if expect != got:
|
||||
udiff(expect, got, expect_file)
|
||||
sys.stderr.write('output mismatch: %r\n'
|
||||
|
@ -93,12 +110,7 @@ for test in sorted(tests):
|
|||
success = False
|
||||
|
||||
if not success:
|
||||
sys.stderr.write('--- stdout ---\n')
|
||||
sys.stderr.write(out)
|
||||
sys.stderr.write('--- stderr ---\n')
|
||||
sys.stderr.write(err)
|
||||
sys.stderr.write('---\n')
|
||||
sys.stderr.write('Test failed: %r\n\n' % os.path.basename(test))
|
||||
sys.stderr.write('\nTest failed: %r\n\n' % os.path.basename(test))
|
||||
errors.append(test)
|
||||
|
||||
if errors:
|
|
@ -0,0 +1,2 @@
|
|||
state_dir: ./test_work/state_dir
|
||||
suites_dir: ./suite_test
|
|
@ -1,37 +1,55 @@
|
|||
# all hardware and interfaces available to this osmo-gsm-tester
|
||||
|
||||
nitb_iface:
|
||||
- 10.42.42.1
|
||||
- 10.42.42.2
|
||||
- 10.42.42.3
|
||||
- addr: 10.42.42.1
|
||||
- addr: 10.42.42.2
|
||||
- addr: 10.42.42.3
|
||||
|
||||
bts:
|
||||
- label: sysmoBTS 1002
|
||||
type: sysmo
|
||||
unit_id: 1
|
||||
addr: 10.42.42.114
|
||||
trx:
|
||||
- band: GSM-1800
|
||||
band: GSM-1800
|
||||
|
||||
- label: octBTS 3000
|
||||
type: oct
|
||||
unit_id: 5
|
||||
addr: 10.42.42.115
|
||||
band: GSM-1800
|
||||
trx:
|
||||
- band: GSM-1800
|
||||
hwaddr: 00:0c:90:32:b5:8a
|
||||
- hwaddr: 00:0c:90:32:b5:8a
|
||||
|
||||
- label: nanoBTS 1900
|
||||
type: nanobts
|
||||
unit_id: 1902
|
||||
addr: 10.42.42.190
|
||||
band: GSM-1900
|
||||
trx:
|
||||
- band: GSM-1900
|
||||
hwaddr: 00:02:95:00:41:b3
|
||||
- hwaddr: 00:02:95:00:41:b3
|
||||
|
||||
arfcn:
|
||||
- GSM-1800: [512, 514, 516, 518, 520]
|
||||
- GSM-1900: [540, 542, 544, 546, 548]
|
||||
- arfcn: 512
|
||||
band: GSM-1800
|
||||
- arfcn: 514
|
||||
band: GSM-1800
|
||||
- arfcn: 516
|
||||
band: GSM-1800
|
||||
- arfcn: 518
|
||||
band: GSM-1800
|
||||
- arfcn: 520
|
||||
band: GSM-1800
|
||||
|
||||
- arfcn: 540
|
||||
band: GSM-1900
|
||||
- arfcn: 542
|
||||
band: GSM-1900
|
||||
- arfcn: 544
|
||||
band: GSM-1900
|
||||
- arfcn: 546
|
||||
band: GSM-1900
|
||||
- arfcn: 548
|
||||
band: GSM-1900
|
||||
|
||||
modem:
|
||||
- label: m7801
|
|
@ -0,0 +1,95 @@
|
|||
{'addr': ['0.0.0.0',
|
||||
'255.255.255.255',
|
||||
'10.11.12.13',
|
||||
'10.0.99.1',
|
||||
'192.168.0.14'],
|
||||
'bts': [{'addr': '10.42.42.114',
|
||||
'name': 'sysmoBTS 1002',
|
||||
'trx': [{'band': 'GSM-1800',
|
||||
'timeslots': ['CCCH+SDCCH4',
|
||||
'SDCCH8',
|
||||
'TCH/F_TCH/H_PDCH',
|
||||
'TCH/F_TCH/H_PDCH',
|
||||
'TCH/F_TCH/H_PDCH',
|
||||
'TCH/F_TCH/H_PDCH',
|
||||
'TCH/F_TCH/H_PDCH',
|
||||
'TCH/F_TCH/H_PDCH']},
|
||||
{'band': 'GSM-1900',
|
||||
'timeslots': ['SDCCH8',
|
||||
'PDCH',
|
||||
'PDCH',
|
||||
'PDCH',
|
||||
'PDCH',
|
||||
'PDCH',
|
||||
'PDCH',
|
||||
'PDCH']}],
|
||||
'type': 'sysmobts'}],
|
||||
'hwaddr': ['ca:ff:ee:ba:aa:be',
|
||||
'00:00:00:00:00:00',
|
||||
'CA:FF:EE:BA:AA:BE',
|
||||
'cA:Ff:eE:Ba:aA:Be',
|
||||
'ff:ff:ff:ff:ff:ff'],
|
||||
'imsi': ['012345', '012345678', '012345678912345'],
|
||||
'ki': ['000102030405060708090a0b0c0d0e0f', '000102030405060708090a0b0c0d0e0f'],
|
||||
'modems': [{'dbus_path': '/sierra_0',
|
||||
'imsi': '901700000009001',
|
||||
'ki': 'D620F48487B1B782DA55DF6717F08FF9',
|
||||
'msisdn': '7801'},
|
||||
{'dbus_path': '/sierra_1',
|
||||
'imsi': '901700000009002',
|
||||
'ki': 'D620F48487B1B782DA55DF6717F08FF9',
|
||||
'msisdn': '7802'}]}
|
||||
- expect validation success:
|
||||
Validation: OK
|
||||
- unknown item:
|
||||
--- -: ERR: ValueError: config item not known: 'bts[].unknown_item'
|
||||
Validation: Error
|
||||
- wrong type modems[].imsi:
|
||||
--- -: ERR: ValueError: config item is dict but should be a leaf node of type 'imsi': 'modems[].imsi'
|
||||
Validation: Error
|
||||
- invalid key with space:
|
||||
--- -: ERR: ValueError: invalid config key: 'imsi '
|
||||
Validation: Error
|
||||
- list instead of dict:
|
||||
--- -: ERR: ValueError: config item not known: 'a_dict[]'
|
||||
Validation: Error
|
||||
- unknown band:
|
||||
--- (item='bts[].trx[].band'): ERR: ValueError: Unknown GSM band: 'what'
|
||||
Validation: Error
|
||||
- invalid v4 addrs:
|
||||
--- (item='addr[]'): ERR: ValueError: Invalid IPv4 address: '1.2.3'
|
||||
Validation: Error
|
||||
--- (item='addr[]'): ERR: ValueError: Invalid IPv4 address: '1.2.3 .4'
|
||||
Validation: Error
|
||||
--- (item='addr[]'): ERR: ValueError: Invalid IPv4 address: '91.2.3'
|
||||
Validation: Error
|
||||
--- (item='addr[]'): ERR: ValueError: Invalid IPv4 address: 'go away'
|
||||
Validation: Error
|
||||
--- (item='addr[]'): ERR: ValueError: Invalid IPv4 address: ''
|
||||
Validation: Error
|
||||
--- (item='addr[]'): ERR: ValueError: Invalid IPv4 address: None
|
||||
Validation: Error
|
||||
- invalid hw addrs:
|
||||
--- (item='hwaddr[]'): ERR: ValueError: Invalid hardware address: '1.2.3'
|
||||
Validation: Error
|
||||
--- (item='hwaddr[]'): ERR: ValueError: Invalid hardware address: '0b:0c:0d:0e:0f:0g'
|
||||
Validation: Error
|
||||
--- (item='hwaddr[]'): ERR: ValueError: Invalid hardware address: '0b:0c:0d:0e : 0f:0f'
|
||||
Validation: Error
|
||||
--- (item='hwaddr[]'): ERR: ValueError: Invalid hardware address: 'go away'
|
||||
Validation: Error
|
||||
--- (item='hwaddr[]'): ERR: ValueError: Invalid hardware address: ''
|
||||
Validation: Error
|
||||
--- (item='hwaddr[]'): ERR: ValueError: Invalid hardware address: None
|
||||
Validation: Error
|
||||
- invalid imsis:
|
||||
--- (item='imsi[]'): ERR: ValueError: Invalid IMSI: '99999999x9'
|
||||
Validation: Error
|
||||
--- (item='imsi[]'): ERR: ValueError: Invalid IMSI: '123 456 789 123'
|
||||
Validation: Error
|
||||
--- (item='imsi[]'): ERR: ValueError: Invalid IMSI: 'go away'
|
||||
Validation: Error
|
||||
--- (item='imsi[]'): ERR: ValueError: Invalid IMSI: ''
|
||||
Validation: Error
|
||||
--- (item='imsi[]'): ERR: ValueError: Invalid IMSI: None
|
||||
Validation: Error
|
|
@ -0,0 +1,115 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import _prep
|
||||
|
||||
import sys
|
||||
import os
|
||||
import io
|
||||
import pprint
|
||||
import copy
|
||||
|
||||
from osmo_gsm_tester import config, log, schema
|
||||
|
||||
example_config_file = 'test.cfg'
|
||||
example_config = os.path.join(_prep.script_dir, 'config_test', example_config_file)
|
||||
cfg = config.read(example_config)
|
||||
|
||||
pprint.pprint(cfg)
|
||||
|
||||
test_schema = {
|
||||
'modems[].dbus_path': schema.STR,
|
||||
'modems[].msisdn': schema.STR,
|
||||
'modems[].imsi': schema.IMSI,
|
||||
'modems[].ki': schema.STR,
|
||||
'bts[].name' : schema.STR,
|
||||
'bts[].type' : schema.STR,
|
||||
'bts[].addr' : schema.STR,
|
||||
'bts[].trx[].timeslots[]' : schema.STR,
|
||||
'bts[].trx[].band' : schema.BAND,
|
||||
'a_dict.foo' : schema.INT,
|
||||
'addr[]' : schema.IPV4,
|
||||
'hwaddr[]' : schema.HWADDR,
|
||||
'imsi[]' : schema.IMSI,
|
||||
'ki[]' : schema.KI,
|
||||
}
|
||||
|
||||
def val(which):
|
||||
try:
|
||||
schema.validate(which, test_schema)
|
||||
print('Validation: OK')
|
||||
except ValueError:
|
||||
log.log_exn()
|
||||
print('Validation: Error')
|
||||
|
||||
print('- expect validation success:')
|
||||
val(cfg)
|
||||
|
||||
print('- unknown item:')
|
||||
c = copy.deepcopy(cfg)
|
||||
c['bts'][0]['unknown_item'] = 'no'
|
||||
val(c)
|
||||
|
||||
print('- wrong type modems[].imsi:')
|
||||
c = copy.deepcopy(cfg)
|
||||
c['modems'][0]['imsi'] = {'no':'no'}
|
||||
val(c)
|
||||
|
||||
print('- invalid key with space:')
|
||||
c = copy.deepcopy(cfg)
|
||||
c['modems'][0]['imsi '] = '12345'
|
||||
val(c)
|
||||
|
||||
print('- list instead of dict:')
|
||||
c = copy.deepcopy(cfg)
|
||||
c['a_dict'] = [ 1, 2, 3 ]
|
||||
val(c)
|
||||
|
||||
print('- unknown band:')
|
||||
c = copy.deepcopy(cfg)
|
||||
c['bts'][0]['trx'][0]['band'] = 'what'
|
||||
val(c)
|
||||
|
||||
print('- invalid v4 addrs:')
|
||||
c = copy.deepcopy(cfg)
|
||||
c['addr'][3] = '1.2.3'
|
||||
val(c)
|
||||
c['addr'][3] = '1.2.3 .4'
|
||||
val(c)
|
||||
c['addr'][3] = '91.2.3'
|
||||
val(c)
|
||||
c['addr'][3] = 'go away'
|
||||
val(c)
|
||||
c['addr'][3] = ''
|
||||
val(c)
|
||||
c['addr'][3] = None
|
||||
val(c)
|
||||
|
||||
print('- invalid hw addrs:')
|
||||
c = copy.deepcopy(cfg)
|
||||
c['hwaddr'][3] = '1.2.3'
|
||||
val(c)
|
||||
c['hwaddr'][3] = '0b:0c:0d:0e:0f:0g'
|
||||
val(c)
|
||||
c['hwaddr'][3] = '0b:0c:0d:0e : 0f:0f'
|
||||
val(c)
|
||||
c['hwaddr'][3] = 'go away'
|
||||
val(c)
|
||||
c['hwaddr'][3] = ''
|
||||
val(c)
|
||||
c['hwaddr'][3] = None
|
||||
val(c)
|
||||
|
||||
print('- invalid imsis:')
|
||||
c = copy.deepcopy(cfg)
|
||||
c['imsi'][2] = '99999999x9'
|
||||
val(c)
|
||||
c['imsi'][2] = '123 456 789 123'
|
||||
val(c)
|
||||
c['imsi'][2] = 'go away'
|
||||
val(c)
|
||||
c['imsi'][2] = ''
|
||||
val(c)
|
||||
c['imsi'][2] = None
|
||||
val(c)
|
||||
|
||||
# vim: expandtab tabstop=4 shiftwidth=4
|
|
@ -37,3 +37,23 @@ BTS:
|
|||
- PDCH
|
||||
- PDCH
|
||||
band: GSM-1900
|
||||
|
||||
addr:
|
||||
- 0.0.0.0
|
||||
- 255.255.255.255
|
||||
- 10.11.12.13
|
||||
- 10.0.99.1
|
||||
- 192.168.0.14
|
||||
hwaddr:
|
||||
- ca:ff:ee:ba:aa:be
|
||||
- 00:00:00:00:00:00
|
||||
- CA:FF:EE:BA:AA:BE
|
||||
- cA:Ff:eE:Ba:aA:Be
|
||||
- ff:ff:ff:ff:ff:ff
|
||||
imsi:
|
||||
- '012345'
|
||||
- '012345678'
|
||||
- '012345678912345'
|
||||
ki:
|
||||
- 000102030405060708090a0b0c0d0e0f
|
||||
- 000102030405060708090a0b0c0d0e0f
|
|
@ -0,0 +1,44 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
# Based on http://stackoverflow.com/questions/22390064/use-dbus-to-just-send-a-message-in-python
|
||||
|
||||
# Python DBUS Test Server
|
||||
# runs until the Quit() method is called via DBUS
|
||||
|
||||
from gi.repository import GLib
|
||||
from pydbus import SessionBus
|
||||
|
||||
loop = GLib.MainLoop()
|
||||
|
||||
class MyDBUSService(object):
|
||||
"""
|
||||
<node>
|
||||
<interface name='net.lew21.pydbus.ClientServerExample'>
|
||||
<method name='Hello'>
|
||||
<arg type='s' name='response' direction='out'/>
|
||||
</method>
|
||||
<method name='EchoString'>
|
||||
<arg type='s' name='a' direction='in'/>
|
||||
<arg type='s' name='response' direction='out'/>
|
||||
</method>
|
||||
<method name='Quit'/>
|
||||
</interface>
|
||||
</node>
|
||||
"""
|
||||
|
||||
def Hello(self):
|
||||
"""returns the string 'Hello, World!'"""
|
||||
return "Hello, World!"
|
||||
|
||||
def EchoString(self, s):
|
||||
"""returns whatever is passed to it"""
|
||||
return s
|
||||
|
||||
def Quit(self):
|
||||
"""removes this object from the DBUS connection and exits"""
|
||||
loop.quit()
|
||||
|
||||
bus = SessionBus()
|
||||
bus.publish("net.lew21.pydbus.ClientServerExample", MyDBUSService())
|
||||
loop.run()
|
||||
|
|
@ -0,0 +1,57 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
'''
|
||||
Power on and off some modem on ofono, while running the glib main loop in a
|
||||
thread and receiving modem state changes by dbus signals.
|
||||
'''
|
||||
|
||||
from pydbus import SystemBus, Variant
|
||||
import time
|
||||
import threading
|
||||
import pprint
|
||||
|
||||
from gi.repository import GLib
|
||||
loop = GLib.MainLoop()
|
||||
|
||||
def propchanged(*args, **kwargs):
|
||||
print('-> PROP CHANGED: %r %r' % (args, kwargs))
|
||||
|
||||
class GlibMainloop(threading.Thread):
|
||||
def run(self):
|
||||
loop.run()
|
||||
|
||||
ml = GlibMainloop()
|
||||
ml.start()
|
||||
|
||||
try:
|
||||
bus = SystemBus()
|
||||
|
||||
print('\n- list modems')
|
||||
root = bus.get("org.ofono", '/')
|
||||
print(root.Introspect())
|
||||
modems = sorted(root.GetModems())
|
||||
pprint.pprint(modems)
|
||||
|
||||
first_modem_path = modems[0][0]
|
||||
print('\n- first modem %r' % first_modem_path)
|
||||
modem = bus.get("org.ofono", first_modem_path)
|
||||
modem.PropertyChanged.connect(propchanged)
|
||||
|
||||
print(modem.Introspect())
|
||||
print(modem.GetProperties())
|
||||
|
||||
print('\n- set Powered = True')
|
||||
modem.SetProperty('Powered', Variant('b', True))
|
||||
print('call returned')
|
||||
print(modem.GetProperties())
|
||||
|
||||
time.sleep(1)
|
||||
|
||||
print('\n- set Powered = False')
|
||||
modem.SetProperty('Powered', Variant('b', False))
|
||||
print('call returned')
|
||||
|
||||
print(modem.GetProperties())
|
||||
finally:
|
||||
loop.quit()
|
||||
ml.join()
|
|
@ -0,0 +1,71 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
'''
|
||||
Power on and off some modem on ofono, while running the glib main loop in a
|
||||
thread and receiving modem state changes by dbus signals.
|
||||
'''
|
||||
|
||||
from pydbus import SystemBus, Variant
|
||||
import time
|
||||
import pprint
|
||||
|
||||
from gi.repository import GLib
|
||||
glib_main_loop = GLib.MainLoop()
|
||||
glib_main_ctx = glib_main_loop.get_context()
|
||||
|
||||
def propchanged(*args, **kwargs):
|
||||
print('-> PROP CHANGED: %r %r' % (args, kwargs))
|
||||
|
||||
|
||||
def pump():
|
||||
global glib_main_ctx
|
||||
print('pump?')
|
||||
while glib_main_ctx.pending():
|
||||
print('* pump')
|
||||
glib_main_ctx.iteration()
|
||||
|
||||
def wait(condition):
|
||||
pump()
|
||||
while not condition():
|
||||
time.sleep(.1)
|
||||
pump()
|
||||
|
||||
bus = SystemBus()
|
||||
|
||||
print('\n- list modems')
|
||||
root = bus.get("org.ofono", '/')
|
||||
print(root.Introspect())
|
||||
modems = sorted(root.GetModems())
|
||||
pprint.pprint(modems)
|
||||
pump()
|
||||
|
||||
first_modem_path = modems[0][0]
|
||||
print('\n- first modem %r' % first_modem_path)
|
||||
modem = bus.get("org.ofono", first_modem_path)
|
||||
modem.PropertyChanged.connect(propchanged)
|
||||
|
||||
print(modem.Introspect())
|
||||
print(modem.GetProperties())
|
||||
|
||||
print('\n- set Powered = True')
|
||||
modem.SetProperty('Powered', Variant('b', True))
|
||||
print('call returned')
|
||||
print('- pump dbus events')
|
||||
pump()
|
||||
pump()
|
||||
print('sleep 1')
|
||||
time.sleep(1)
|
||||
pump()
|
||||
|
||||
|
||||
print('- modem properties:')
|
||||
print(modem.GetProperties())
|
||||
|
||||
|
||||
print('\n- set Powered = False')
|
||||
modem.SetProperty('Powered', Variant('b', False))
|
||||
print('call returned')
|
||||
|
||||
print(modem.GetProperties())
|
||||
|
||||
# vim: tabstop=4 shiftwidth=4 expandtab
|
|
@ -3,7 +3,7 @@ import time
|
|||
|
||||
import _prep
|
||||
|
||||
from osmo_gsm_tester.utils import FileLock
|
||||
from osmo_gsm_tester.util import FileLock
|
||||
|
||||
fl = FileLock('/tmp/lock_test', '_'.join(sys.argv[1:]))
|
||||
|
|
@ -0,0 +1,41 @@
|
|||
- Testing global log functions
|
||||
01:02:03 tst <origin>: from log.log()
|
||||
01:02:03 tst <origin>: DBG: from log.dbg()
|
||||
01:02:03 tst <origin>: ERR: from log.err()
|
||||
- Testing log.Origin functions
|
||||
01:02:03 tst some-name(some='detail'): hello log
|
||||
01:02:03 tst some-name(some='detail'): ERR: hello err
|
||||
01:02:03 tst some-name(some='detail'): message {int=3, none=None, str='str\n', tuple=('foo', 42)}
|
||||
01:02:03 tst some-name(some='detail'): DBG: hello dbg
|
||||
- Testing log.style()
|
||||
01:02:03: only time
|
||||
tst: only category
|
||||
DBG: only level
|
||||
some-name(some='detail'): only origin
|
||||
only src [log_test.py:70]
|
||||
- Testing log.style_change()
|
||||
no log format
|
||||
01:02:03: add time
|
||||
but no time format
|
||||
01:02:03: DBG: add level
|
||||
01:02:03 tst: DBG: add category
|
||||
01:02:03 tst: DBG: add src [log_test.py:85]
|
||||
01:02:03 tst some-name(some='detail'): DBG: add origin [log_test.py:87]
|
||||
- Testing origin_width
|
||||
01:02:03 tst shortname: origin str set to 23 chars [log_test.py:94]
|
||||
01:02:03 tst very long name(and_some=(3, 'things', 'in a tuple'), some='details'): long origin str [log_test.py:96]
|
||||
01:02:03 tst very long name(and_some=(3, 'things', 'in a tuple'), some='details'): DBG: long origin str dbg [log_test.py:97]
|
||||
01:02:03 tst very long name(and_some=(3, 'things', 'in a tuple'), some='details'): ERR: long origin str err [log_test.py:98]
|
||||
- Testing log.Origin with omitted info
|
||||
01:02:03 tst LogTest: hello log, name implicit from class name [log_test.py:103]
|
||||
01:02:03 --- explicit_name: hello log, no category set [log_test.py:107]
|
||||
01:02:03 --- LogTest: hello log, no category nor name set [log_test.py:110]
|
||||
01:02:03 --- LogTest: DBG: debug message, no category nor name set [log_test.py:113]
|
||||
- Testing logging of Exceptions, tracing origins
|
||||
Not throwing an exception in 'with:' works.
|
||||
nested print just prints
|
||||
01:02:03 tst level3: nested log() [level1↪level2↪level3] [log_test.py:145]
|
||||
01:02:03 tst level2: nested l2 log() from within l3 scope [level1↪level2] [log_test.py:146]
|
||||
01:02:03 tst level3: ERR: ValueError: bork [level1↪level2↪level3] [log_test.py:147: raise ValueError('bork')]
|
||||
- Enter the same Origin context twice
|
||||
01:02:03 tst level2: nested log [level1↪level2] [log_test.py:159]
|
|
@ -29,6 +29,7 @@ from osmo_gsm_tester import log
|
|||
#log.targets[0].get_time_str = lambda: '01:02:03'
|
||||
fake_time = '01:02:03'
|
||||
log.style_change(time=True, time_fmt=fake_time)
|
||||
log.set_all_levels(None)
|
||||
|
||||
print('- Testing global log functions')
|
||||
log.log('<origin>', log.C_TST, 'from log.log()')
|
|
@ -0,0 +1,7 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
msisdn = '0000'
|
||||
|
||||
l = len(msisdn)
|
||||
next_msisdn = ('%%0%dd' % l) % (int(msisdn) + 1)
|
||||
print(next_msisdn)
|
|
@ -0,0 +1,33 @@
|
|||
run foo: DBG: cd '[TMP]'; PATH=[$PATH] foo.py arg1 arg2 [foo↪foo]
|
||||
run foo: DBG: [TMP]/stdout [foo↪foo]
|
||||
run foo: DBG: [TMP]/stderr [foo↪foo]
|
||||
run foo(pid=[PID]): Launched [foo(pid=[PID])↪foo(pid=[PID])]
|
||||
stdout:
|
||||
(launched: [DATETIME])
|
||||
foo stdout
|
||||
[[$0], 'arg1', 'arg2']
|
||||
|
||||
stderr:
|
||||
(launched: [DATETIME])
|
||||
foo stderr
|
||||
|
||||
run foo(pid=[PID]): Terminating (SIGINT)
|
||||
run foo(pid=[PID]): DBG: Cleanup
|
||||
run foo(pid=[PID]): Terminated {rc=1}
|
||||
result: 1
|
||||
stdout:
|
||||
(launched: [DATETIME])
|
||||
foo stdout
|
||||
[[$0], 'arg1', 'arg2']
|
||||
Exiting (stdout)
|
||||
|
||||
stderr:
|
||||
(launched: [DATETIME])
|
||||
foo stderr
|
||||
Traceback (most recent call last):
|
||||
File [$0], line [LINE], in <module>
|
||||
time.sleep(1)
|
||||
KeyboardInterrupt
|
||||
Exiting (stderr)
|
||||
|
||||
done.
|
|
@ -0,0 +1,7 @@
|
|||
PATH='[^']*' PATH=[$PATH]
|
||||
/tmp/[^/ '"]* [TMP]
|
||||
pid=[0-9]* pid=[PID]
|
||||
....-..-.._..:..:.. [DATETIME]
|
||||
'[^']*/selftest/process_test/foo.py' [$0]
|
||||
"[^"]*/selftest/process_test/foo.py" [$0]
|
||||
, line [0-9]* , line [LINE]
|
|
@ -0,0 +1,51 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import _prep
|
||||
import time
|
||||
import os
|
||||
|
||||
from osmo_gsm_tester import process, util, log
|
||||
|
||||
tmpdir = util.Dir(util.get_tempdir())
|
||||
|
||||
dollar_path = '%s:%s' % (
|
||||
os.path.join(os.getcwd(), 'process_test'),
|
||||
os.getenv('PATH'))
|
||||
|
||||
p = process.Process('foo', tmpdir, ('foo.py', 'arg1', 'arg2'),
|
||||
env={'PATH': dollar_path})
|
||||
|
||||
p.launch()
|
||||
time.sleep(.5)
|
||||
p.poll()
|
||||
print('stdout:')
|
||||
print(p.get_stdout())
|
||||
print('stderr:')
|
||||
print(p.get_stderr())
|
||||
|
||||
assert not p.terminated()
|
||||
p.terminate()
|
||||
assert p.terminated()
|
||||
print('result: %r' % p.result)
|
||||
|
||||
print('stdout:')
|
||||
print(p.get_stdout())
|
||||
print('stderr:')
|
||||
print(p.get_stderr())
|
||||
print('done.')
|
||||
|
||||
test_ssh = True
|
||||
test_ssh = False
|
||||
if test_ssh:
|
||||
# this part of the test requires ability to ssh to localhost
|
||||
p = process.RemoteProcess('localhost', '/tmp', 'ssh-test', tmpdir,
|
||||
('ls', '-al'))
|
||||
p.launch()
|
||||
p.wait()
|
||||
assert p.terminated()
|
||||
print('stdout:')
|
||||
print(p.get_stdout())
|
||||
print('stderr:')
|
||||
print(p.get_stderr())
|
||||
|
||||
# vim: expandtab tabstop=4 shiftwidth=4
|
|
@ -0,0 +1,25 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import sys
|
||||
import atexit
|
||||
import time
|
||||
|
||||
|
||||
sys.stdout.write('foo stdout\n')
|
||||
sys.stderr.write('foo stderr\n')
|
||||
|
||||
print(repr(sys.argv))
|
||||
sys.stdout.flush()
|
||||
sys.stderr.flush()
|
||||
|
||||
def x():
|
||||
sys.stdout.write('Exiting (stdout)\n')
|
||||
sys.stdout.flush()
|
||||
sys.stderr.write('Exiting (stderr)\n')
|
||||
sys.stderr.flush()
|
||||
atexit.register(x)
|
||||
|
||||
while True:
|
||||
time.sleep(1)
|
||||
|
||||
# vim: expandtab tabstop=4 shiftwidth=4
|
|
@ -0,0 +1,24 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import support
|
||||
import importlib.util
|
||||
|
||||
if hasattr(importlib.util, 'module_from_spec'):
|
||||
def run_test(path):
|
||||
print('py 3.5+')
|
||||
spec = importlib.util.spec_from_file_location("tests.script", path)
|
||||
spec.loader.exec_module( importlib.util.module_from_spec(spec) )
|
||||
else:
|
||||
def run_test(path):
|
||||
print('py 3.4-')
|
||||
from importlib.machinery import SourceFileLoader
|
||||
SourceFileLoader("tests.script", path).load_module()
|
||||
|
||||
path = './subdir/script.py'
|
||||
|
||||
support.config = 'specifics'
|
||||
run_test(path)
|
||||
|
||||
support.config = 'specifics2'
|
||||
run_test(path)
|
||||
|
|
@ -0,0 +1,9 @@
|
|||
from support import *
|
||||
|
||||
print('hello')
|
||||
|
||||
def run(what):
|
||||
print(what)
|
||||
print(what)
|
||||
|
||||
run(config)
|
|
@ -0,0 +1,2 @@
|
|||
|
||||
config = None
|
|
@ -0,0 +1,18 @@
|
|||
This a real gsm test suite configured and ready to use.
|
||||
The only thing missing is a trial dir containing binaries.
|
||||
|
||||
If you have your trial with binary tar archives in ~/my_trial
|
||||
you can run the suite for example like this:
|
||||
|
||||
. ./env # point your environment at all the right places
|
||||
run_once.py ~/my_trial -s sms:trx
|
||||
|
||||
This combines the suites/sms test suite with the scenarios/trx choice of
|
||||
osmo-bts-trx and runs all tests in the 'sms' suite.
|
||||
|
||||
A ./state dir will be created to store the current osmo-gsm-tester state. If
|
||||
you prefer not to write to this dir, set up an own configuration pointing at a
|
||||
different path (see paths.conf: 'state_dir' and the env file). When there is
|
||||
no OSMO_GSM_TESTER_CONF set (from ./env), osmo-gsm-tester will instead look for
|
||||
conf files in several locations like ~/.config/osmo-gsm-tester,
|
||||
/usr/local/etc/osmo-gsm-tester, /etc/osmo-gsm-tester
|
|
@ -0,0 +1,31 @@
|
|||
nitb:
|
||||
net:
|
||||
mcc: 1
|
||||
mnc: 868
|
||||
short_name: osmo-gsm-tester
|
||||
long_name: osmo-gsm-tester
|
||||
auth_policy: closed
|
||||
encryption: a5 0
|
||||
|
||||
nitb_bts:
|
||||
location_area_code: 23
|
||||
base_station_id_code: 63
|
||||
stream_id: 255
|
||||
trx_list:
|
||||
- max_power_red: 22
|
||||
arfcn: 868
|
||||
timeslot_list:
|
||||
- phys_chan_config: CCCH+SDCCH4
|
||||
- phys_chan_config: SDCCH8
|
||||
- phys_chan_config: TCH_F/TCH_H/PDCH
|
||||
- phys_chan_config: TCH_F/TCH_H/PDCH
|
||||
- phys_chan_config: TCH_F/TCH_H/PDCH
|
||||
- phys_chan_config: TCH_F/TCH_H/PDCH
|
||||
- phys_chan_config: TCH_F/TCH_H/PDCH
|
||||
- phys_chan_config: TCH_F/TCH_H/PDCH
|
||||
|
||||
osmo_bts_sysmo:
|
||||
ipa_unit_id: 1123
|
||||
|
||||
osmo_bts_trx:
|
||||
ipa_unit_id: 1124
|
|
@ -0,0 +1,4 @@
|
|||
OSMO_GSM_TESTER_SRC="$(readlink -f ../../src)"
|
||||
export PYTHONPATH="$OSMO_GSM_TESTER_SRC"
|
||||
export PATH="$OSMO_GSM_TESTER_SRC:$PATH"
|
||||
export OSMO_GSM_TESTER_CONF="$PWD"
|
|
@ -0,0 +1,3 @@
|
|||
state_dir: './state'
|
||||
suites_dir: './suites'
|
||||
scenarios_dir: './scenarios'
|
|
@ -0,0 +1,139 @@
|
|||
# all hardware and interfaces available to this osmo-gsm-tester
|
||||
|
||||
nitb_iface:
|
||||
- addr: 127.0.0.10
|
||||
- addr: 127.0.0.11
|
||||
- addr: 127.0.0.12
|
||||
|
||||
bts:
|
||||
- label: sysmoBTS 1002
|
||||
type: sysmo
|
||||
unit_id: 1
|
||||
addr: 10.42.42.114
|
||||
band: GSM-1800
|
||||
|
||||
- label: octBTS 3000
|
||||
type: oct
|
||||
unit_id: 5
|
||||
addr: 10.42.42.115
|
||||
band: GSM-1800
|
||||
trx:
|
||||
- hwaddr: 00:0c:90:32:b5:8a
|
||||
|
||||
- label: Ettus B210
|
||||
type: osmotrx
|
||||
unit_id: 6
|
||||
addr: 10.42.42.116
|
||||
band: GSM-1800
|
||||
|
||||
- label: nanoBTS 1900
|
||||
type: nanobts
|
||||
unit_id: 1902
|
||||
addr: 10.42.42.190
|
||||
band: GSM-1900
|
||||
trx:
|
||||
- hwaddr: 00:02:95:00:41:b3
|
||||
|
||||
arfcn:
|
||||
- arfcn: 512
|
||||
band: GSM-1800
|
||||
- arfcn: 514
|
||||
band: GSM-1800
|
||||
- arfcn: 516
|
||||
band: GSM-1800
|
||||
- arfcn: 518
|
||||
band: GSM-1800
|
||||
- arfcn: 520
|
||||
band: GSM-1800
|
||||
|
||||
- arfcn: 540
|
||||
band: GSM-1900
|
||||
- arfcn: 542
|
||||
band: GSM-1900
|
||||
- arfcn: 544
|
||||
band: GSM-1900
|
||||
- arfcn: 546
|
||||
band: GSM-1900
|
||||
- arfcn: 548
|
||||
band: GSM-1900
|
||||
|
||||
modem:
|
||||
- label: m7801
|
||||
path: '/wavecom_0'
|
||||
imsi: 901700000007801
|
||||
ki: D620F48487B1B782DA55DF6717F08FF9
|
||||
|
||||
- label: m7802
|
||||
path: '/wavecom_1'
|
||||
imsi: 901700000007802
|
||||
ki: 47FDB2D55CE6A10A85ABDAD034A5B7B3
|
||||
|
||||
- label: m7803
|
||||
path: '/wavecom_2'
|
||||
imsi: 901700000007803
|
||||
ki: ABBED4C91417DF710F60675B6EE2C8D2
|
||||
|
||||
- label: m7804
|
||||
path: '/wavecom_3'
|
||||
imsi: 901700000007804
|
||||
ki: 8BA541179156F2BF0918CA3CFF9351B0
|
||||
|
||||
- label: m7805
|
||||
path: '/wavecom_4'
|
||||
imsi: 901700000007805
|
||||
ki: 82BEC24B5B50C9FAA69D17DEC0883A23
|
||||
|
||||
- label: m7806
|
||||
path: '/wavecom_5'
|
||||
imsi: 901700000007806
|
||||
ki: DAF6BD6A188F7A4F09866030BF0F723D
|
||||
|
||||
- label: m7807
|
||||
path: '/wavecom_6'
|
||||
imsi: 901700000007807
|
||||
ki: AEB411CFE39681A6352A1EAE4DDC9DBA
|
||||
|
||||
- label: m7808
|
||||
path: '/wavecom_7'
|
||||
imsi: 901700000007808
|
||||
ki: F5DEF8692B305D7A65C677CA9EEE09C4
|
||||
|
||||
- label: m7809
|
||||
path: '/wavecom_8'
|
||||
imsi: 901700000007809
|
||||
ki: A644F4503E812FD75329B1C8D625DA44
|
||||
|
||||
- label: m7810
|
||||
path: '/wavecom_9'
|
||||
imsi: 901700000007810
|
||||
ki: EF663BDF3477DCD18D3D2293A2BAED67
|
||||
|
||||
- label: m7811
|
||||
path: '/wavecom_10'
|
||||
imsi: 901700000007811
|
||||
ki: E88F37F048A86A9BC4D652539228C039
|
||||
|
||||
- label: m7812
|
||||
path: '/wavecom_11'
|
||||
imsi: 901700000007812
|
||||
ki: E8D940DD66FCF6F1CD2C0F8F8C45633D
|
||||
|
||||
- label: m7813
|
||||
path: '/wavecom_12'
|
||||
imsi: 901700000007813
|
||||
ki: DBF534700C10141C49F699B0419107E3
|
||||
|
||||
- label: m7814
|
||||
path: '/wavecom_13'
|
||||
imsi: 901700000007814
|
||||
ki: B36021DEB90C4EA607E408A92F3B024D
|
||||
|
||||
- label: m7815
|
||||
path: '/wavecom_14'
|
||||
imsi: 901700000007815
|
||||
ki: 1E209F6F839F9195778C4F96BE281A24
|
||||
|
||||
- label: m7816
|
||||
path: '/wavecom_15'
|
||||
imsi: 901700000007816
|
||||
ki: BF827D219E739DD189F6F59E60D6455C
|
|
@ -0,0 +1,3 @@
|
|||
resources:
|
||||
bts:
|
||||
- type: osmotrx
|
|
@ -0,0 +1,26 @@
|
|||
#!/usr/bin/env python3

# Mobile-originated to mobile-terminated SMS test: bring up a NITB with one
# BTS, register two modems, and pass one SMS from the first to the second.
from osmo_gsm_tester.test import *

print('use resources...')
# Reserve one NITB, one BTS and two modems from the suite's resource pool.
nitb = suite.nitb()
bts = suite.bts()
mo_modem = suite.modem()
mt_modem = suite.modem()

print('start nitb and bts...')
nitb.add_bts(bts)
nitb.start()
sleep(.1)
assert nitb.running()
bts.start()

# Both subscribers must be known to the NITB before they can attach.
nitb.add_subscriber(mo_modem)
nitb.add_subscriber(mt_modem)

mo_modem.connect(nitb)
mt_modem.connect(nitb)
wait(nitb.subscriber_attached, mo_modem, mt_modem)

# Send an SMS from the MO side and wait until the NITB saw it delivered.
sms = mo_modem.sms_send(mt_modem.msisdn)
sleep(3)
wait(nitb.sms_received, sms)
|
|
@ -0,0 +1,10 @@
|
|||
resources:
|
||||
nitb_iface:
|
||||
- times: 1
|
||||
bts:
|
||||
- times: 1
|
||||
modem:
|
||||
- times: 2
|
||||
|
||||
defaults:
|
||||
timeout: 60s
|
|
@ -0,0 +1,207 @@
|
|||
- expect solutions:
|
||||
[0, 1, 2]
|
||||
[0, 1, 2]
|
||||
[1, 0, 2]
|
||||
[1, 2, 0]
|
||||
- expect failure to solve:
|
||||
The requested resource requirements are not solvable [[0, 2], [2], [0, 2]]
|
||||
- test removing a Resources list from itself
|
||||
ok, caused exception: RuntimeError('Refusing to drop a list of resources from itself. This is probably a bug where a list of Resources() should have been copied but is passed as-is. use Resources.clear() instead.',)
|
||||
- test removing a Resources list from one with the same list in it
|
||||
- test resources config and state dir:
|
||||
*** all resources:
|
||||
{'arfcn': [{'_hash': 'e620569450f8259b3f0212ec19c285dd07df063c',
|
||||
'arfcn': '512',
|
||||
'band': 'GSM-1800'},
|
||||
{'_hash': '022621e513c5a5bf33b77430a1e9c886be676fa1',
|
||||
'arfcn': '514',
|
||||
'band': 'GSM-1800'},
|
||||
{'_hash': '3199abf375a1dd899e554e9d63a552e06d7f38bf',
|
||||
'arfcn': '516',
|
||||
'band': 'GSM-1800'},
|
||||
{'_hash': '57aa7bd1da62495f2857ae6b859193dd592a0a02',
|
||||
'arfcn': '518',
|
||||
'band': 'GSM-1800'},
|
||||
{'_hash': '53dd2e2682b736f427abd2ce59a9a50ca8130678',
|
||||
'arfcn': '520',
|
||||
'band': 'GSM-1800'},
|
||||
{'_hash': '31687a5e6d5140a4b3877606ca5f18244f11d706',
|
||||
'arfcn': '540',
|
||||
'band': 'GSM-1900'},
|
||||
{'_hash': '1def43a5c88a83cdb21279eacab0679ea08ffaf3',
|
||||
'arfcn': '542',
|
||||
'band': 'GSM-1900'},
|
||||
{'_hash': '1d6e3b08a3861fd4d748f111295ec5a93ecd3d23',
|
||||
'arfcn': '544',
|
||||
'band': 'GSM-1900'},
|
||||
{'_hash': '8fb36927de15466fcdbee01f7f65704c312cb36c',
|
||||
'arfcn': '546',
|
||||
'band': 'GSM-1900'},
|
||||
{'_hash': 'dc9ce027a257da087f31a5bc1ee6b4abd2637369',
|
||||
'arfcn': '548',
|
||||
'band': 'GSM-1900'}],
|
||||
'bts': [{'_hash': 'a7c6d2ebaeb139e8c2e7d45c3495d046d7439007',
|
||||
'addr': '10.42.42.114',
|
||||
'band': 'GSM-1800',
|
||||
'label': 'sysmoBTS 1002',
|
||||
'type': 'sysmo',
|
||||
'unit_id': '1'},
|
||||
{'_hash': '02540ab9eb556056a0b4d28443bc9f4793f6d549',
|
||||
'addr': '10.42.42.115',
|
||||
'band': 'GSM-1800',
|
||||
'label': 'octBTS 3000',
|
||||
'trx': [{'hwaddr': '00:0c:90:32:b5:8a'}],
|
||||
'type': 'oct',
|
||||
'unit_id': '5'},
|
||||
{'_hash': '556c954d475d12cf0dc622c0df5743cac5543fa0',
|
||||
'addr': '10.42.42.190',
|
||||
'band': 'GSM-1900',
|
||||
'label': 'nanoBTS 1900',
|
||||
'trx': [{'hwaddr': '00:02:95:00:41:b3'}],
|
||||
'type': 'nanobts',
|
||||
'unit_id': '1902'}],
|
||||
'modem': [{'_hash': '19c69e45aa090fb511446bd00797690aa82ff52f',
|
||||
'imsi': '901700000007801',
|
||||
'ki': 'D620F48487B1B782DA55DF6717F08FF9',
|
||||
'label': 'm7801',
|
||||
'path': '/wavecom_0'},
|
||||
{'_hash': 'e1a46516a1fb493b2617ab14fc1693a9a45ec254',
|
||||
'imsi': '901700000007802',
|
||||
'ki': '47FDB2D55CE6A10A85ABDAD034A5B7B3',
|
||||
'label': 'm7802',
|
||||
'path': '/wavecom_1'},
|
||||
{'_hash': '4fe91500a309782bb0fd8ac6fc827834089f8b00',
|
||||
'imsi': '901700000007803',
|
||||
'ki': 'ABBED4C91417DF710F60675B6EE2C8D2',
|
||||
'label': 'm7803',
|
||||
'path': '/wavecom_2'},
|
||||
{'_hash': 'c895badf0c2faaa4a997cd9f2313b5ebda7486e4',
|
||||
'imsi': '901700000007804',
|
||||
'ki': '8BA541179156F2BF0918CA3CFF9351B0',
|
||||
'label': 'm7804',
|
||||
'path': '/wavecom_3'},
|
||||
{'_hash': '60f182abed05adb530e3d06d88cc47703b65d7d8',
|
||||
'imsi': '901700000007805',
|
||||
'ki': '82BEC24B5B50C9FAA69D17DEC0883A23',
|
||||
'label': 'm7805',
|
||||
'path': '/wavecom_4'},
|
||||
{'_hash': 'd1f0fbf089a4bf32dd566af956d23b89e3d60821',
|
||||
'imsi': '901700000007806',
|
||||
'ki': 'DAF6BD6A188F7A4F09866030BF0F723D',
|
||||
'label': 'm7806',
|
||||
'path': '/wavecom_5'},
|
||||
{'_hash': '2445e3b5949d15f4351c0db1d3f3f593f9d73aa5',
|
||||
'imsi': '901700000007807',
|
||||
'ki': 'AEB411CFE39681A6352A1EAE4DDC9DBA',
|
||||
'label': 'm7807',
|
||||
'path': '/wavecom_6'},
|
||||
{'_hash': '80247388b2ca382382c4aec678102355b7922965',
|
||||
'imsi': '901700000007808',
|
||||
'ki': 'F5DEF8692B305D7A65C677CA9EEE09C4',
|
||||
'label': 'm7808',
|
||||
'path': '/wavecom_7'},
|
||||
{'_hash': '5b9e4e117a8889430542d22a9693e7b999362856',
|
||||
'imsi': '901700000007809',
|
||||
'ki': 'A644F4503E812FD75329B1C8D625DA44',
|
||||
'label': 'm7809',
|
||||
'path': '/wavecom_8'},
|
||||
{'_hash': '219a7abb057050eef3ce4b99c487f32bbaae9a41',
|
||||
'imsi': '901700000007810',
|
||||
'ki': 'EF663BDF3477DCD18D3D2293A2BAED67',
|
||||
'label': 'm7810',
|
||||
'path': '/wavecom_9'},
|
||||
{'_hash': '75d45c2d975b893da34c7cae827c25a2039cecd2',
|
||||
'imsi': '901700000007811',
|
||||
'ki': 'E88F37F048A86A9BC4D652539228C039',
|
||||
'label': 'm7811',
|
||||
'path': '/wavecom_10'},
|
||||
{'_hash': '1777362f556b249a5c1d6a83110704dbd037bc20',
|
||||
'imsi': '901700000007812',
|
||||
'ki': 'E8D940DD66FCF6F1CD2C0F8F8C45633D',
|
||||
'label': 'm7812',
|
||||
'path': '/wavecom_11'},
|
||||
{'_hash': '21d7eb4b0c782e004821a9f7f778891c93956924',
|
||||
'imsi': '901700000007813',
|
||||
'ki': 'DBF534700C10141C49F699B0419107E3',
|
||||
'label': 'm7813',
|
||||
'path': '/wavecom_12'},
|
||||
{'_hash': 'f53e4e79bdbc63eb2845de671007d4f733f28409',
|
||||
'imsi': '901700000007814',
|
||||
'ki': 'B36021DEB90C4EA607E408A92F3B024D',
|
||||
'label': 'm7814',
|
||||
'path': '/wavecom_13'},
|
||||
{'_hash': 'df1abec7704ebc89b2c062a69bd299cf3663ed9e',
|
||||
'imsi': '901700000007815',
|
||||
'ki': '1E209F6F839F9195778C4F96BE281A24',
|
||||
'label': 'm7815',
|
||||
'path': '/wavecom_14'},
|
||||
{'_hash': '11df1e4c7708157e5b89020c757763f58d6e610b',
|
||||
'imsi': '901700000007816',
|
||||
'ki': 'BF827D219E739DD189F6F59E60D6455C',
|
||||
'label': 'm7816',
|
||||
'path': '/wavecom_15'}],
|
||||
'nitb_iface': [{'_hash': 'cde1debf28f07f94f92c761b4b7c6bf35785ced4',
|
||||
'addr': '10.42.42.1'},
|
||||
{'_hash': 'fd103b22c7cf2480d609150e06f4bbd92ac78d8c',
|
||||
'addr': '10.42.42.2'},
|
||||
{'_hash': '1c614d6210c551d142aadca8f25e1534ebb2a70f',
|
||||
'addr': '10.42.42.3'}]}
|
||||
*** end: all resources
|
||||
|
||||
- request some resources
|
||||
--- (want='nitb_iface'): DBG: Looking for 1 x nitb_iface , candidates: 3
|
||||
--- (want='arfcn'): DBG: Looking for 2 x arfcn , candidates: 10
|
||||
--- (want='bts'): DBG: Looking for 2 x bts , candidates: 3
|
||||
--- (want='modem'): DBG: Looking for 2 x modem , candidates: 16
|
||||
~~~ currently reserved:
|
||||
arfcn:
|
||||
- _hash: e620569450f8259b3f0212ec19c285dd07df063c
|
||||
_reserved_by: testowner-123-1490837279
|
||||
arfcn: '512'
|
||||
band: GSM-1800
|
||||
- _hash: 022621e513c5a5bf33b77430a1e9c886be676fa1
|
||||
_reserved_by: testowner-123-1490837279
|
||||
arfcn: '514'
|
||||
band: GSM-1800
|
||||
bts:
|
||||
- _hash: a7c6d2ebaeb139e8c2e7d45c3495d046d7439007
|
||||
_reserved_by: testowner-123-1490837279
|
||||
addr: 10.42.42.114
|
||||
band: GSM-1800
|
||||
label: sysmoBTS 1002
|
||||
type: sysmo
|
||||
unit_id: '1'
|
||||
- _hash: 02540ab9eb556056a0b4d28443bc9f4793f6d549
|
||||
_reserved_by: testowner-123-1490837279
|
||||
addr: 10.42.42.115
|
||||
band: GSM-1800
|
||||
label: octBTS 3000
|
||||
trx:
|
||||
- hwaddr: 00:0c:90:32:b5:8a
|
||||
type: oct
|
||||
unit_id: '5'
|
||||
modem:
|
||||
- _hash: 19c69e45aa090fb511446bd00797690aa82ff52f
|
||||
_reserved_by: testowner-123-1490837279
|
||||
imsi: '901700000007801'
|
||||
ki: D620F48487B1B782DA55DF6717F08FF9
|
||||
label: m7801
|
||||
path: /wavecom_0
|
||||
- _hash: e1a46516a1fb493b2617ab14fc1693a9a45ec254
|
||||
_reserved_by: testowner-123-1490837279
|
||||
imsi: '901700000007802'
|
||||
ki: 47FDB2D55CE6A10A85ABDAD034A5B7B3
|
||||
label: m7802
|
||||
path: /wavecom_1
|
||||
nitb_iface:
|
||||
- _hash: cde1debf28f07f94f92c761b4b7c6bf35785ced4
|
||||
_reserved_by: testowner-123-1490837279
|
||||
addr: 10.42.42.1
|
||||
|
||||
~~~ end: currently reserved
|
||||
|
||||
~~~ currently reserved:
|
||||
{}
|
||||
|
||||
~~~ end: currently reserved
|
||||
|
|
@ -0,0 +1,97 @@
|
|||
#!/usr/bin/env python3
'''Self-test for osmo_gsm_tester.resource: the solver, the Resources
container, and a ResourcesPool reserve/free cycle against the test config.'''

import tempfile
import os
import pprint
import shutil
import atexit
import _prep
from osmo_gsm_tester import config, log, resource, util

workdir = util.get_tempdir()

# override config locations to make sure we use only the test conf
config.ENV_CONF = './conf'

# Pin the process id so the '_reserved_by' entries in the state file stay
# stable across test runs.
log.get_process_id = lambda: '123-1490837279'

print('- expect solutions:')
solvable_cases = (
    [ [0, 1, 2], [0, 1, 2], [0, 1, 2] ],  # == [0, 1, 2]
    [ [0, 1, 2], [0, 1],    [0, 2] ],     # == [0, 1, 2]
    [ [0, 1, 2], [0],       [0, 2] ],     # == [1, 0, 2]
    [ [0, 1, 2], [2],       [0, 2] ],     # == [1, 2, 0]
)
for permitted in solvable_cases:
    pprint.pprint(resource.solve(permitted))

print('- expect failure to solve:')
try:
    resource.solve([ [0, 2],
                     [2],
                     [0, 2] ])
    assert False
except resource.NoResourceExn as e:
    print(e)

print('- test removing a Resources list from itself')
try:
    r = resource.Resources({ 'k': [ {'a': 1, 'b': 2}, {'a': 3, 'b': 4}, ],
                             'i': [ {'c': 1, 'd': 2}, {'c': 3, 'd': 4}, ] })
    r.drop(r)
    assert False
except RuntimeError as e:
    print('ok, caused exception: %r' % e)

print('- test removing a Resources list from one with the same list in it')
r = resource.Resources({ 'k': [ {'a': 1, 'b': 2}, {'a': 3, 'b': 4}, ],
                         'i': [ {'c': 1, 'd': 2}, {'c': 3, 'd': 4}, ] })
r.drop({ 'k': r.get('k'), 'i': r.get('i') })
assert not r

print('- test resources config and state dir:')
resources_conf = os.path.join(_prep.script_dir, 'resource_test', 'etc',
                              'resources.conf')

state_dir = config.get_state_dir()
rrfile = state_dir.child(resource.RESERVED_RESOURCES_FILE)

pool = resource.ResourcesPool()

print('*** all resources:')
pprint.pprint(pool.all_resources)
print('*** end: all resources\n')

print('- request some resources')
want = {
    'nitb_iface': [ { 'times': 1 } ],
    'bts': [ { 'type': 'sysmo', 'times': 1 }, { 'type': 'oct', 'times': 1 } ],
    'arfcn': [ { 'band': 'GSM-1800', 'times': 2 } ],
    'modem': [ { 'times': 2 } ],
}

origin = log.Origin('testowner')

resources = pool.reserve(origin, want)

def dump_reserved():
    '''Print the current content of the reserved-resources state file.'''
    print('~~~ currently reserved:')
    with open(rrfile, 'r') as f:
        print(f.read())
    print('~~~ end: currently reserved\n')

dump_reserved()

resources.free()

dump_reserved()

# vim: expandtab tabstop=4 shiftwidth=4
|
|
@ -0,0 +1,40 @@
|
|||
- non-existing suite dir
|
||||
--- -: ERR: RuntimeError: Suite not found: 'does_not_exist' in ./suite_test
|
||||
- no suite.conf
|
||||
cnf empty_dir: DBG: reading suite.conf [empty_dir↪empty_dir]
|
||||
--- ./suite_test/empty_dir/suite.conf: ERR: FileNotFoundError: [Errno 2] No such file or directory: './suite_test/empty_dir/suite.conf' [empty_dir↪./suite_test/empty_dir/suite.conf]
|
||||
- valid suite dir
|
||||
cnf test_suite: DBG: reading suite.conf [test_suite↪test_suite]
|
||||
defaults:
|
||||
timeout: 60s
|
||||
resources:
|
||||
bts:
|
||||
- times: '1'
|
||||
modem:
|
||||
- times: '2'
|
||||
nitb_iface:
|
||||
- times: '1'
|
||||
|
||||
- run hello world test
|
||||
tst test_suite: reserving resources...
|
||||
--- (want='nitb_iface'): DBG: Looking for 1 x nitb_iface , candidates: 3
|
||||
--- (want='modem'): DBG: Looking for 2 x modem , candidates: 16
|
||||
--- (want='bts'): DBG: Looking for 1 x bts , candidates: 3
|
||||
tst hello_world.py: START [test_suite↪hello_world.py]
|
||||
tst hello_world.py:3: hello world [test_suite↪hello_world.py:3]
|
||||
tst hello_world.py:4: I am 'test_suite' / 'hello_world.py:4' [test_suite↪hello_world.py:4]
|
||||
tst hello_world.py:5: one [test_suite↪hello_world.py:5]
|
||||
tst hello_world.py:5: two [test_suite↪hello_world.py:5]
|
||||
tst hello_world.py:5: three [test_suite↪hello_world.py:5]
|
||||
tst hello_world.py: PASS [test_suite↪hello_world.py]
|
||||
pass: all 1 tests passed.
|
||||
|
||||
- a test with an error
|
||||
tst test_error.py: START [test_suite↪test_error.py] [suite.py:96]
|
||||
tst test_error.py:3: I am 'test_suite' / 'test_error.py:3' [test_suite↪test_error.py:3] [test_error.py:3]
|
||||
tst test_error.py:5: FAIL [test_suite↪test_error.py:5] [suite.py:108]
|
||||
tst test_error.py:5: ERR: AssertionError: [test_suite↪test_error.py:5] [test_error.py:5: assert False]
|
||||
FAIL: 1 of 1 tests failed:
|
||||
test_error.py
|
||||
|
||||
- graceful exit.
|
|
@ -0,0 +1,34 @@
|
|||
#!/usr/bin/env python3
'''Self-test for osmo_gsm_tester.suite: suite loading and test runs.'''
import os
import _prep
from osmo_gsm_tester import log, suite, config

# Use only the test configuration shipped next to this script.
config.ENV_CONF = os.path.join(os.getcwd(), 'conf')

#log.style_change(trace=True)

print('- non-existing suite dir')
# run_logging_exceptions() logs the error and returns None instead of raising.
# (PEP 8: compare against None with 'is', not '==')
assert log.run_logging_exceptions(suite.load, 'does_not_exist') is None

print('- no suite.conf')
assert log.run_logging_exceptions(suite.load, 'empty_dir') is None

print('- valid suite dir')
example_suite_dir = 'test_suite'
s_def = suite.load(example_suite_dir)
assert isinstance(s_def, suite.SuiteDefinition)
print(config.tostr(s_def.conf))

print('- run hello world test')
s = suite.SuiteRun(None, s_def)
results = s.run_tests('hello_world.py')
print(str(results))

# Show source locations in the log output for the failure case below.
log.style_change(src=True)
#log.style_change(trace=True)
print('\n- a test with an error')
results = s.run_tests('test_error.py')
print(str(results))

print('\n- graceful exit.')
# vim: expandtab tabstop=4 shiftwidth=4
|
|
@ -0,0 +1,5 @@
|
|||
from osmo_gsm_tester.test import *
|
||||
|
||||
print('hello world')
|
||||
print('I am %r / %r' % (suite.name(), test.name()))
|
||||
print('one\ntwo\nthree')
|
|
@ -0,0 +1,10 @@
|
|||
resources:
|
||||
nitb_iface:
|
||||
- times: 1
|
||||
bts:
|
||||
- times: 1
|
||||
modem:
|
||||
- times: 2
|
||||
|
||||
defaults:
|
||||
timeout: 60s
|
|
@ -0,0 +1,5 @@
|
|||
from osmo_gsm_tester.test import *
|
||||
|
||||
print('I am %r / %r' % (suite.name(), test.name()))
|
||||
|
||||
assert False
|
|
@ -1,8 +1,6 @@
|
|||
- Testing: fill a config file with values
|
||||
cnf Templates DBG: rendering osmo-nitb.cfg.tmpl
|
||||
!
|
||||
! OpenBSC configuration saved from vty
|
||||
!
|
||||
cnf Templates: DBG: rendering osmo-nitb.cfg.tmpl [osmo-nitb.cfg.tmpl↪Templates]
|
||||
! Configuration rendered by osmo-gsm-tester
|
||||
password foo
|
||||
!
|
||||
log stderr
|
||||
|
@ -14,19 +12,19 @@ log stderr
|
|||
!
|
||||
line vty
|
||||
no login
|
||||
bind val_vty_bind_ip
|
||||
bind val_nitb_iface_addr
|
||||
!
|
||||
e1_input
|
||||
e1_line 0 driver ipa
|
||||
ipa bind val_abis_bind_ip
|
||||
ipa bind val_nitb_iface_addr
|
||||
network
|
||||
network country code val_mcc
|
||||
mobile network code val_mnc
|
||||
short name val_net_name_short
|
||||
long name val_net_name_long
|
||||
auth policy val_net_auth_policy
|
||||
short name val_short_name
|
||||
long name val_long_name
|
||||
auth policy val_auth_policy
|
||||
location updating reject cause 13
|
||||
encryption a5 val_encryption
|
||||
encryption val_encryption
|
||||
neci 1
|
||||
rrlp mode none
|
||||
mm info 1
|
||||
|
@ -48,15 +46,6 @@ network
|
|||
timer t3117 0
|
||||
timer t3119 0
|
||||
timer t3141 0
|
||||
smpp
|
||||
local-tcp-ip val_smpp_bind_ip 2775
|
||||
system-id test
|
||||
policy closed
|
||||
esme test
|
||||
password test
|
||||
default-route
|
||||
ctrl
|
||||
bind val_ctrl_bind_ip
|
||||
bts 0
|
||||
type val_type_bts0
|
||||
band val_band_bts0
|
||||
|
@ -145,6 +134,15 @@ ctrl
|
|||
phys_chan_config val_phys_chan_config_2
|
||||
timeslot 3
|
||||
phys_chan_config val_phys_chan_config_3
|
||||
smpp
|
||||
local-tcp-ip val_nitb_iface_addr 2775
|
||||
system-id test
|
||||
policy closed
|
||||
esme test
|
||||
password test
|
||||
default-route
|
||||
ctrl
|
||||
bind val_nitb_iface_addr
|
||||
|
||||
- Testing: expect to fail on invalid templates dir
|
||||
sucess: setting non-existing templates dir raised RuntimeError
|
|
@ -23,7 +23,7 @@ mock_bts = {
|
|||
'band': 'val_band',
|
||||
'location_area_code': 'val_bts.location_area_code',
|
||||
'base_station_id_code': 'val_bts.base_station_id_code',
|
||||
'unit_id': 'val_bts.unit_id',
|
||||
'ipa_unit_id': 'val_bts.unit_id',
|
||||
'stream_id': 'val_bts.stream_id',
|
||||
'trx_list': (
|
||||
dict(arfcn='val_trx_arfcn_trx0',
|
||||
|
@ -47,19 +47,19 @@ def clone_mod(d, val_ext):
|
|||
mock_bts0 = clone_mod(mock_bts, '_bts0')
|
||||
mock_bts1 = clone_mod(mock_bts, '_bts1')
|
||||
|
||||
vals = dict(
|
||||
vty_bind_ip='val_vty_bind_ip',
|
||||
abis_bind_ip='val_abis_bind_ip',
|
||||
mcc='val_mcc',
|
||||
mnc='val_mnc',
|
||||
net_name_short='val_net_name_short',
|
||||
net_name_long='val_net_name_long',
|
||||
net_auth_policy='val_net_auth_policy',
|
||||
encryption='val_encryption',
|
||||
smpp_bind_ip='val_smpp_bind_ip',
|
||||
ctrl_bind_ip='val_ctrl_bind_ip',
|
||||
bts_list=(mock_bts0, mock_bts1)
|
||||
)
|
||||
vals = dict(nitb=dict(
|
||||
net=dict(
|
||||
mcc='val_mcc',
|
||||
mnc='val_mnc',
|
||||
short_name='val_short_name',
|
||||
long_name='val_long_name',
|
||||
auth_policy='val_auth_policy',
|
||||
encryption='val_encryption',
|
||||
bts_list=(mock_bts0, mock_bts1)
|
||||
),
|
||||
),
|
||||
nitb_iface=dict(addr='val_nitb_iface_addr'),
|
||||
)
|
||||
|
||||
print(template.render('osmo-nitb.cfg', vals))
|
||||
|
|
@ -0,0 +1,16 @@
|
|||
- make a few trials dirs
|
||||
[TMP]/first
|
||||
[TMP]/second
|
||||
[TMP]/third
|
||||
- fetch trial dirs in order
|
||||
[TMP]/first
|
||||
['taken']
|
||||
[TMP]/second
|
||||
[TMP]/third
|
||||
- no more trial dirs left
|
||||
None
|
||||
- test checksum verification
|
||||
- detect wrong checksum
|
||||
ok, got RuntimeError("Checksum mismatch for 'trial_test/invalid_checksum/file2' vs. 'trial_test/invalid_checksum/checksums.md5' line 2",)
|
||||
- detect missing file
|
||||
ok, got RuntimeError("File listed in checksums file but missing in trials dir: 'trial_test/missing_file/file2' vs. 'trial_test/missing_file/checksums.md5' line 2",)
|
|
@ -0,0 +1,2 @@
|
|||
/tmp/[^/]* [TMP]
|
||||
....-..-.._..-..-.. [TIMESTAMP]
|
|
@ -0,0 +1,49 @@
|
|||
#!/usr/bin/env python3
'''Self-test for osmo_gsm_tester.trial: trial dir pick-up order and
checksums.md5 verification.'''

import time
import _prep
import os
from osmo_gsm_tester import util
from osmo_gsm_tester.trial import Trial

workdir = util.get_tempdir()

trials_dir = util.Dir(workdir)

print('- make a few trials dirs')
print(trials_dir.mkdir('first'))
# sleep between mkdir calls so each dir gets a distinct timestamp,
# fixing the order in which Trial.next() picks them up
time.sleep(1)
print(trials_dir.mkdir('second'))
time.sleep(1)
print(trials_dir.mkdir('third'))

print('- fetch trial dirs in order')
t = Trial.next(trials_dir)
print(t)
print(repr(sorted(t.dir.children())))
print(Trial.next(trials_dir))
print(Trial.next(trials_dir))

print('- no more trial dirs left')
print(repr(Trial.next(trials_dir)))

def expect_verify_failure(trial):
    '''verify() is expected to raise; print the RuntimeError it produced.'''
    try:
        trial.verify()
    except RuntimeError as e:
        print('ok, got %r' % e)

print('- test checksum verification')
d = util.Dir('trial_test')
Trial(d.child('valid_checksums')).verify()

print('- detect wrong checksum')
expect_verify_failure(Trial(d.child('invalid_checksum')))

print('- detect missing file')
expect_verify_failure(Trial(d.child('missing_file')))

# vim: expandtab tabstop=4 shiftwidth=4
|
|
@ -0,0 +1,3 @@
|
|||
5149d403009a139c7e085405ef762e1a file1
|
||||
3d709e89c8ce201e3c928eb917989aef file2
|
||||
60b91f1875424d3b4322b0fdd0529d5d file3
|
|
@ -0,0 +1 @@
|
|||
file1
|
|
@ -0,0 +1 @@
|
|||
no no no
|
|
@ -0,0 +1 @@
|
|||
file3
|
|
@ -0,0 +1,3 @@
|
|||
5149d403009a139c7e085405ef762e1a file1
|
||||
3d709e89c8ce201e3c928eb917989aef file2
|
||||
60b91f1875424d3b4322b0fdd0529d5d file3
|
|
@ -0,0 +1 @@
|
|||
file1
|
|
@ -0,0 +1 @@
|
|||
file3
|
|
@ -0,0 +1,3 @@
|
|||
5149d403009a139c7e085405ef762e1a file1
|
||||
3d709e89c8ce201e3c928eb917989aef file2
|
||||
60b91f1875424d3b4322b0fdd0529d5d file3
|
|
@ -0,0 +1 @@
|
|||
file1
|
|
@ -0,0 +1 @@
|
|||
file2
|
|
@ -0,0 +1 @@
|
|||
file3
|
|
@ -0,0 +1,5 @@
|
|||
- expect the same hashes on every test run
|
||||
a9993e364706816aba3e25717850c26c9cd0d89d
|
||||
356a192b7913b04c54574d18c28d46e6395428ab
|
||||
40bd001563085fc35165329ea1ff5c5ecbdbbeef
|
||||
c129b324aee662b04eccf68babba85851346dff9
|
|
@ -0,0 +1,12 @@
|
|||
#!/usr/bin/env python3
'''Check that osmo_gsm_tester.util.hash_obj() is deterministic across runs.'''
import _prep

from osmo_gsm_tester.util import hash_obj

print('- expect the same hashes on every test run')
samples = (
    'abc',
    1,
    [1, 2, 3],
    { 'k': [ {'a': 1, 'b': 2}, {'a': 3, 'b': 4}, ],
      'i': [ {'c': 1, 'd': 2}, {'c': 3, 'd': 4}, ] },
)
for obj in samples:
    print(hash_obj(obj))
|
||||
|
|
@ -0,0 +1,29 @@
|
|||
# osmo_gsm_tester: bts model specifics
|
||||
#
|
||||
# Copyright (C) 2016-2017 by sysmocom - s.f.m.c. GmbH
|
||||
#
|
||||
# Author: Neels Hofmeyr <neels@hofmeyr.de>
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as
|
||||
# published by the Free Software Foundation, either version 3 of the
|
||||
# License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from . import log, schema, util
|
||||
|
||||
class TestContext(log.Origin):
|
||||
'''
|
||||
API to allow testing various BTS models.
|
||||
'''
|
||||
|
||||
def __init__(self,
|
||||
|
||||
# vim: expandtab tabstop=4 shiftwidth=4
|
|
@ -0,0 +1,104 @@
|
|||
# osmo_gsm_tester: specifics for running a sysmoBTS
|
||||
#
|
||||
# Copyright (C) 2016-2017 by sysmocom - s.f.m.c. GmbH
|
||||
#
|
||||
# Author: Neels Hofmeyr <neels@hofmeyr.de>
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as
|
||||
# published by the Free Software Foundation, either version 3 of the
|
||||
# License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import os
|
||||
from . import log, config, util, template, process
|
||||
|
||||
class OsmoBtsTrx(log.Origin):
    '''Run an osmo-bts-trx (plus osmo-trx) instance for a test suite.

    The BTS must be attached to a NITB via set_nitb() before configure()
    or start() may be called.
    '''
    suite_run = None
    nitb = None
    run_dir = None
    processes = None   # binary name -> process.Process, see launch_process()
    inst = None        # installation dir of the osmo-bts-trx build
    env = None         # environment passed to launched processes

    BIN_TRX = 'osmo-trx'
    BIN_BTS_TRX = 'osmo-bts-trx'
    BIN_PCU = 'osmo-pcu'

    def __init__(self, suite_run, conf):
        self.suite_run = suite_run
        self.conf = conf
        self.set_name('osmo-bts-trx')
        self.set_log_category(log.C_RUN)
        self.processes = {}
        self.inst = None
        self.env = {}

    def start(self):
        '''Write the config file and launch osmo-trx and osmo-bts-trx.'''
        if self.nitb is None:
            raise RuntimeError('BTS needs to be added to a NITB before it can be started')
        self.suite_run.poll()

        self.log('Starting to connect to', self.nitb)
        self.run_dir = util.Dir(self.suite_run.trial.get_run_dir().new_dir(self.name()))
        self.configure()

        self.inst = util.Dir(os.path.abspath(self.suite_run.trial.get_inst('osmo-bts-trx')))
        self.env = { 'LD_LIBRARY_PATH': str(self.inst) }

        self.launch_process(OsmoBtsTrx.BIN_TRX)
        self.launch_process(OsmoBtsTrx.BIN_BTS_TRX, '-r', '1', '-c', os.path.abspath(self.config_file))
        #self.launch_process(OsmoBtsTrx.BIN_PCU, '-r', '1')
        self.suite_run.poll()

    def launch_process(self, binary_name, *args):
        '''Start one binary from the installation dir; at most once each.'''
        if self.processes.get(binary_name) is not None:
            raise RuntimeError('Attempt to launch twice: %r' % binary_name)

        binary_path = os.path.abspath(self.inst.child('bin', binary_name))
        proc_run_dir = self.run_dir.new_dir(binary_name)
        if not os.path.isfile(binary_path):
            raise RuntimeError('Binary missing: %r' % binary_path)
        proc = process.Process(binary_name, proc_run_dir,
                               (binary_path,) + args,
                               env=self.env)
        self.processes[binary_name] = proc
        # ensure the suite tears the process down at the end of the run
        self.suite_run.remember_to_stop(proc)
        proc.launch()

    def configure(self):
        '''Render osmo-bts-trx.cfg into the run dir.'''
        if self.nitb is None:
            raise RuntimeError('BTS needs to be added to a NITB before it can be configured')
        self.config_file = self.run_dir.new_file('osmo-bts-trx.cfg')
        self.dbg(config_file=self.config_file)

        # overlay order: defaults < suite config < OML address < this BTS' conf
        values = { 'osmo_bts_trx': config.get_defaults('osmo_bts_trx') }
        config.overlay(values, self.suite_run.config())
        config.overlay(values, { 'osmo_bts_trx': { 'oml_remote_ip': self.nitb.addr() } })
        config.overlay(values, { 'osmo_bts_trx': self.conf })
        self.dbg(conf=values)

        with open(self.config_file, 'w') as f:
            rendered = template.render('osmo-bts-trx.cfg', values)
            self.dbg(rendered)
            f.write(rendered)

    def conf_for_nitb(self):
        '''Return the config values the NITB needs to know about this BTS.

        NOTE(review): this overlays 'osmo_bts_sysmo' defaults and sets type
        'sysmobts' even though this is the osmo-bts-trx class — it looks
        copied from the sysmoBTS code; confirm this is intended.
        '''
        values = config.get_defaults('nitb_bts')
        config.overlay(values, config.get_defaults('osmo_bts_sysmo'))
        config.overlay(values, self.conf)
        config.overlay(values, { 'type': 'sysmobts' })
        self.dbg(conf=values)
        return values

    def set_nitb(self, nitb):
        '''Attach this BTS to the given NITB instance.'''
        self.nitb = nitb
|
||||
|
||||
# vim: expandtab tabstop=4 shiftwidth=4
|
|
@ -0,0 +1,69 @@
|
|||
# osmo_gsm_tester: specifics for running a sysmoBTS
|
||||
#
|
||||
# Copyright (C) 2016-2017 by sysmocom - s.f.m.c. GmbH
|
||||
#
|
||||
# Author: Neels Hofmeyr <neels@hofmeyr.de>
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as
|
||||
# published by the Free Software Foundation, either version 3 of the
|
||||
# License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from . import log, config, util, template
|
||||
|
||||
class SysmoBts(log.Origin):
    '''Configuration handling for a sysmoBTS.

    Only config rendering is implemented; start() logs an error instead of
    actually deploying to the BTS hardware.
    '''
    suite_run = None
    nitb = None
    run_dir = None

    def __init__(self, suite_run, conf):
        self.suite_run = suite_run
        self.conf = conf
        self.set_name('osmo-bts-sysmo')
        self.set_log_category(log.C_RUN)

    def start(self):
        '''Render the config file; deployment itself is not implemented.'''
        if self.nitb is None:
            raise RuntimeError('BTS needs to be added to a NITB before it can be started')
        self.log('Starting sysmoBTS to connect to', self.nitb)
        self.run_dir = util.Dir(self.suite_run.trial.get_run_dir().new_dir(self.name()))
        self.configure()
        self.err('SysmoBts is not yet implemented')

    def configure(self):
        '''Render osmo-bts-sysmo.cfg into the run dir.'''
        if self.nitb is None:
            raise RuntimeError('BTS needs to be added to a NITB before it can be configured')
        self.config_file = self.run_dir.new_file('osmo-bts-sysmo.cfg')
        self.dbg(config_file=self.config_file)

        # overlay order: defaults < suite config < OML address < this BTS' conf
        values = dict(osmo_bts_sysmo=config.get_defaults('osmo_bts_sysmo'))
        config.overlay(values, self.suite_run.config())
        config.overlay(values, dict(osmo_bts_sysmo=dict(oml_remote_ip=self.nitb.addr())))
        config.overlay(values, dict(osmo_bts_sysmo=self.conf))
        self.dbg(conf=values)

        with open(self.config_file, 'w') as f:
            rendered = template.render('osmo-bts-sysmo.cfg', values)
            self.dbg(rendered)
            f.write(rendered)

    def conf_for_nitb(self):
        '''Return the config values the NITB needs to know about this BTS.'''
        values = config.get_defaults('nitb_bts')
        config.overlay(values, config.get_defaults('osmo_bts_sysmo'))
        config.overlay(values, self.conf)
        config.overlay(values, { 'type': 'sysmobts' })
        self.dbg(conf=values)
        return values

    def set_nitb(self, nitb):
        '''Attach this BTS to the given NITB instance.'''
        self.nitb = nitb
|
||||
|
||||
# vim: expandtab tabstop=4 shiftwidth=4
|
|
@ -1,4 +1,4 @@
|
|||
# osmo_gsm_tester: read and validate config files
|
||||
# osmo_gsm_tester: read and manage config files and global config
|
||||
#
|
||||
# Copyright (C) 2016-2017 by sysmocom - s.f.m.c. GmbH
|
||||
#
|
||||
|
@ -28,35 +28,124 @@
|
|||
#
|
||||
# JSON has too much braces and quotes to be easy to type
|
||||
#
|
||||
# YAML formatting is lean, but too powerful. The normal load() allows arbitrary
|
||||
# code execution. There is safe_load(). But YAML also allows several
|
||||
# alternative ways of formatting, better to have just one authoritative style.
|
||||
# Also it would be better to receive every setting as simple string rather than
|
||||
# e.g. an IMSI as an integer.
|
||||
# YAML formatting is lean, but:
|
||||
# - too powerful. The normal load() allows arbitrary code execution. There is
|
||||
# safe_load().
|
||||
# - allows several alternative ways of formatting, better to have just one
|
||||
# authoritative style.
|
||||
# - tries to detect types. It would be better to receive every setting as
|
||||
# simple string rather than e.g. an IMSI as an integer.
|
||||
# - e.g. an IMSI starting with a zero is interpreted as octal value, resulting
|
||||
# in super confusing error messages if the user merely forgets to quote it.
|
||||
# - does not tell me which line a config item came from, so no detailed error
|
||||
# message is possible.
|
||||
#
|
||||
# The Python ConfigParserShootout page has numerous contestants, but it we want
|
||||
# to use widely used, standardized parsing code without re-inventing the wheel.
|
||||
# The Python ConfigParserShootout page has numerous contestants, but many of
|
||||
# those seem to be not widely used / standardized or even tested.
|
||||
# https://wiki.python.org/moin/ConfigParserShootout
|
||||
#
|
||||
# The optimum would be a stripped down YAML format.
|
||||
# In the lack of that, we shall go with yaml.load_safe() + a round trip
|
||||
# (feeding back to itself), converting keys to lowercase and values to string.
|
||||
# There is no solution for octal interpretations nor config file source lines
|
||||
# unless, apparently, we implement our own config parser.
|
||||
|
||||
import yaml
|
||||
import re
|
||||
import os
|
||||
|
||||
from . import log
|
||||
from . import log, schema, util
|
||||
from .util import is_dict, is_list, Dir, get_tempdir
|
||||
|
||||
def read(path, schema=None):
|
||||
ENV_PREFIX = 'OSMO_GSM_TESTER_'
|
||||
ENV_CONF = os.getenv(ENV_PREFIX + 'CONF')
|
||||
|
||||
DEFAULT_CONFIG_LOCATIONS = [
|
||||
'.',
|
||||
os.path.join(os.getenv('HOME'), '.config', 'osmo_gsm_tester'),
|
||||
'/usr/local/etc/osmo_gsm_tester',
|
||||
'/etc/osmo_gsm_tester'
|
||||
]
|
||||
|
||||
PATHS_CONF = 'paths.conf'
|
||||
PATH_STATE_DIR = 'state_dir'
|
||||
PATH_SUITES_DIR = 'suites_dir'
|
||||
PATH_SCENARIOS_DIR = 'scenarios_dir'
|
||||
PATHS_SCHEMA = {
|
||||
PATH_STATE_DIR: schema.STR,
|
||||
PATH_SUITES_DIR: schema.STR,
|
||||
PATH_SCENARIOS_DIR: schema.STR,
|
||||
}
|
||||
|
||||
PATHS_TEMPDIR_STR = '$TEMPDIR'
|
||||
|
||||
PATHS = None
|
||||
|
||||
def get_config_file(basename, fail_if_missing=True):
|
||||
if ENV_CONF:
|
||||
locations = [ ENV_CONF ]
|
||||
else:
|
||||
locations = DEFAULT_CONFIG_LOCATIONS
|
||||
|
||||
for l in locations:
|
||||
p = os.path.join(l, basename)
|
||||
if os.path.isfile(p):
|
||||
return p
|
||||
if not fail_if_missing:
|
||||
return None
|
||||
raise RuntimeError('configuration file not found: %r in %r' % (basename,
|
||||
[os.path.abspath(p) for p in locations]))
|
||||
|
||||
def read_config_file(basename, validation_schema=None, if_missing_return=False):
|
||||
fail_if_missing = True
|
||||
if if_missing_return is not False:
|
||||
fail_if_missing = False
|
||||
path = get_config_file(basename, fail_if_missing=fail_if_missing)
|
||||
return read(path, validation_schema=validation_schema, if_missing_return=if_missing_return)
|
||||
|
||||
def get_configured_path(label, allow_unset=False):
|
||||
global PATHS
|
||||
|
||||
env_name = ENV_PREFIX + label.upper()
|
||||
env_path = os.getenv(env_name)
|
||||
if env_path:
|
||||
return env_path
|
||||
|
||||
if PATHS is None:
|
||||
paths_file = get_config_file(PATHS_CONF)
|
||||
PATHS = read(paths_file, PATHS_SCHEMA)
|
||||
p = PATHS.get(label)
|
||||
if p is None and not allow_unset:
|
||||
raise RuntimeError('missing configuration in %s: %r' % (PATHS_CONF, label))
|
||||
|
||||
if p.startswith(PATHS_TEMPDIR_STR):
|
||||
p = os.path.join(get_tempdir(), p[len(PATHS_TEMPDIR_STR):])
|
||||
return p
|
||||
|
||||
def get_state_dir():
|
||||
return Dir(get_configured_path(PATH_STATE_DIR))
|
||||
|
||||
def get_suites_dir():
|
||||
return Dir(get_configured_path(PATH_SUITES_DIR))
|
||||
|
||||
def get_scenarios_dir():
|
||||
return Dir(get_configured_path(PATH_SCENARIOS_DIR))
|
||||
|
||||
def read(path, validation_schema=None, if_missing_return=False):
|
||||
with log.Origin(path):
|
||||
if not os.path.isfile(path) and if_missing_return is not False:
|
||||
return if_missing_return
|
||||
with open(path, 'r') as f:
|
||||
config = yaml.safe_load(f)
|
||||
config = _standardize(config)
|
||||
if schema:
|
||||
validate(config, schema)
|
||||
if validation_schema:
|
||||
schema.validate(config, validation_schema)
|
||||
return config
|
||||
|
||||
def write(path, config):
|
||||
with log.Origin(path):
|
||||
with open(path, 'w') as f:
|
||||
f.write(tostr(config))
|
||||
|
||||
def tostr(config):
|
||||
return _tostr(_standardize(config))
|
||||
|
||||
|
@ -74,88 +163,84 @@ def _standardize(config):
|
|||
config = yaml.safe_load(_tostr(_standardize_item(config)))
|
||||
return config
|
||||
|
||||
def get_defaults(for_kind):
|
||||
defaults = read_config_file('default.conf', if_missing_return={})
|
||||
return defaults.get(for_kind, {})
|
||||
|
||||
KEY_RE = re.compile('[a-zA-Z][a-zA-Z0-9_]*')
|
||||
def get_scenario(name, validation_schema=None):
|
||||
scenarios_dir = get_scenarios_dir()
|
||||
if not name.endswith('.conf'):
|
||||
name = name + '.conf'
|
||||
path = scenarios_dir.child(name)
|
||||
if not os.path.isfile(path):
|
||||
raise RuntimeError('No such scenario file: %r' % path)
|
||||
return read(path, validation_schema=validation_schema)
|
||||
|
||||
def band(val):
|
||||
if val in ('GSM-1800', 'GSM-1900'):
|
||||
def add(dest, src):
|
||||
if is_dict(dest):
|
||||
if not is_dict(src):
|
||||
raise ValueError('cannot add to dict a value of type: %r' % type(src))
|
||||
|
||||
for key, val in src.items():
|
||||
dest_val = dest.get(key)
|
||||
if dest_val is None:
|
||||
dest[key] = val
|
||||
else:
|
||||
with log.Origin(key=key):
|
||||
add(dest_val, val)
|
||||
return
|
||||
raise ValueError('Unknown GSM band: %r' % val)
|
||||
if is_list(dest):
|
||||
if not is_list(src):
|
||||
raise ValueError('cannot add to list a value of type: %r' % type(src))
|
||||
dest.extend(src)
|
||||
return
|
||||
if dest == src:
|
||||
return
|
||||
raise ValueError('cannot add dicts, conflicting items (values %r and %r)'
|
||||
% (dest, src))
|
||||
|
||||
INT = 'int'
|
||||
STR = 'str'
|
||||
BAND = 'band'
|
||||
SCHEMA_TYPES = {
|
||||
INT: int,
|
||||
STR: str,
|
||||
BAND: band,
|
||||
}
|
||||
def combine(dest, src):
|
||||
if is_dict(dest):
|
||||
if not is_dict(src):
|
||||
raise ValueError('cannot combine dict with a value of type: %r' % type(src))
|
||||
|
||||
def is_dict(l):
|
||||
return isinstance(l, dict)
|
||||
for key, val in src.items():
|
||||
dest_val = dest.get(key)
|
||||
if dest_val is None:
|
||||
dest[key] = val
|
||||
else:
|
||||
with log.Origin(key=key):
|
||||
combine(dest_val, val)
|
||||
return
|
||||
if is_list(dest):
|
||||
if not is_list(src):
|
||||
raise ValueError('cannot combine list with a value of type: %r' % type(src))
|
||||
for i in range(len(src)):
|
||||
with log.Origin(idx=i):
|
||||
combine(dest[i], src[i])
|
||||
return
|
||||
if dest == src:
|
||||
return
|
||||
raise ValueError('cannot combine dicts, conflicting items (values %r and %r)'
|
||||
% (dest, src))
|
||||
|
||||
def is_list(l):
|
||||
return isinstance(l, (list, tuple))
|
||||
def overlay(dest, src):
|
||||
if is_dict(dest):
|
||||
if not is_dict(src):
|
||||
raise ValueError('cannot combine dict with a value of type: %r' % type(src))
|
||||
|
||||
def validate(config, schema):
|
||||
'''Make sure the given config dict adheres to the schema.
|
||||
The schema is a dict of 'dict paths' in dot-notation with permitted
|
||||
value type. All leaf nodes are validated, nesting dicts are implicit.
|
||||
|
||||
validate( { 'a': 123, 'b': { 'b1': 'foo', 'b2': [ 1, 2, 3 ] } },
|
||||
{ 'a': int,
|
||||
'b.b1': str,
|
||||
'b.b2[]': int } )
|
||||
|
||||
Raise a ValueError in case the schema is violated.
|
||||
'''
|
||||
|
||||
def validate_item(path, value, schema):
|
||||
want_type = schema.get(path)
|
||||
|
||||
if is_list(value):
|
||||
if want_type:
|
||||
raise ValueError('config item is a list, should be %r: %r' % (want_type, path))
|
||||
path = path + '[]'
|
||||
want_type = schema.get(path)
|
||||
|
||||
if not want_type:
|
||||
if is_dict(value):
|
||||
nest(path, value, schema)
|
||||
return
|
||||
if is_list(value) and value:
|
||||
for list_v in value:
|
||||
validate_item(path, list_v, schema)
|
||||
return
|
||||
raise ValueError('config item not known: %r' % path)
|
||||
|
||||
if want_type not in SCHEMA_TYPES:
|
||||
raise ValueError('unknown type %r at %r' % (want_type, path))
|
||||
|
||||
if is_dict(value):
|
||||
raise ValueError('config item is dict but should be a leaf node of type %r: %r'
|
||||
% (want_type, path))
|
||||
|
||||
if is_list(value):
|
||||
for list_v in value:
|
||||
validate_item(path, list_v, schema)
|
||||
return
|
||||
|
||||
with log.Origin(item=path):
|
||||
type_validator = SCHEMA_TYPES.get(want_type)
|
||||
type_validator(value)
|
||||
|
||||
def nest(parent_path, config, schema):
|
||||
if parent_path:
|
||||
parent_path = parent_path + '.'
|
||||
else:
|
||||
parent_path = ''
|
||||
for k,v in config.items():
|
||||
if not KEY_RE.fullmatch(k):
|
||||
raise ValueError('invalid config key: %r' % k)
|
||||
path = parent_path + k
|
||||
validate_item(path, v, schema)
|
||||
|
||||
nest(None, config, schema)
|
||||
for key, val in src.items():
|
||||
dest_val = dest.get(key)
|
||||
with log.Origin(key=key):
|
||||
dest[key] = overlay(dest_val, val)
|
||||
return dest
|
||||
if is_list(dest):
|
||||
if not is_list(src):
|
||||
raise ValueError('cannot combine list with a value of type: %r' % type(src))
|
||||
for i in range(len(src)):
|
||||
with log.Origin(idx=i):
|
||||
dest[i] = overlay(dest[i], src[i])
|
||||
return dest
|
||||
return src
|
||||
|
||||
# vim: expandtab tabstop=4 shiftwidth=4
|
||||
|
|
|
@ -29,15 +29,25 @@ L_LOG = 20
|
|||
L_DBG = 10
|
||||
L_TRACEBACK = 'TRACEBACK'
|
||||
|
||||
LEVEL_STRS = {
|
||||
'err': L_ERR,
|
||||
'log': L_LOG,
|
||||
'dbg': L_DBG,
|
||||
}
|
||||
|
||||
C_NET = 'net'
|
||||
C_RUN = 'run'
|
||||
C_TST = 'tst'
|
||||
C_CNF = 'cnf'
|
||||
C_BUS = 'bus'
|
||||
C_DEFAULT = '---'
|
||||
|
||||
LONG_DATEFMT = '%Y-%m-%d_%H:%M:%S'
|
||||
DATEFMT = '%H:%M:%S'
|
||||
|
||||
# may be overridden by regression tests
|
||||
get_process_id = lambda: '%d-%d' % (os.getpid(), time.time())
|
||||
|
||||
class LogTarget:
|
||||
do_log_time = None
|
||||
do_log_category = None
|
||||
|
@ -47,6 +57,7 @@ class LogTarget:
|
|||
do_log_src = None
|
||||
origin_width = None
|
||||
origin_fmt = None
|
||||
all_levels = None
|
||||
|
||||
# redirected by logging test
|
||||
get_time_str = lambda self: time.strftime(self.log_time_fmt)
|
||||
|
@ -101,10 +112,16 @@ class LogTarget:
|
|||
'set global logging log.L_* level for a given log.C_* category'
|
||||
self.category_levels[category] = level
|
||||
|
||||
def set_all_levels(self, level):
|
||||
self.all_levels = level
|
||||
|
||||
def is_enabled(self, category, level):
|
||||
if level == L_TRACEBACK:
|
||||
return self.do_log_traceback
|
||||
is_level = self.category_levels.get(category)
|
||||
if self.all_levels is not None:
|
||||
is_level = self.all_levels
|
||||
else:
|
||||
is_level = self.category_levels.get(category)
|
||||
if is_level is None:
|
||||
is_level = L_LOG
|
||||
if level < is_level:
|
||||
|
@ -128,19 +145,26 @@ class LogTarget:
|
|||
if self.do_log_category:
|
||||
log_pre.append(category)
|
||||
|
||||
deeper_origins = ''
|
||||
if self.do_log_origin:
|
||||
if origin is None:
|
||||
name = '-'
|
||||
elif isinstance(origin, Origins):
|
||||
name = origin[-1]
|
||||
if len(origin) > 1:
|
||||
deeper_origins = str(origin)
|
||||
elif isinstance(origin, str):
|
||||
name = origin or None
|
||||
elif hasattr(origin, '_name'):
|
||||
name = origin._name
|
||||
elif hasattr(origin, 'name'):
|
||||
name = origin.name()
|
||||
if not name:
|
||||
name = str(origin.__class__.__name__)
|
||||
log_pre.append(self.origin_fmt.format(name))
|
||||
|
||||
if self.do_log_level and level != L_LOG:
|
||||
log_pre.append(level_str(level) or ('loglevel=' + str(level)) )
|
||||
loglevel = '%s: ' % (level_str(level) or ('loglevel=' + str(level)))
|
||||
else:
|
||||
loglevel = ''
|
||||
|
||||
log_line = [str(m) for m in messages]
|
||||
|
||||
|
@ -150,11 +174,15 @@ class LogTarget:
|
|||
(', '.join(['%s=%r' % (k,v)
|
||||
for k,v in sorted(named_items.items())])))
|
||||
|
||||
if deeper_origins:
|
||||
log_line.append(' [%s]' % deeper_origins)
|
||||
|
||||
if self.do_log_src and src:
|
||||
log_line.append(' [%s]' % str(src))
|
||||
|
||||
log_str = '%s%s%s' % (' '.join(log_pre),
|
||||
log_str = '%s%s%s%s' % (' '.join(log_pre),
|
||||
': ' if log_pre else '',
|
||||
loglevel,
|
||||
' '.join(log_line))
|
||||
|
||||
self.log_sink(log_str.strip() + '\n')
|
||||
|
@ -173,6 +201,9 @@ def level_str(level):
|
|||
|
||||
def _log_all_targets(origin, category, level, src, messages, named_items=None):
|
||||
global targets
|
||||
|
||||
if origin is None:
|
||||
origin = Origin._global_current_origin
|
||||
if isinstance(src, int):
|
||||
src = get_src_from_caller(src + 1)
|
||||
for target in targets:
|
||||
|
@ -188,6 +219,20 @@ def get_src_from_tb(tb, levels_up=1):
|
|||
f = os.path.basename(f)
|
||||
return '%s:%s: %s' % (f, l, c)
|
||||
|
||||
def get_line_for_src(src_path):
|
||||
etype, exception, tb = sys.exc_info()
|
||||
if tb:
|
||||
ftb = traceback.extract_tb(tb)
|
||||
for f,l,m,c in ftb:
|
||||
if f.endswith(src_path):
|
||||
return l
|
||||
|
||||
for frame in stack():
|
||||
caller = getframeinfo(frame[0])
|
||||
if caller.filename.endswith(src_path):
|
||||
return caller.lineno
|
||||
return None
|
||||
|
||||
|
||||
class Origin:
|
||||
'''
|
||||
|
@ -198,13 +243,14 @@ class Origin:
|
|||
This will log 'my name' as an origin for the Problem.
|
||||
'''
|
||||
|
||||
_global_current_origin = None
|
||||
_global_id = None
|
||||
|
||||
_log_category = None
|
||||
_src = None
|
||||
_name = None
|
||||
_log_line_buf = None
|
||||
_prev_stdout = None
|
||||
_origin_id = None
|
||||
|
||||
_global_current_origin = None
|
||||
_parent_origin = None
|
||||
|
||||
def __init__(self, *name_items, category=None, **detail_items):
|
||||
|
@ -226,7 +272,17 @@ class Origin:
|
|||
self._name = name + details
|
||||
|
||||
def name(self):
|
||||
return self._name
|
||||
return self._name or self.__class__.__name__
|
||||
|
||||
__str__ = name
|
||||
__repr__ = name
|
||||
|
||||
def origin_id(self):
|
||||
if not self._origin_id:
|
||||
if not Origin._global_id:
|
||||
Origin._global_id = get_process_id()
|
||||
self._origin_id = '%s-%s' % (self.name(), Origin._global_id)
|
||||
return self._origin_id
|
||||
|
||||
def set_log_category(self, category):
|
||||
self._log_category = category
|
||||
|
@ -249,11 +305,9 @@ class Origin:
|
|||
log_exn(self, self._log_category, exc_info)
|
||||
|
||||
def __enter__(self):
|
||||
if self._parent_origin is not None:
|
||||
if not self.set_child_of(Origin._global_current_origin):
|
||||
return
|
||||
if Origin._global_current_origin == self:
|
||||
return
|
||||
self._parent_origin, Origin._global_current_origin = Origin._global_current_origin, self
|
||||
Origin._global_current_origin = self
|
||||
|
||||
def __exit__(self, *exc_info):
|
||||
rc = None
|
||||
|
@ -263,10 +317,54 @@ class Origin:
|
|||
return rc
|
||||
|
||||
def redirect_stdout(self):
|
||||
return contextlib.redirect_stdout(self)
|
||||
return contextlib.redirect_stdout(SafeRedirectStdout(self))
|
||||
|
||||
def gather_origins(self):
|
||||
origins = Origins()
|
||||
origins.add(self)
|
||||
origin = self._parent_origin
|
||||
if origin is None and Origin._global_current_origin is not None:
|
||||
origin = Origin._global_current_origin
|
||||
while origin is not None:
|
||||
origins.add(origin)
|
||||
origin = origin._parent_origin
|
||||
return origins
|
||||
|
||||
def set_child_of(self, parent_origin):
|
||||
# avoid loops
|
||||
if self._parent_origin is not None:
|
||||
return False
|
||||
if parent_origin == self:
|
||||
return False
|
||||
self._parent_origin = parent_origin
|
||||
return True
|
||||
|
||||
class LineInfo(Origin):
|
||||
def __init__(self, src_file, *name_items, **detail_items):
|
||||
self.src_file = src_file
|
||||
self.set_name(*name_items, **detail_items)
|
||||
|
||||
def name(self):
|
||||
l = get_line_for_src(self.src_file)
|
||||
if l is not None:
|
||||
return '%s:%s' % (self._name, l)
|
||||
return super().name()
|
||||
|
||||
class SafeRedirectStdout:
|
||||
'''
|
||||
To be able to use 'print' in test scripts, this is used to redirect stdout
|
||||
to a test class' log() function. However, it turns out doing that breaks
|
||||
python debugger sessions -- it uses extended features of stdout, and will
|
||||
fail dismally if it finds this wrapper in sys.stdout. Luckily, overriding
|
||||
__getattr__() to return the original sys.__stdout__ attributes for anything
|
||||
else than write() makes the debugger session work nicely again!
|
||||
'''
|
||||
_log_line_buf = None
|
||||
|
||||
def __init__(self, origin):
|
||||
self._origin = origin
|
||||
|
||||
def write(self, message):
|
||||
'to redirect stdout to the log'
|
||||
lines = message.splitlines()
|
||||
if not lines:
|
||||
return
|
||||
|
@ -276,21 +374,12 @@ class Origin:
|
|||
if not message.endswith('\n'):
|
||||
self._log_line_buf = lines[-1]
|
||||
lines = lines[:-1]
|
||||
origins = self.gather_origins()
|
||||
origins = self._origin.gather_origins()
|
||||
for line in lines:
|
||||
self._log(L_LOG, (line,), origins=origins)
|
||||
|
||||
def flush(self):
|
||||
pass
|
||||
|
||||
def gather_origins(self):
|
||||
origins = Origins()
|
||||
origin = self
|
||||
while origin:
|
||||
origins.add(origin)
|
||||
origin = origin._parent_origin
|
||||
return str(origins)
|
||||
self._origin._log(L_LOG, (line,), origins=origins)
|
||||
|
||||
def __getattr__(self, name):
|
||||
return sys.__stdout__.__getattribute__(name)
|
||||
|
||||
|
||||
def dbg(origin, category, *messages, **named_items):
|
||||
|
@ -337,7 +426,7 @@ def log_exn(origin=None, category=None, exc_info=None):
|
|||
|
||||
# if there are origins recorded with the Exception, prefer that
|
||||
if hasattr(exception, 'origins'):
|
||||
origin = str(exception.origins)
|
||||
origin = exception.origins
|
||||
|
||||
# if there is a category recorded with the Exception, prefer that
|
||||
if hasattr(exception, 'category'):
|
||||
|
@ -363,16 +452,23 @@ class Origins(list):
|
|||
if origin is not None:
|
||||
self.add(origin)
|
||||
def add(self, origin):
|
||||
if hasattr(origin, '_name'):
|
||||
origin_str = origin._name
|
||||
if hasattr(origin, 'name'):
|
||||
origin_str = origin.name()
|
||||
else:
|
||||
origin_str = str(origin)
|
||||
origin_str = repr(origin)
|
||||
if origin_str is None:
|
||||
raise RuntimeError('origin_str is None for %r' % origin)
|
||||
self.insert(0, origin_str)
|
||||
def __str__(self):
|
||||
return '->'.join(self)
|
||||
return '↪'.join(self)
|
||||
|
||||
|
||||
|
||||
def set_all_levels(level):
|
||||
global targets
|
||||
for target in targets:
|
||||
target.set_all_levels(level)
|
||||
|
||||
def set_level(category, level):
|
||||
global targets
|
||||
for target in targets:
|
||||
|
|
|
@ -0,0 +1,117 @@
|
|||
# osmo_gsm_tester: DBUS client to talk to ofono
|
||||
#
|
||||
# Copyright (C) 2016-2017 by sysmocom - s.f.m.c. GmbH
|
||||
#
|
||||
# Author: Neels Hofmeyr <neels@hofmeyr.de>
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as
|
||||
# published by the Free Software Foundation, either version 3 of the
|
||||
# License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from . import log
|
||||
|
||||
from pydbus import SystemBus, Variant
|
||||
import time
|
||||
import pprint
|
||||
|
||||
from gi.repository import GLib
|
||||
glib_main_loop = GLib.MainLoop()
|
||||
glib_main_ctx = glib_main_loop.get_context()
|
||||
bus = SystemBus()
|
||||
|
||||
def poll():
|
||||
global glib_main_ctx
|
||||
while glib_main_ctx.pending():
|
||||
glib_main_ctx.iteration()
|
||||
|
||||
def get(path):
|
||||
global bus
|
||||
return bus.get('org.ofono', path)
|
||||
|
||||
def list_modems():
|
||||
root = get('/')
|
||||
return sorted(root.GetModems())
|
||||
|
||||
|
||||
class Modem(log.Origin):
|
||||
'convenience for ofono Modem interaction'
|
||||
msisdn = None
|
||||
|
||||
def __init__(self, conf):
|
||||
self.conf = conf
|
||||
self.path = conf.get('path')
|
||||
self.set_name(self.path)
|
||||
self.set_log_category(log.C_BUS)
|
||||
self._dbus_obj = None
|
||||
self._interfaces_was = set()
|
||||
poll()
|
||||
|
||||
def set_msisdn(self, msisdn):
|
||||
self.msisdn = msisdn
|
||||
|
||||
def imsi(self):
|
||||
return self.conf.get('imsi')
|
||||
|
||||
def ki(self):
|
||||
return self.conf.get('ki')
|
||||
|
||||
def set_powered(self, on=True):
|
||||
self.dbus_obj.SetProperty('Powered', Variant('b', on))
|
||||
|
||||
def dbus_obj(self):
|
||||
if self._dbus_obj is not None:
|
||||
return self._dbus_obj
|
||||
self._dbus_obj = get(self.path)
|
||||
self._dbus_obj.PropertyChanged.connect(self._on_property_change)
|
||||
self._on_interfaces_change(self.properties().get('Interfaces'))
|
||||
|
||||
def properties(self):
|
||||
return self.dbus_obj().GetProperties()
|
||||
|
||||
def _on_property_change(self, name, value):
|
||||
if name == 'Interfaces':
|
||||
self._on_interfaces_change(value)
|
||||
|
||||
def _on_interfaces_change(self, interfaces_now):
|
||||
now = set(interfaces_now)
|
||||
additions = now - self._interfaces_was
|
||||
removals = self._interfaces_was - now
|
||||
self._interfaces_was = now
|
||||
for iface in removals:
|
||||
with log.Origin('modem.disable(%s)' % iface):
|
||||
try:
|
||||
self._on_interface_disabled(iface)
|
||||
except:
|
||||
self.log_exn()
|
||||
for iface in additions:
|
||||
with log.Origin('modem.enable(%s)' % iface):
|
||||
try:
|
||||
self._on_interface_enabled(iface)
|
||||
except:
|
||||
self.log_exn()
|
||||
|
||||
def _on_interface_enabled(self, interface_name):
|
||||
self.dbg('Interface enabled:', interface_name)
|
||||
# todo: when the messages service comes up, connect a message reception signal
|
||||
|
||||
def _on_interface_disabled(self, interface_name):
|
||||
self.dbg('Interface disabled:', interface_name)
|
||||
|
||||
def connect(self, nitb):
|
||||
'set the modem up to connect to MCC+MNC from NITB config'
|
||||
self.log('connect to', nitb)
|
||||
|
||||
def sms_send(self, msisdn):
|
||||
self.log('send sms to MSISDN', msisdn)
|
||||
return 'todo'
|
||||
|
||||
# vim: expandtab tabstop=4 shiftwidth=4
|
|
@ -0,0 +1,88 @@
|
|||
|
||||
# osmo_gsm_tester: specifics for running a sysmoBTS
|
||||
#
|
||||
# Copyright (C) 2016-2017 by sysmocom - s.f.m.c. GmbH
|
||||
#
|
||||
# Author: Neels Hofmeyr <neels@hofmeyr.de>
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as
|
||||
# published by the Free Software Foundation, either version 3 of the
|
||||
# License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import socket
|
||||
import struct
|
||||
|
||||
from . import log
|
||||
|
||||
class CtrlInterfaceExn(Exception):
|
||||
pass
|
||||
|
||||
class OsmoCtrl(log.Origin):
|
||||
|
||||
def __init__(self, host, port):
|
||||
self.set_name('Ctrl', host=host, port=port)
|
||||
self.set_log_category(log.C_BUS)
|
||||
self.host = host
|
||||
self.port = port
|
||||
self.sck = None
|
||||
|
||||
def prefix_ipa_ctrl_header(self, data):
|
||||
if isinstance(data, str):
|
||||
data = data.encode('utf-8')
|
||||
s = struct.pack(">HBB", len(data)+1, 0xee, 0)
|
||||
return s + data
|
||||
|
||||
def remove_ipa_ctrl_header(self, data):
|
||||
if (len(data) < 4):
|
||||
raise CtrlInterfaceExn("Answer too short!")
|
||||
(plen, ipa_proto, osmo_proto) = struct.unpack(">HBB", data[:4])
|
||||
if (plen + 3 > len(data)):
|
||||
self.err('Warning: Wrong payload length', expected=plen, got=len(data)-3)
|
||||
if (ipa_proto != 0xee or osmo_proto != 0):
|
||||
raise CtrlInterfaceExn("Wrong protocol in answer!")
|
||||
return data[4:plen+3], data[plen+3:]
|
||||
|
||||
def connect(self):
|
||||
self.dbg('Connecting')
|
||||
self.sck = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
self.sck.connect((self.host, self.port))
|
||||
self.sck.setblocking(1)
|
||||
|
||||
def disconnect(self):
|
||||
self.dbg('Disconnecting')
|
||||
if self.sck is not None:
|
||||
self.sck.close()
|
||||
|
||||
def _send(self, data):
|
||||
self.dbg('Sending', data=data)
|
||||
data = self.prefix_ipa_ctrl_header(data)
|
||||
self.sck.send(data)
|
||||
|
||||
def receive(self, length = 1024):
|
||||
return self.sck.recv(length)
|
||||
|
||||
def do_set(self, var, value, id=0):
|
||||
setmsg = "SET %s %s %s" %(id, var, value)
|
||||
self._send(setmsg)
|
||||
|
||||
def do_get(self, var, id=0):
|
||||
getmsg = "GET %s %s" %(id, var)
|
||||
self._send(getmsg)
|
||||
|
||||
def __enter__(self):
|
||||
self.connect()
|
||||
return self
|
||||
|
||||
def __exit__(self, *exc_info):
|
||||
self.disconnect()
|
||||
|
||||
# vim: expandtab tabstop=4 shiftwidth=4
|
|
@ -0,0 +1,155 @@
|
|||
# osmo_gsm_tester: specifics for running an osmo-nitb
|
||||
#
|
||||
# Copyright (C) 2016-2017 by sysmocom - s.f.m.c. GmbH
|
||||
#
|
||||
# Author: Neels Hofmeyr <neels@hofmeyr.de>
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as
|
||||
# published by the Free Software Foundation, either version 3 of the
|
||||
# License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import os
|
||||
import random
|
||||
import re
|
||||
import socket
|
||||
|
||||
from . import log, util, config, template, process, osmo_ctrl
|
||||
|
||||
class OsmoNitb(log.Origin):
|
||||
suite_run = None
|
||||
nitb_iface = None
|
||||
run_dir = None
|
||||
config_file = None
|
||||
process = None
|
||||
bts = None
|
||||
|
||||
def __init__(self, suite_run, nitb_iface):
|
||||
self.suite_run = suite_run
|
||||
self.nitb_iface = nitb_iface
|
||||
self.set_log_category(log.C_RUN)
|
||||
self.set_name('osmo-nitb_%s' % nitb_iface.get('addr'))
|
||||
self.bts = []
|
||||
|
||||
def start(self):
|
||||
self.log('Starting osmo-nitb')
|
||||
self.run_dir = util.Dir(self.suite_run.trial.get_run_dir().new_dir(self.name()))
|
||||
self.configure()
|
||||
inst = util.Dir(self.suite_run.trial.get_inst('openbsc'))
|
||||
binary = os.path.abspath(inst.child('bin', 'osmo-nitb'))
|
||||
if not os.path.isfile(binary):
|
||||
raise RuntimeError('Binary missing: %r' % binary)
|
||||
env = { 'LD_LIBRARY_PATH': os.path.abspath(str(inst)) }
|
||||
self.dbg(run_dir=self.run_dir, binary=binary, env=env)
|
||||
self.process = process.Process(self.name(), self.run_dir,
|
||||
(binary, '-c',
|
||||
os.path.abspath(self.config_file)),
|
||||
env=env)
|
||||
self.suite_run.remember_to_stop(self.process)
|
||||
self.process.launch()
|
||||
|
||||
def configure(self):
|
||||
self.config_file = self.run_dir.new_file('osmo-nitb.cfg')
|
||||
self.dbg(config_file=self.config_file)
|
||||
|
||||
values = dict(nitb=config.get_defaults('nitb'))
|
||||
config.overlay(values, self.suite_run.config())
|
||||
config.overlay(values, dict(nitb_iface=self.nitb_iface))
|
||||
|
||||
bts_list = []
|
||||
for bts in self.bts:
|
||||
bts_list.append(bts.conf_for_nitb())
|
||||
config.overlay(values, dict(nitb=dict(net=dict(bts_list=bts_list))))
|
||||
|
||||
self.dbg(conf=values)
|
||||
|
||||
with open(self.config_file, 'w') as f:
|
||||
r = template.render('osmo-nitb.cfg', values)
|
||||
self.dbg(r)
|
||||
f.write(r)
|
||||
|
||||
def addr(self):
|
||||
return self.nitb_iface.get('addr')
|
||||
|
||||
def add_bts(self, bts):
|
||||
self.bts.append(bts)
|
||||
bts.set_nitb(self)
|
||||
|
||||
def add_subscriber(self, modem, msisdn=None):
|
||||
if msisdn is None:
|
||||
msisdn = self.suite_run.resources_pool.next_msisdn(modem)
|
||||
modem.set_msisdn(msisdn)
|
||||
self.log('Add subscriber', msisdn=msisdn, imsi=modem.imsi())
|
||||
with self:
|
||||
OsmoNitbCtrl(self).add_subscriber(modem.imsi(), msisdn, modem.ki())
|
||||
|
||||
def subscriber_attached(self, *modems):
|
||||
return all([self.imsi_attached(m.imsi()) for m in modems])
|
||||
|
||||
def imsi_attached(self, imsi):
|
||||
return random.choice((True, False))
|
||||
|
||||
def sms_received(self, sms):
|
||||
return random.choice((True, False))
|
||||
|
||||
def running(self):
|
||||
return not self.process.terminated()
|
||||
|
||||
|
||||
class OsmoNitbCtrl(log.Origin):
|
||||
PORT = 4249
|
||||
SUBSCR_MODIFY_VAR = 'subscriber-modify-v1'
|
||||
SUBSCR_MODIFY_REPLY_RE = re.compile("SET_REPLY (\d+) %s OK" % SUBSCR_MODIFY_VAR)
|
||||
SUBSCR_LIST_ACTIVE_VAR = 'subscriber-list-active-v1'
|
||||
|
||||
def __init__(self, nitb):
|
||||
self.nitb = nitb
|
||||
self.set_name('CTRL(%s:%d)' % (self.nitb.addr(), OsmoNitbCtrl.PORT))
|
||||
self.set_child_of(nitb)
|
||||
|
||||
def ctrl(self):
|
||||
return osmo_ctrl.OsmoCtrl(self.nitb.addr(), OsmoNitbCtrl.PORT)
|
||||
|
||||
def add_subscriber(self, imsi, msisdn, ki=None, algo=None):
|
||||
created = False
|
||||
if ki and not algo:
|
||||
algo = 'comp128v1'
|
||||
|
||||
if algo:
|
||||
value = '%s,%s,%s,%s' % (imsi,msisdn,algo,ki)
|
||||
else:
|
||||
value = '%s,%s' % (imsi, msisdn)
|
||||
|
||||
with osmo_ctrl.OsmoCtrl(self.nitb.addr(), OsmoNitbCtrl.PORT) as ctrl:
|
||||
ctrl.do_set(OsmoNitbCtrl.SUBSCR_MODIFY_VAR, value)
|
||||
data = ctrl.receive()
|
||||
(answer, data) = ctrl.remove_ipa_ctrl_header(data)
|
||||
answer_str = answer.decode('utf-8')
|
||||
res = OsmoNitbCtrl.SUBSCR_MODIFY_REPLY_RE.match(answer_str)
|
||||
if not res:
|
||||
raise RuntimeError('Cannot create subscriber %r (answer=%r)' % (imsi, answer_str))
|
||||
self.dbg('Created subscriber', imsi=imsi, msisdn=msisdn)
|
||||
return True
|
||||
|
||||
def subscriber_list_active(self):
|
||||
var = 'subscriber-list-active-v1'
|
||||
aslist_str = ""
|
||||
with osmo_ctrl.OsmoCtrl(self.nitb.addr(), OsmoNitbCtrl.PORT) as ctrl:
|
||||
self.ctrl.do_get(OsmoNitbCtrl.SUBSCR_LIST_ACTIVE_VAR)
|
||||
# this looks like it doesn't work for long data. It's legacy code from the old osmo-gsm-tester.
|
||||
data = self.ctrl.receive()
|
||||
while (len(data) > 0):
|
||||
(answer, data) = self.ctrl.remove_ipa_ctrl_header(data)
|
||||
answer = answer.replace('\n', ' ')
|
||||
aslist_str = answer
|
||||
return aslist_str
|
||||
|
||||
# vim: expandtab tabstop=4 shiftwidth=4
|
|
@ -17,7 +17,190 @@
|
|||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import os
|
||||
import time
|
||||
import subprocess
|
||||
import signal
|
||||
|
||||
from . import log
|
||||
from .util import Dir
|
||||
|
||||
class Process(log.Origin):
|
||||
|
||||
process_obj = None
|
||||
outputs = None
|
||||
result = None
|
||||
killed = None
|
||||
|
||||
def __init__(self, name, run_dir, popen_args, **popen_kwargs):
|
||||
self.name_str = name
|
||||
self.set_name(name)
|
||||
self.set_log_category(log.C_RUN)
|
||||
self.run_dir = run_dir
|
||||
self.popen_args = popen_args
|
||||
self.popen_kwargs = popen_kwargs
|
||||
self.outputs = {}
|
||||
if not isinstance(self.run_dir, Dir):
|
||||
self.run_dir = Dir(os.path.abspath(str(self.run_dir)))
|
||||
|
||||
def set_env(self, key, value):
|
||||
env = self.popen_kwargs.get('env') or {}
|
||||
env[key] = value
|
||||
self.popen_kwargs['env'] = env
|
||||
|
||||
def make_output_log(self, name):
|
||||
'''
|
||||
create a non-existing log output file in run_dir to pipe stdout and
|
||||
stderr from this process to.
|
||||
'''
|
||||
path = self.run_dir.new_child(name)
|
||||
f = open(path, 'w')
|
||||
self.dbg(path)
|
||||
f.write('(launched: %s)\n' % time.strftime(log.LONG_DATEFMT))
|
||||
f.flush()
|
||||
self.outputs[name] = (path, f)
|
||||
return f
|
||||
|
||||
def launch(self):
|
||||
with self:
|
||||
|
||||
self.dbg('cd %r; %s %s' % (
|
||||
os.path.abspath(str(self.run_dir)),
|
||||
' '.join(['%s=%r'%(k,v) for k,v in self.popen_kwargs.get('env', {}).items()]),
|
||||
' '.join(self.popen_args)))
|
||||
|
||||
self.process_obj = subprocess.Popen(
|
||||
self.popen_args,
|
||||
stdout=self.make_output_log('stdout'),
|
||||
stderr=self.make_output_log('stderr'),
|
||||
shell=False,
|
||||
cwd=self.run_dir.path,
|
||||
**self.popen_kwargs)
|
||||
self.set_name(self.name_str, pid=self.process_obj.pid)
|
||||
self.log('Launched')
|
||||
|
||||
def _poll_termination(self, time_to_wait_for_term=5):
|
||||
wait_step = 0.001
|
||||
waited_time = 0
|
||||
while True:
|
||||
# poll returns None if proc is still running
|
||||
self.result = self.process_obj.poll()
|
||||
if self.result is not None:
|
||||
return True
|
||||
waited_time += wait_step
|
||||
# make wait_step approach 1.0
|
||||
wait_step = (1. + 5. * wait_step) / 6.
|
||||
if waited_time >= time_to_wait_for_term:
|
||||
break
|
||||
time.sleep(wait_step)
|
||||
return False
|
||||
|
||||
def terminate(self):
|
||||
if self.process_obj is None:
|
||||
return
|
||||
if self.result is not None:
|
||||
return
|
||||
|
||||
while True:
|
||||
# first try SIGINT to allow stdout+stderr flushing
|
||||
self.log('Terminating (SIGINT)')
|
||||
os.kill(self.process_obj.pid, signal.SIGINT)
|
||||
self.killed = signal.SIGINT
|
||||
if self._poll_termination():
|
||||
break
|
||||
|
||||
# SIGTERM maybe?
|
||||
self.log('Terminating (SIGTERM)')
|
||||
self.process_obj.terminate()
|
||||
self.killed = signal.SIGTERM
|
||||
if self._poll_termination():
|
||||
break
|
||||
|
||||
# out of patience
|
||||
self.log('Terminating (SIGKILL)')
|
||||
self.process_obj.kill()
|
||||
self.killed = signal.SIGKILL
|
||||
break;
|
||||
|
||||
self.process_obj.wait()
|
||||
self.cleanup()
|
||||
|
||||
def cleanup(self):
|
||||
self.close_output_logs()
|
||||
if self.result == 0:
|
||||
self.log('Terminated: ok', rc=self.result)
|
||||
elif self.killed:
|
||||
self.log('Terminated', rc=self.result)
|
||||
else:
|
||||
self.err('Terminated: ERROR', rc=self.result)
|
||||
#self.err('stdout:\n', self.get_stdout_tail(prefix='| '), '\n')
|
||||
self.err('stderr:\n', self.get_stderr_tail(prefix='| '), '\n')
|
||||
|
||||
def close_output_logs(self):
|
||||
self.dbg('Cleanup')
|
||||
for k, v in self.outputs.items():
|
||||
path, f = v
|
||||
if f:
|
||||
f.flush()
|
||||
f.close()
|
||||
self.outputs[k] = (path, None)
|
||||
|
||||
def poll(self):
|
||||
if self.process_obj is None:
|
||||
return
|
||||
if self.result is not None:
|
||||
return
|
||||
self.result = self.process_obj.poll()
|
||||
if self.result is not None:
|
||||
self.cleanup()
|
||||
|
||||
def get_output(self, which):
|
||||
v = self.outputs.get(which)
|
||||
if not v:
|
||||
return None
|
||||
path, f = v
|
||||
with open(path, 'r') as f2:
|
||||
return f2.read()
|
||||
|
||||
def get_output_tail(self, which, tail=10, prefix=''):
|
||||
out = self.get_output(which).splitlines()
|
||||
tail = min(len(out), tail)
|
||||
return ('\n' + prefix).join(out[-tail:])
|
||||
|
||||
def get_stdout(self):
|
||||
return self.get_output('stdout')
|
||||
|
||||
def get_stderr(self):
|
||||
return self.get_output('stderr')
|
||||
|
||||
def get_stdout_tail(self, tail=10, prefix=''):
|
||||
return self.get_output_tail('stdout', tail, prefix)
|
||||
|
||||
def get_stderr_tail(self, tail=10, prefix=''):
|
||||
return self.get_output_tail('stderr', tail, prefix)
|
||||
|
||||
def terminated(self):
|
||||
self.poll()
|
||||
return self.result is not None
|
||||
|
||||
def wait(self):
|
||||
self.process_obj.wait()
|
||||
self.poll()
|
||||
|
||||
|
||||
class RemoteProcess(Process):
|
||||
|
||||
def __init__(self, remote_host, remote_cwd, *process_args, **process_kwargs):
|
||||
super().__init__(*process_args, **process_kwargs)
|
||||
self.remote_host = remote_host
|
||||
self.remote_cwd = remote_cwd
|
||||
|
||||
# hacky: instead of just prepending ssh, i.e. piping stdout and stderr
|
||||
# over the ssh link, we should probably run on the remote side,
|
||||
# monitoring the process remotely.
|
||||
self.popen_args = ['ssh', '-t', self.remote_host,
|
||||
'cd "%s"; %s' % (self.remote_cwd,
|
||||
' '.join(['"%s"' % arg for arg in self.popen_args]))]
|
||||
self.dbg(self.popen_args, dir=self.run_dir, conf=self.popen_kwargs)
|
||||
|
||||
# vim: expandtab tabstop=4 shiftwidth=4
|
||||
|
|
|
@ -18,34 +18,443 @@
|
|||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import os
|
||||
import time
|
||||
import copy
|
||||
import atexit
|
||||
import pprint
|
||||
|
||||
from . import log
|
||||
from . import config
|
||||
from .utils import listdict, FileLock
|
||||
from . import util
|
||||
from . import schema
|
||||
from . import ofono_client
|
||||
from . import osmo_nitb
|
||||
from . import bts_sysmo, bts_osmotrx
|
||||
|
||||
class Resources(log.Origin):
|
||||
from .util import is_dict, is_list
|
||||
|
||||
def __init__(self, config_path, lock_dir):
|
||||
self.config_path = config_path
|
||||
self.lock_dir = lock_dir
|
||||
self.set_name(conf=self.config_path, lock=self.lock_dir)
|
||||
HASH_KEY = '_hash'
|
||||
RESERVED_KEY = '_reserved_by'
|
||||
USED_KEY = '_used'
|
||||
|
||||
def ensure_lock_dir_exists(self):
|
||||
if not os.path.isdir(self.lock_dir):
|
||||
os.makedirs(self.lock_dir)
|
||||
RESOURCES_CONF = 'resources.conf'
|
||||
LAST_USED_MSISDN_FILE = 'last_used_msisdn.state'
|
||||
RESERVED_RESOURCES_FILE = 'reserved_resources.state'
|
||||
|
||||
R_NITB_IFACE = 'nitb_iface'
|
||||
R_BTS = 'bts'
|
||||
R_ARFCN = 'arfcn'
|
||||
R_MODEM = 'modem'
|
||||
R_ALL = (R_NITB_IFACE, R_BTS, R_ARFCN, R_MODEM)
|
||||
|
||||
RESOURCES_SCHEMA = {
|
||||
'nitb_iface[].addr': schema.IPV4,
|
||||
'bts[].label': schema.STR,
|
||||
'bts[].type': schema.STR,
|
||||
'bts[].unit_id': schema.INT,
|
||||
'bts[].addr': schema.IPV4,
|
||||
'bts[].band': schema.BAND,
|
||||
'bts[].trx[].hwaddr': schema.HWADDR,
|
||||
'arfcn[].arfcn': schema.INT,
|
||||
'arfcn[].band': schema.BAND,
|
||||
'modem[].label': schema.STR,
|
||||
'modem[].path': schema.STR,
|
||||
'modem[].imsi': schema.IMSI,
|
||||
'modem[].ki': schema.KI,
|
||||
}
|
||||
|
||||
WANT_SCHEMA = util.dict_add(
|
||||
dict([('%s[].times' % r, schema.INT) for r in R_ALL]),
|
||||
RESOURCES_SCHEMA)
|
||||
|
||||
KNOWN_BTS_TYPES = {
|
||||
'sysmo': bts_sysmo.SysmoBts,
|
||||
'osmotrx': bts_osmotrx.OsmoBtsTrx,
|
||||
}
|
||||
|
||||
def register_bts_type(name, clazz):
|
||||
KNOWN_BTS_TYPES[name] = clazz
|
||||
|
||||
class ResourcesPool(log.Origin):
|
||||
_remember_to_free = None
|
||||
_registered_exit_handler = False
|
||||
|
||||
def __init__(self):
|
||||
self.config_path = config.get_config_file(RESOURCES_CONF)
|
||||
self.state_dir = config.get_state_dir()
|
||||
self.set_name(conf=self.config_path, state=self.state_dir.path)
|
||||
self.read_conf()
|
||||
|
||||
def read_conf(self):
|
||||
self.all_resources = Resources(config.read(self.config_path, RESOURCES_SCHEMA))
|
||||
self.all_resources.set_hashes()
|
||||
|
||||
def reserve(self, origin, want):
|
||||
'''
|
||||
attempt to reserve the resources specified in the dict 'want' for
|
||||
'origin'. Obtain a lock on the resources lock dir, verify that all
|
||||
wanted resources are available, and if yes mark them as reserved.
|
||||
|
||||
On success, return a reservation object which can be used to release
|
||||
the reservation. The reservation will be freed automatically on program
|
||||
exit, if not yet done manually.
|
||||
|
||||
'origin' should be an Origin() instance.
|
||||
|
||||
'want' is a dict matching WANT_SCHEMA, which is the same as
|
||||
the RESOURCES_SCHEMA, except each entity that can be reserved has a 'times'
|
||||
field added, to indicate how many of those should be reserved.
|
||||
|
||||
If an entry has only a 'times' set, any of the resources may be
|
||||
reserved without further limitations.
|
||||
|
||||
ResourcesPool may also be selected with narrowed down constraints.
|
||||
This would reserve one NITB IP address, two modems, one BTS of type
|
||||
sysmo and one of type oct, plus 2 ARFCNs in the 1800 band:
|
||||
|
||||
{
|
||||
'nitb_iface': [ { 'times': 1 } ],
|
||||
'bts': [ { 'type': 'sysmo', 'times': 1 }, { 'type': 'oct', 'times': 1 } ],
|
||||
'arfcn': [ { 'band': 'GSM-1800', 'times': 2 } ],
|
||||
'modem': [ { 'times': 2 } ],
|
||||
}
|
||||
|
||||
A times=1 value is implicit, so the above is equivalent to:
|
||||
|
||||
{
|
||||
'nitb_iface': [ {} ],
|
||||
'bts': [ { 'type': 'sysmo' }, { 'type': 'oct' } ],
|
||||
'arfcn': [ { 'band': 'GSM-1800', 'times': 2 } ],
|
||||
'modem': [ { 'times': 2 } ],
|
||||
}
|
||||
'''
|
||||
schema.validate(want, WANT_SCHEMA)
|
||||
|
||||
# replicate items that have a 'times' > 1
|
||||
want = copy.deepcopy(want)
|
||||
for key, item_list in want.items():
|
||||
more_items = []
|
||||
for item in item_list:
|
||||
times = int(item.pop('times'))
|
||||
if times and times > 1:
|
||||
for i in range(times - 1):
|
||||
more_items.append(copy.deepcopy(item))
|
||||
item_list.extend(more_items)
|
||||
|
||||
origin_id = origin.origin_id()
|
||||
|
||||
with self.state_dir.lock(origin_id):
|
||||
rrfile_path = self.state_dir.mk_parentdir(RESERVED_RESOURCES_FILE)
|
||||
reserved = Resources(config.read(rrfile_path, if_missing_return={}))
|
||||
to_be_reserved = self.all_resources.without(reserved).find(want)
|
||||
|
||||
to_be_reserved.mark_reserved_by(origin_id)
|
||||
|
||||
reserved.add(to_be_reserved)
|
||||
config.write(rrfile_path, reserved)
|
||||
|
||||
self.remember_to_free(to_be_reserved)
|
||||
return ReservedResources(self, origin, to_be_reserved)
|
||||
|
||||
def free(self, origin, to_be_freed):
|
||||
with self.state_dir.lock(origin.origin_id()):
|
||||
rrfile_path = self.state_dir.mk_parentdir(RESERVED_RESOURCES_FILE)
|
||||
reserved = Resources(config.read(rrfile_path, if_missing_return={}))
|
||||
reserved.drop(to_be_freed)
|
||||
config.write(rrfile_path, reserved)
|
||||
self.forget_freed(to_be_freed)
|
||||
|
||||
def register_exit_handler(self):
|
||||
if self._registered_exit_handler:
|
||||
return
|
||||
atexit.register(self.clean_up_registered_resources)
|
||||
self._registered_exit_handler = True
|
||||
|
||||
def unregister_exit_handler(self):
|
||||
if not self._registered_exit_handler:
|
||||
return
|
||||
atexit.unregister(self.clean_up_registered_resources)
|
||||
self._registered_exit_handler = False
|
||||
|
||||
def clean_up_registered_resources(self):
|
||||
if not self._remember_to_free:
|
||||
return
|
||||
self.free(log.Origin('atexit.clean_up_registered_resources()'),
|
||||
self._remember_to_free)
|
||||
|
||||
def remember_to_free(self, to_be_reserved):
|
||||
self.register_exit_handler()
|
||||
if not self._remember_to_free:
|
||||
self._remember_to_free = Resources()
|
||||
self._remember_to_free.add(to_be_reserved)
|
||||
|
||||
def forget_freed(self, freed):
|
||||
if freed is self._remember_to_free:
|
||||
self._remember_to_free.clear()
|
||||
else:
|
||||
self._remember_to_free.drop(freed)
|
||||
if not self._remember_to_free:
|
||||
self.unregister_exit_handler()
|
||||
|
||||
def next_msisdn(self, origin):
|
||||
origin_id = origin.origin_id()
|
||||
|
||||
with self.state_dir.lock(origin_id):
|
||||
msisdn_path = self.state_dir.child(LAST_USED_MSISDN_FILE)
|
||||
with log.Origin(msisdn_path):
|
||||
last_msisdn = '1'
|
||||
if os.path.exists(msisdn_path):
|
||||
if not os.path.isfile(msisdn_path):
|
||||
raise RuntimeError('path should be a file but is not: %r' % msisdn_path)
|
||||
with open(msisdn_path, 'r') as f:
|
||||
last_msisdn = f.read().strip()
|
||||
schema.msisdn(last_msisdn)
|
||||
|
||||
next_msisdn = util.msisdn_inc(last_msisdn)
|
||||
with open(msisdn_path, 'w') as f:
|
||||
f.write(next_msisdn)
|
||||
return next_msisdn
|
||||
|
||||
|
||||
global_resources = listdict()
|
||||
class NoResourceExn(Exception):
|
||||
pass
|
||||
|
||||
def register(kind, instance):
|
||||
global global_resources
|
||||
global_resources.add(kind, instance)
|
||||
class Resources(dict):
|
||||
|
||||
def reserve(user, config):
|
||||
asdf
|
||||
def __init__(self, all_resources={}, do_copy=True):
|
||||
if do_copy:
|
||||
all_resources = copy.deepcopy(all_resources)
|
||||
self.update(all_resources)
|
||||
|
||||
def drop(self, reserved, fail_if_not_found=True):
|
||||
# protect from modifying reserved because we're the same object
|
||||
if reserved is self:
|
||||
raise RuntimeError('Refusing to drop a list of resources from itself.'
|
||||
' This is probably a bug where a list of Resources()'
|
||||
' should have been copied but is passed as-is.'
|
||||
' use Resources.clear() instead.')
|
||||
|
||||
for key, reserved_list in reserved.items():
|
||||
my_list = self.get(key) or []
|
||||
|
||||
if my_list is reserved_list:
|
||||
self.pop(key)
|
||||
continue
|
||||
|
||||
for reserved_item in reserved_list:
|
||||
found = False
|
||||
reserved_hash = reserved_item.get(HASH_KEY)
|
||||
if not reserved_hash:
|
||||
raise RuntimeError('Resources.drop() only works with hashed items')
|
||||
|
||||
for i in range(len(my_list)):
|
||||
my_item = my_list[i]
|
||||
my_hash = my_item.get(HASH_KEY)
|
||||
if not my_hash:
|
||||
raise RuntimeError('Resources.drop() only works with hashed items')
|
||||
if my_hash == reserved_hash:
|
||||
found = True
|
||||
my_list.pop(i)
|
||||
break
|
||||
|
||||
if fail_if_not_found and not found:
|
||||
raise RuntimeError('Asked to drop resource from a pool, but the'
|
||||
' resource was not found: %s = %r' % (key, reserved_item))
|
||||
|
||||
if not my_list:
|
||||
self.pop(key)
|
||||
return self
|
||||
|
||||
def without(self, reserved):
|
||||
return Resources(self).drop(reserved)
|
||||
|
||||
def find(self, want, skip_if_marked=None, do_copy=True):
|
||||
matches = {}
|
||||
for key, want_list in want.items():
|
||||
with log.Origin(want=key):
|
||||
my_list = self.get(key)
|
||||
|
||||
log.dbg(None, None, 'Looking for', len(want_list), 'x', key, ', candidates:', len(my_list))
|
||||
|
||||
# Try to avoid a less constrained item snatching away a resource
|
||||
# from a more detailed constrained requirement.
|
||||
|
||||
# first record all matches
|
||||
all_matches = []
|
||||
for want_item in want_list:
|
||||
item_match_list = []
|
||||
for i in range(len(my_list)):
|
||||
my_item = my_list[i]
|
||||
if skip_if_marked and my_item.get(skip_if_marked):
|
||||
continue
|
||||
if item_matches(my_item, want_item, ignore_keys=('times',)):
|
||||
item_match_list.append(i)
|
||||
if not item_match_list:
|
||||
raise NoResourceExn('No matching resource available for %s = %r'
|
||||
% (key, want_item))
|
||||
all_matches.append( item_match_list )
|
||||
|
||||
if not all_matches:
|
||||
raise NoResourceExn('No matching resource available for %s = %r'
|
||||
% (key, want_list))
|
||||
|
||||
# figure out who gets what
|
||||
solution = solve(all_matches)
|
||||
picked = [ my_list[i] for i in solution if i is not None ]
|
||||
log.dbg(None, None, 'Picked', pprint.pformat(picked))
|
||||
matches[key] = picked
|
||||
|
||||
return Resources(matches, do_copy=do_copy)
|
||||
|
||||
def set_hashes(self):
|
||||
for key, item_list in self.items():
|
||||
for item in item_list:
|
||||
item[HASH_KEY] = util.hash_obj(item, HASH_KEY, RESERVED_KEY, USED_KEY)
|
||||
|
||||
def add(self, more):
|
||||
if more is self:
|
||||
raise RuntimeError('adding a list of resources to itself?')
|
||||
config.add(self, copy.deepcopy(more))
|
||||
|
||||
def combine(self, more_rules):
|
||||
if more_rules is self:
|
||||
raise RuntimeError('combining a list of resource rules with itself?')
|
||||
config.combine(self, copy.deepcopy(more))
|
||||
|
||||
def mark_reserved_by(self, origin_id):
|
||||
for key, item_list in self.items():
|
||||
for item in item_list:
|
||||
item[RESERVED_KEY] = origin_id
|
||||
|
||||
|
||||
def solve(all_matches):
|
||||
'''
|
||||
all_matches shall be a list of index-lists.
|
||||
all_matches[i] is the list of indexes that item i can use.
|
||||
Return a solution so that each i gets a different index.
|
||||
solve([ [0, 1, 2],
|
||||
[0],
|
||||
[0, 2] ]) == [1, 0, 2]
|
||||
'''
|
||||
|
||||
def all_differ(l):
|
||||
return len(set(l)) == len(l)
|
||||
|
||||
def search_in_permutations(fixed=[]):
|
||||
idx = len(fixed)
|
||||
for i in range(len(all_matches[idx])):
|
||||
val = all_matches[idx][i]
|
||||
# don't add a val that's already in the list
|
||||
if val in fixed:
|
||||
continue
|
||||
l = list(fixed)
|
||||
l.append(val)
|
||||
if len(l) == len(all_matches):
|
||||
# found a solution
|
||||
return l
|
||||
# not at the end yet, add next digit
|
||||
r = search_in_permutations(l)
|
||||
if r:
|
||||
# nested search_in_permutations() call found a solution
|
||||
return r
|
||||
# this entire branch yielded no solution
|
||||
return None
|
||||
|
||||
if not all_matches:
|
||||
raise RuntimeError('Cannot solve: no candidates')
|
||||
|
||||
solution = search_in_permutations()
|
||||
if not solution:
|
||||
raise NoResourceExn('The requested resource requirements are not solvable %r'
|
||||
% all_matches)
|
||||
return solution
|
||||
|
||||
|
||||
def contains_hash(list_of_dicts, a_hash):
|
||||
for d in list_of_dicts:
|
||||
if d.get(HASH_KEY) == a_hash:
|
||||
return True
|
||||
return False
|
||||
|
||||
def item_matches(item, wanted_item, ignore_keys=None):
|
||||
if is_dict(wanted_item):
|
||||
# match up two dicts
|
||||
if not isinstance(item, dict):
|
||||
return False
|
||||
for key, wanted_val in wanted_item.items():
|
||||
if ignore_keys and key in ignore_keys:
|
||||
continue
|
||||
if not item_matches(item.get(key), wanted_val, ignore_keys=ignore_keys):
|
||||
return False
|
||||
return True
|
||||
|
||||
if is_list(wanted_item):
|
||||
# multiple possible values
|
||||
if item not in wanted_item:
|
||||
return False
|
||||
return True
|
||||
|
||||
return item == wanted_item
|
||||
|
||||
|
||||
class ReservedResources(log.Origin):
|
||||
'''
|
||||
After all resources have been figured out, this is the API that a test case
|
||||
gets to interact with resources. From those resources that have been
|
||||
reserved for it, it can pick some to mark them as currently in use.
|
||||
Functions like nitb() provide a resource by automatically picking its
|
||||
dependencies from so far unused (but reserved) resource.
|
||||
'''
|
||||
|
||||
def __init__(self, resources_pool, origin, reserved):
|
||||
self.resources_pool = resources_pool
|
||||
self.origin = origin
|
||||
self.reserved = reserved
|
||||
|
||||
def __repr__(self):
|
||||
return 'resources(%s)=%s' % (self.origin.name(), pprint.pformat(self.reserved))
|
||||
|
||||
def get(self, kind, specifics=None):
|
||||
if specifics is None:
|
||||
specifics = {}
|
||||
self.dbg('requesting use of', kind, specifics=specifics)
|
||||
want = { kind: [specifics] }
|
||||
available_dict = self.reserved.find(want, skip_if_marked=USED_KEY, do_copy=False)
|
||||
available = available_dict.get(kind)
|
||||
self.dbg(available=len(available))
|
||||
if not available:
|
||||
raise NoResourceExn('No unused resource found: %r%s' %
|
||||
(kind,
|
||||
(' matching %r' % specifics) if specifics else '')
|
||||
)
|
||||
pick = available[0]
|
||||
self.dbg(using=pick)
|
||||
assert not pick.get(USED_KEY)
|
||||
pick[USED_KEY] = True
|
||||
return copy.deepcopy(pick)
|
||||
|
||||
def put(self, item):
|
||||
if not item.get(USED_KEY):
|
||||
raise RuntimeError('Can only put() a resource that is used: %r' % item)
|
||||
hash_to_put = item.get(HASH_KEY)
|
||||
if not hash_to_put:
|
||||
raise RuntimeError('Can only put() a resource that has a hash marker: %r' % item)
|
||||
for key, item_list in self.reserved.items():
|
||||
my_list = self.get(key)
|
||||
for my_item in my_list:
|
||||
if hash_to_put == my_item.get(HASH_KEY):
|
||||
my_item.pop(USED_KEY)
|
||||
|
||||
def put_all(self):
|
||||
for key, item_list in self.reserved.items():
|
||||
my_list = self.get(key)
|
||||
for my_item in my_list:
|
||||
if my_item.get(USED_KEY):
|
||||
my_item.pop(USED_KEY)
|
||||
|
||||
def free(self):
|
||||
self.resources_pool.free(self.origin, self.reserved)
|
||||
self.reserved = None
|
||||
|
||||
def read_conf(path):
|
||||
with open(path, 'r') as f:
|
||||
conf = f.read()
|
||||
|
||||
# vim: expandtab tabstop=4 shiftwidth=4
|
||||
|
|
|
@ -0,0 +1,144 @@
|
|||
# osmo_gsm_tester: validate dict structures
|
||||
#
|
||||
# Copyright (C) 2016-2017 by sysmocom - s.f.m.c. GmbH
|
||||
#
|
||||
# Author: Neels Hofmeyr <neels@hofmeyr.de>
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as
|
||||
# published by the Free Software Foundation, either version 3 of the
|
||||
# License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import re
|
||||
|
||||
from . import log
|
||||
from .util import is_dict, is_list
|
||||
|
||||
KEY_RE = re.compile('[a-zA-Z][a-zA-Z0-9_]*')
|
||||
IPV4_RE = re.compile('([0-9]{1,3}.){3}[0-9]{1,3}')
|
||||
HWADDR_RE = re.compile('([0-9a-fA-F]{2}:){5}[0-9a-fA-F]{2}')
|
||||
IMSI_RE = re.compile('[0-9]{6,15}')
|
||||
KI_RE = re.compile('[0-9a-fA-F]{32}')
|
||||
MSISDN_RE = re.compile('[0-9]{1,15}')
|
||||
|
||||
def match_re(name, regex, val):
|
||||
while True:
|
||||
if not isinstance(val, str):
|
||||
break;
|
||||
if not regex.fullmatch(val):
|
||||
break;
|
||||
return
|
||||
raise ValueError('Invalid %s: %r' % (name, val))
|
||||
|
||||
def band(val):
|
||||
if val in ('GSM-1800', 'GSM-1900'):
|
||||
return
|
||||
raise ValueError('Unknown GSM band: %r' % val)
|
||||
|
||||
def ipv4(val):
|
||||
match_re('IPv4 address', IPV4_RE, val)
|
||||
els = [int(el) for el in val.split('.')]
|
||||
if not all([el >= 0 and el <= 255 for el in els]):
|
||||
raise ValueError('Invalid IPv4 address: %r' % val)
|
||||
|
||||
def hwaddr(val):
|
||||
match_re('hardware address', HWADDR_RE, val)
|
||||
|
||||
def imsi(val):
|
||||
match_re('IMSI', IMSI_RE, val)
|
||||
|
||||
def ki(val):
|
||||
match_re('KI', KI_RE, val)
|
||||
|
||||
def msisdn(val):
|
||||
match_re('MSISDN', MSISDN_RE, val)
|
||||
|
||||
INT = 'int'
|
||||
STR = 'str'
|
||||
BAND = 'band'
|
||||
IPV4 = 'ipv4'
|
||||
HWADDR = 'hwaddr'
|
||||
IMSI = 'imsi'
|
||||
KI = 'ki'
|
||||
MSISDN = 'msisdn'
|
||||
SCHEMA_TYPES = {
|
||||
INT: int,
|
||||
STR: str,
|
||||
BAND: band,
|
||||
IPV4: ipv4,
|
||||
HWADDR: hwaddr,
|
||||
IMSI: imsi,
|
||||
KI: ki,
|
||||
MSISDN: msisdn,
|
||||
}
|
||||
|
||||
def validate(config, schema):
|
||||
'''Make sure the given config dict adheres to the schema.
|
||||
The schema is a dict of 'dict paths' in dot-notation with permitted
|
||||
value type. All leaf nodes are validated, nesting dicts are implicit.
|
||||
|
||||
validate( { 'a': 123, 'b': { 'b1': 'foo', 'b2': [ 1, 2, 3 ] } },
|
||||
{ 'a': int,
|
||||
'b.b1': str,
|
||||
'b.b2[]': int } )
|
||||
|
||||
Raise a ValueError in case the schema is violated.
|
||||
'''
|
||||
|
||||
def validate_item(path, value, schema):
|
||||
want_type = schema.get(path)
|
||||
|
||||
if is_list(value):
|
||||
if want_type:
|
||||
raise ValueError('config item is a list, should be %r: %r' % (want_type, path))
|
||||
path = path + '[]'
|
||||
want_type = schema.get(path)
|
||||
|
||||
if not want_type:
|
||||
if is_dict(value):
|
||||
nest(path, value, schema)
|
||||
return
|
||||
if is_list(value) and value:
|
||||
for list_v in value:
|
||||
validate_item(path, list_v, schema)
|
||||
return
|
||||
raise ValueError('config item not known: %r' % path)
|
||||
|
||||
if want_type not in SCHEMA_TYPES:
|
||||
raise ValueError('unknown type %r at %r' % (want_type, path))
|
||||
|
||||
if is_dict(value):
|
||||
raise ValueError('config item is dict but should be a leaf node of type %r: %r'
|
||||
% (want_type, path))
|
||||
|
||||
if is_list(value):
|
||||
for list_v in value:
|
||||
validate_item(path, list_v, schema)
|
||||
return
|
||||
|
||||
with log.Origin(item=path):
|
||||
type_validator = SCHEMA_TYPES.get(want_type)
|
||||
type_validator(value)
|
||||
|
||||
def nest(parent_path, config, schema):
|
||||
if parent_path:
|
||||
parent_path = parent_path + '.'
|
||||
else:
|
||||
parent_path = ''
|
||||
for k,v in config.items():
|
||||
if not KEY_RE.fullmatch(k):
|
||||
raise ValueError('invalid config key: %r' % k)
|
||||
path = parent_path + k
|
||||
validate_item(path, v, schema)
|
||||
|
||||
nest(None, config, schema)
|
||||
|
||||
# vim: expandtab tabstop=4 shiftwidth=4
|
|
@ -18,9 +18,12 @@
|
|||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import os
|
||||
from . import config, log, template, utils
|
||||
import sys
|
||||
import time
|
||||
from . import config, log, template, util, resource, schema, ofono_client, osmo_nitb
|
||||
from . import test
|
||||
|
||||
class Suite(log.Origin):
|
||||
class SuiteDefinition(log.Origin):
|
||||
'''A test suite reserves resources for a number of tests.
|
||||
Each test requires a specific number of modems, BTSs etc., which are
|
||||
reserved beforehand by a test suite. This way several test suites can be
|
||||
|
@ -29,14 +32,122 @@ class Suite(log.Origin):
|
|||
|
||||
CONF_FILENAME = 'suite.conf'
|
||||
|
||||
CONF_SCHEMA = {
|
||||
'resources.nitb_iface': config.INT,
|
||||
'resources.nitb': config.INT,
|
||||
'resources.bts': config.INT,
|
||||
'resources.msisdn': config.INT,
|
||||
'resources.modem': config.INT,
|
||||
'defaults.timeout': config.STR,
|
||||
}
|
||||
CONF_SCHEMA = util.dict_add(
|
||||
{
|
||||
'defaults.timeout': schema.STR,
|
||||
},
|
||||
dict([('resources.%s' % k, t) for k,t in resource.WANT_SCHEMA.items()])
|
||||
)
|
||||
|
||||
|
||||
def __init__(self, suite_dir):
|
||||
self.set_log_category(log.C_CNF)
|
||||
self.suite_dir = suite_dir
|
||||
self.set_name(os.path.basename(self.suite_dir))
|
||||
self.read_conf()
|
||||
|
||||
def read_conf(self):
|
||||
with self:
|
||||
self.dbg('reading %s' % SuiteDefinition.CONF_FILENAME)
|
||||
if not os.path.isdir(self.suite_dir):
|
||||
raise RuntimeError('No such directory: %r' % self.suite_dir)
|
||||
self.conf = config.read(os.path.join(self.suite_dir,
|
||||
SuiteDefinition.CONF_FILENAME),
|
||||
SuiteDefinition.CONF_SCHEMA)
|
||||
self.load_tests()
|
||||
|
||||
|
||||
def load_tests(self):
|
||||
with self:
|
||||
self.tests = []
|
||||
for basename in sorted(os.listdir(self.suite_dir)):
|
||||
if not basename.endswith('.py'):
|
||||
continue
|
||||
self.tests.append(Test(self, basename))
|
||||
|
||||
def add_test(self, test):
|
||||
with self:
|
||||
if not isinstance(test, Test):
|
||||
raise ValueError('add_test(): pass a Test() instance, not %s' % type(test))
|
||||
if test.suite is None:
|
||||
test.suite = self
|
||||
if test.suite is not self:
|
||||
raise ValueError('add_test(): test already belongs to another suite')
|
||||
self.tests.append(test)
|
||||
|
||||
|
||||
|
||||
class Test(log.Origin):
|
||||
|
||||
def __init__(self, suite, test_basename):
|
||||
self.suite = suite
|
||||
self.basename = test_basename
|
||||
self.path = os.path.join(self.suite.suite_dir, self.basename)
|
||||
super().__init__(self.path)
|
||||
self.set_name(self.basename)
|
||||
self.set_log_category(log.C_TST)
|
||||
|
||||
def run(self, suite_run):
|
||||
assert self.suite is suite_run.definition
|
||||
with self:
|
||||
test.setup(suite_run, self, ofono_client)
|
||||
success = False
|
||||
try:
|
||||
self.log('START')
|
||||
with self.redirect_stdout():
|
||||
util.run_python_file('%s.%s' % (self.suite.name(), self.name()),
|
||||
self.path)
|
||||
success = True
|
||||
except resource.NoResourceExn:
|
||||
self.err('Current resource state:\n', repr(reserved_resources))
|
||||
raise
|
||||
finally:
|
||||
if success:
|
||||
self.log('PASS')
|
||||
else:
|
||||
self.log('FAIL')
|
||||
|
||||
def name(self):
|
||||
l = log.get_line_for_src(self.path)
|
||||
if l is not None:
|
||||
return '%s:%s' % (self._name, l)
|
||||
return super().name()
|
||||
|
||||
class SuiteRun(log.Origin):
|
||||
|
||||
trial = None
|
||||
resources_pool = None
|
||||
reserved_resources = None
|
||||
_resource_requirements = None
|
||||
_config = None
|
||||
_processes = None
|
||||
|
||||
def __init__(self, current_trial, suite_definition, scenarios=[]):
|
||||
self.trial = current_trial
|
||||
self.definition = suite_definition
|
||||
self.scenarios = scenarios
|
||||
self.set_name(suite_definition.name())
|
||||
self.set_log_category(log.C_TST)
|
||||
self.resources_pool = resource.ResourcesPool()
|
||||
|
||||
def combined(self, conf_name):
|
||||
combination = self.definition.conf.get(conf_name) or {}
|
||||
for scenario in self.scenarios:
|
||||
c = scenario.get(conf_name)
|
||||
if c is None:
|
||||
continue
|
||||
config.combine(combination, c)
|
||||
return combination
|
||||
|
||||
def resource_requirements(self):
|
||||
if self._resource_requirements is None:
|
||||
self._resource_requirements = self.combined('resources')
|
||||
return self._resource_requirements
|
||||
|
||||
def config(self):
|
||||
if self._config is None:
|
||||
self._config = self.combined('config')
|
||||
return self._config
|
||||
|
||||
class Results:
|
||||
def __init__(self):
|
||||
|
@ -54,97 +165,162 @@ class Suite(log.Origin):
|
|||
self.all_passed = bool(self.passed) and not bool(self.failed)
|
||||
return self
|
||||
|
||||
def __init__(self, suite_dir):
|
||||
self.set_log_category(log.C_CNF)
|
||||
self.suite_dir = suite_dir
|
||||
self.set_name(os.path.basename(self.suite_dir))
|
||||
self.read_conf()
|
||||
def __str__(self):
|
||||
if self.failed:
|
||||
return 'FAIL: %d of %d tests failed:\n %s' % (
|
||||
len(self.failed),
|
||||
len(self.failed) + len(self.passed),
|
||||
'\n '.join([t.name() for t in self.failed]))
|
||||
if not self.passed:
|
||||
return 'no tests were run.'
|
||||
return 'pass: all %d tests passed.' % len(self.passed)
|
||||
|
||||
def read_conf(self):
|
||||
def reserve_resources(self):
|
||||
if self.reserved_resources:
|
||||
raise RuntimeError('Attempt to reserve resources twice for a SuiteRun')
|
||||
self.log('reserving resources...')
|
||||
with self:
|
||||
if not os.path.isdir(self.suite_dir):
|
||||
raise RuntimeError('No such directory: %r' % self.suite_dir)
|
||||
self.conf = config.read(os.path.join(self.suite_dir,
|
||||
Suite.CONF_FILENAME),
|
||||
Suite.CONF_SCHEMA)
|
||||
self.load_tests()
|
||||
self.reserved_resources = self.resources_pool.reserve(self, self.resource_requirements())
|
||||
|
||||
def load_tests(self):
|
||||
with self:
|
||||
self.tests = []
|
||||
for basename in os.listdir(self.suite_dir):
|
||||
if not basename.endswith('.py'):
|
||||
continue
|
||||
self.tests.append(Test(self, basename))
|
||||
|
||||
def add_test(self, test):
|
||||
with self:
|
||||
if not isinstance(test, Test):
|
||||
raise ValueError('add_test(): pass a Test() instance, not %s' % type(test))
|
||||
if test.suite is None:
|
||||
test.suite = self
|
||||
if test.suite is not self:
|
||||
raise ValueError('add_test(): test already belongs to another suite')
|
||||
self.tests.append(test)
|
||||
|
||||
def run_tests(self):
|
||||
results = Suite.Results()
|
||||
for test in self.tests:
|
||||
def run_tests(self, names=None):
|
||||
if not self.reserved_resources:
|
||||
self.reserve_resources()
|
||||
results = SuiteRun.Results()
|
||||
for test in self.definition.tests:
|
||||
if names and not test.name() in names:
|
||||
continue
|
||||
self._run_test(test, results)
|
||||
return results.conclude()
|
||||
|
||||
def run_tests_by_name(self, *names):
|
||||
results = Suite.Results()
|
||||
for name in names:
|
||||
basename = name
|
||||
if not basename.endswith('.py'):
|
||||
basename = name + '.py'
|
||||
for test in self.tests:
|
||||
if basename == test.basename:
|
||||
self._run_test(test, results)
|
||||
break
|
||||
self.stop_processes()
|
||||
return results.conclude()
|
||||
|
||||
def _run_test(self, test, results):
|
||||
try:
|
||||
with self:
|
||||
test.run()
|
||||
test.run(self)
|
||||
results.add_pass(test)
|
||||
except:
|
||||
results.add_fail(test)
|
||||
self.log_exn()
|
||||
|
||||
class Test(log.Origin):
|
||||
def remember_to_stop(self, process):
|
||||
if self._processes is None:
|
||||
self._processes = []
|
||||
self._processes.append(process)
|
||||
|
||||
def __init__(self, suite, test_basename):
|
||||
self.suite = suite
|
||||
self.basename = test_basename
|
||||
self.set_name(self.basename)
|
||||
self.set_log_category(log.C_TST)
|
||||
self.path = os.path.join(self.suite.suite_dir, self.basename)
|
||||
with self:
|
||||
with open(self.path, 'r') as f:
|
||||
self.script = f.read()
|
||||
def stop_processes(self):
|
||||
if not self._processes:
|
||||
return
|
||||
for process in self._processes:
|
||||
process.terminate()
|
||||
|
||||
def run(self):
|
||||
with self:
|
||||
self.code = compile(self.script, self.path, 'exec')
|
||||
with self.redirect_stdout():
|
||||
exec(self.code, self.test_globals())
|
||||
self._success = True
|
||||
def nitb_iface(self):
|
||||
return self.reserved_resources.get(resource.R_NITB_IFACE)
|
||||
|
||||
def test_globals(self):
|
||||
test_globals = {
|
||||
'this': utils.dict2obj({
|
||||
'suite': self.suite.suite_dir,
|
||||
'test': self.basename,
|
||||
}),
|
||||
'resources': utils.dict2obj({
|
||||
}),
|
||||
}
|
||||
return test_globals
|
||||
def nitb(self, nitb_iface=None):
|
||||
if nitb_iface is None:
|
||||
nitb_iface = self.nitb_iface()
|
||||
return osmo_nitb.OsmoNitb(self, nitb_iface)
|
||||
|
||||
def load(suite_dir):
|
||||
return Suite(suite_dir)
|
||||
def bts(self):
|
||||
return bts_obj(self, self.reserved_resources.get(resource.R_BTS))
|
||||
|
||||
def modem(self):
|
||||
return modem_obj(self.reserved_resources.get(resource.R_MODEM))
|
||||
|
||||
def msisdn(self):
|
||||
msisdn = self.resources_pool.next_msisdn(self.origin)
|
||||
self.log('using MSISDN', msisdn)
|
||||
return msisdn
|
||||
|
||||
def wait(self, condition, *condition_args, timeout=300, **condition_kwargs):
|
||||
if not timeout or timeout < 0:
|
||||
raise RuntimeError('wait() *must* time out at some point. timeout=%r' % timeout)
|
||||
|
||||
started = time.time()
|
||||
while True:
|
||||
self.poll()
|
||||
if condition(*condition_args, **condition_kwargs):
|
||||
return True
|
||||
waited = time.time() - started
|
||||
if waited > timeout:
|
||||
return False
|
||||
time.sleep(.1)
|
||||
|
||||
def sleep(self, seconds):
|
||||
self.wait(lambda: False, timeout=seconds)
|
||||
|
||||
def poll(self):
|
||||
if self._processes:
|
||||
for process in self._processes:
|
||||
process.poll()
|
||||
ofono_client.poll()
|
||||
|
||||
def prompt(self, *msgs, **msg_details):
|
||||
'ask for user interaction. Do not use in tests that should run automatically!'
|
||||
if msg_details:
|
||||
msgs = list(msgs)
|
||||
msgs.append('{%s}' %
|
||||
(', '.join(['%s=%r' % (k,v)
|
||||
for k,v in sorted(msg_details.items())])))
|
||||
msg = ' '.join(msgs) or 'Hit Enter to continue'
|
||||
self.log('prompt:', msg)
|
||||
sys.__stdout__.write(msg)
|
||||
sys.__stdout__.write('\n> ')
|
||||
sys.__stdout__.flush()
|
||||
entered = util.input_polling(self.poll)
|
||||
self.log('prompt entered:', entered)
|
||||
return entered
|
||||
|
||||
|
||||
loaded_suite_definitions = {}
|
||||
|
||||
def load(suite_name):
|
||||
global loaded_suite_definitions
|
||||
|
||||
suite = loaded_suite_definitions.get(suite_name)
|
||||
if suite is not None:
|
||||
return suite
|
||||
|
||||
suites_dir = config.get_suites_dir()
|
||||
suite_dir = suites_dir.child(suite_name)
|
||||
if not suites_dir.exists(suite_name):
|
||||
raise RuntimeError('Suite not found: %r in %r' % (suite_name, suites_dir))
|
||||
if not suites_dir.isdir(suite_name):
|
||||
raise RuntimeError('Suite name found, but not a directory: %r' % (suite_dir))
|
||||
|
||||
suite_def = SuiteDefinition(suite_dir)
|
||||
loaded_suite_definitions[suite_name] = suite_def
|
||||
return suite_def
|
||||
|
||||
def parse_suite_scenario_str(suite_scenario_str):
|
||||
tokens = suite_scenario_str.split(':')
|
||||
if len(tokens) > 2:
|
||||
raise RuntimeError('invalid combination string: %r' % suite_scenario_str)
|
||||
|
||||
suite_name = tokens[0]
|
||||
if len(tokens) <= 1:
|
||||
scenario_names = []
|
||||
else:
|
||||
scenario_names = tokens[1].split('+')
|
||||
|
||||
return suite_name, scenario_names
|
||||
|
||||
def load_suite_scenario_str(suite_scenario_str):
|
||||
suite_name, scenario_names = parse_suite_scenario_str(suite_scenario_str)
|
||||
suite = load(suite_name)
|
||||
scenarios = [config.get_scenario(scenario_name) for scenario_name in scenario_names]
|
||||
return (suite, scenarios)
|
||||
|
||||
def bts_obj(suite_run, conf):
|
||||
bts_type = conf.get('type')
|
||||
log.dbg(None, None, 'create BTS object', type=bts_type)
|
||||
bts_class = resource.KNOWN_BTS_TYPES.get(bts_type)
|
||||
if bts_class is None:
|
||||
raise RuntimeError('No such BTS type is defined: %r' % bts_type)
|
||||
return bts_class(suite_run, conf)
|
||||
|
||||
def modem_obj(conf):
|
||||
log.dbg(None, None, 'create Modem object', conf=conf)
|
||||
return ofono_client.Modem(conf)
|
||||
|
||||
# vim: expandtab tabstop=4 shiftwidth=4
|
||||
|
|
|
@ -23,7 +23,7 @@ from mako.template import Template
|
|||
from mako.lookup import TemplateLookup
|
||||
|
||||
from . import log
|
||||
from .utils import dict2obj
|
||||
from .util import dict2obj
|
||||
|
||||
_lookup = None
|
||||
_logger = log.Origin('no templates dir set')
|
||||
|
@ -47,10 +47,12 @@ def render(name, values):
|
|||
global _lookup
|
||||
if _lookup is None:
|
||||
set_templates_dir()
|
||||
with _logger:
|
||||
tmpl_name = name + '.tmpl'
|
||||
tmpl_name = name + '.tmpl'
|
||||
with log.Origin(tmpl_name):
|
||||
template = _lookup.get_template(tmpl_name)
|
||||
_logger.dbg('rendering', tmpl_name)
|
||||
|
||||
line_info_name = tmpl_name.replace('-', '_').replace('.', '_')
|
||||
return template.render(**dict2obj(values))
|
||||
|
||||
# vim: expandtab tabstop=4 shiftwidth=4
|
||||
|
|
|
@ -1,7 +1,4 @@
|
|||
!
|
||||
! OsmoBTS () configuration saved from vty
|
||||
!!
|
||||
!
|
||||
! Configuration rendered by osmo-gsm-tester
|
||||
log stderr
|
||||
logging color 1
|
||||
logging timestamp 1
|
||||
|
@ -14,8 +11,8 @@ log stderr
|
|||
phy 0
|
||||
instance 0
|
||||
bts 0
|
||||
band {band}
|
||||
ipa unit-id {ipa_unit_id} 0
|
||||
oml remote-ip {oml_remote_ip}
|
||||
band ${osmo_bts_sysmo.band}
|
||||
ipa unit-id ${osmo_bts_sysmo.ipa_unit_id} 0
|
||||
oml remote-ip ${osmo_bts_sysmo.oml_remote_ip}
|
||||
trx 0
|
||||
phy 0 instance 0
|
|
@ -0,0 +1,22 @@
|
|||
! Configuration rendered by osmo-gsm-tester
|
||||
log stderr
|
||||
logging color 1
|
||||
logging timestamp 1
|
||||
logging print extended-timestamp 1
|
||||
logging print category 1
|
||||
logging level all debug
|
||||
logging level l1c info
|
||||
logging level linp info
|
||||
!
|
||||
phy 0
|
||||
instance 0
|
||||
osmotrx rx-gain 25
|
||||
bts 0
|
||||
band ${osmo_bts_trx.band}
|
||||
ipa unit-id ${osmo_bts_trx.ipa_unit_id} 0
|
||||
oml remote-ip ${osmo_bts_trx.oml_remote_ip}
|
||||
settsc
|
||||
gsmtap-sapi ccch
|
||||
gsmtap-sapi pdtch
|
||||
trx 0
|
||||
phy 0 instance 0
|
|
@ -1,6 +1,4 @@
|
|||
!
|
||||
! OpenBSC configuration saved from vty
|
||||
!
|
||||
! Configuration rendered by osmo-gsm-tester
|
||||
password foo
|
||||
!
|
||||
log stderr
|
||||
|
@ -12,19 +10,19 @@ log stderr
|
|||
!
|
||||
line vty
|
||||
no login
|
||||
bind ${vty_bind_ip}
|
||||
bind ${nitb_iface.addr}
|
||||
!
|
||||
e1_input
|
||||
e1_line 0 driver ipa
|
||||
ipa bind ${abis_bind_ip}
|
||||
ipa bind ${nitb_iface.addr}
|
||||
network
|
||||
network country code ${mcc}
|
||||
mobile network code ${mnc}
|
||||
short name ${net_name_short}
|
||||
long name ${net_name_long}
|
||||
auth policy ${net_auth_policy}
|
||||
network country code ${nitb.net.mcc}
|
||||
mobile network code ${nitb.net.mnc}
|
||||
short name ${nitb.net.short_name}
|
||||
long name ${nitb.net.long_name}
|
||||
auth policy ${nitb.net.auth_policy}
|
||||
location updating reject cause 13
|
||||
encryption a5 ${encryption}
|
||||
encryption ${nitb.net.encryption}
|
||||
neci 1
|
||||
rrlp mode none
|
||||
mm info 1
|
||||
|
@ -46,16 +44,7 @@ network
|
|||
timer t3117 0
|
||||
timer t3119 0
|
||||
timer t3141 0
|
||||
smpp
|
||||
local-tcp-ip ${smpp_bind_ip} 2775
|
||||
system-id test
|
||||
policy closed
|
||||
esme test
|
||||
password test
|
||||
default-route
|
||||
ctrl
|
||||
bind ${ctrl_bind_ip}
|
||||
%for bts in bts_list:
|
||||
%for bts in nitb.net.bts_list:
|
||||
bts ${loop.index}
|
||||
type ${bts.type}
|
||||
band ${bts.band}
|
||||
|
@ -69,7 +58,7 @@ ctrl
|
|||
channel allocator ascending
|
||||
rach tx integer 9
|
||||
rach max transmission 7
|
||||
ip.access unit_id ${bts.unit_id} 0
|
||||
ip.access unit_id ${bts.ipa_unit_id} 0
|
||||
oml ip.access stream_id ${bts.stream_id} line 0
|
||||
gprs mode none
|
||||
% for trx in bts.trx_list:
|
||||
|
@ -85,3 +74,12 @@ ctrl
|
|||
% endfor
|
||||
% endfor
|
||||
%endfor
|
||||
smpp
|
||||
local-tcp-ip ${nitb_iface.addr} 2775
|
||||
system-id test
|
||||
policy closed
|
||||
esme test
|
||||
password test
|
||||
default-route
|
||||
ctrl
|
||||
bind ${nitb_iface.addr}
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# osmo_gsm_tester: prepare a test run and provide test API
|
||||
# osmo_gsm_tester: context for individual test runs
|
||||
#
|
||||
# Copyright (C) 2016-2017 by sysmocom - s.f.m.c. GmbH
|
||||
#
|
||||
|
@ -17,27 +17,33 @@
|
|||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import sys, os
|
||||
import pprint
|
||||
import inspect
|
||||
# These will be initialized before each test run.
|
||||
# A test script can thus establish its context by doing:
|
||||
# from osmo_gsm_tester.test import *
|
||||
trial = None
|
||||
suite = None
|
||||
test = None
|
||||
resources = None
|
||||
log = None
|
||||
dbg = None
|
||||
err = None
|
||||
wait = None
|
||||
sleep = None
|
||||
poll = None
|
||||
prompt = None
|
||||
|
||||
from . import suite as _suite
|
||||
from . import log
|
||||
from . import resource
|
||||
|
||||
# load the configuration for the test
|
||||
suite = _suite.Suite(sys.path[0])
|
||||
test = _suite.Test(suite, os.path.basename(inspect.stack()[-1][1]))
|
||||
|
||||
def test_except_hook(*exc_info):
|
||||
log.exn_add_info(exc_info, test)
|
||||
log.exn_add_info(exc_info, suite)
|
||||
log.log_exn(exc_info=exc_info)
|
||||
|
||||
sys.excepthook = test_except_hook
|
||||
|
||||
orig_stdout, sys.stdout = sys.stdout, test
|
||||
|
||||
resources = {}
|
||||
def setup(suite_run, _test, ofono_client):
|
||||
global trial, suite, test, resources, log, dbg, err, wait, sleep, poll, prompt
|
||||
trial = suite_run.trial
|
||||
suite = suite_run
|
||||
test = _test
|
||||
resources = suite_run.reserved_resources
|
||||
log = test.log
|
||||
dbg = test.dbg
|
||||
err = test.err
|
||||
wait = suite_run.wait
|
||||
sleep = suite_run.sleep
|
||||
poll = suite_run.poll
|
||||
prompt = suite_run.prompt
|
||||
|
||||
# vim: expandtab tabstop=4 shiftwidth=4
|
||||
|
|
|
@ -0,0 +1,160 @@
|
|||
# osmo_gsm_tester: trial: directory of binaries to be tested
|
||||
#
|
||||
# Copyright (C) 2016-2017 by sysmocom - s.f.m.c. GmbH
|
||||
#
|
||||
# Author: Neels Hofmeyr <neels@hofmeyr.de>
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as
|
||||
# published by the Free Software Foundation, either version 3 of the
|
||||
# License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import os
|
||||
import time
|
||||
import shutil
|
||||
import tarfile
|
||||
|
||||
from . import log, util
|
||||
|
||||
FILE_MARK_TAKEN = 'taken'
|
||||
FILE_CHECKSUMS = 'checksums.md5'
|
||||
TIMESTAMP_FMT = '%Y-%m-%d_%H-%M-%S'
|
||||
FILE_LAST_RUN = 'last_run'
|
||||
|
||||
class Trial(log.Origin):
|
||||
path = None
|
||||
dir = None
|
||||
_run_dir = None
|
||||
bin_tars = None
|
||||
|
||||
@staticmethod
|
||||
def next(trials_dir):
|
||||
|
||||
with trials_dir.lock('Trial.next'):
|
||||
trials = [e for e in trials_dir.children()
|
||||
if trials_dir.isdir(e) and not trials_dir.exists(e, FILE_MARK_TAKEN)]
|
||||
if not trials:
|
||||
return None
|
||||
# sort by time to get the one that waited longest
|
||||
trials.sort(key=lambda e: os.path.getmtime(trials_dir.child(e)))
|
||||
next_trial = trials[0]
|
||||
return Trial(trials_dir.child(next_trial)).take()
|
||||
|
||||
def __init__(self, trial_dir):
|
||||
self.path = trial_dir
|
||||
self.set_name(self.path)
|
||||
self.set_log_category(log.C_TST)
|
||||
self.dir = util.Dir(self.path)
|
||||
self.inst_dir = util.Dir(self.dir.child('inst'))
|
||||
self.bin_tars = []
|
||||
|
||||
def __repr__(self):
|
||||
return self.name()
|
||||
|
||||
def __enter__(self):
|
||||
self.log('Trial start')
|
||||
super().__enter__()
|
||||
|
||||
def __exit__(self, *exc_info):
|
||||
super().__exit__(*exc_info)
|
||||
self.log('Trial end')
|
||||
|
||||
def take(self):
|
||||
self.dir.touch(FILE_MARK_TAKEN)
|
||||
return self
|
||||
|
||||
def get_run_dir(self):
|
||||
if self._run_dir is not None:
|
||||
return self._run_dir
|
||||
self._run_dir = util.Dir(self.dir.new_child('run.%s' % time.strftime(TIMESTAMP_FMT)))
|
||||
self._run_dir.mkdir()
|
||||
|
||||
last_run = self.dir.child(FILE_LAST_RUN)
|
||||
if os.path.islink(last_run):
|
||||
os.remove(last_run)
|
||||
if not os.path.exists(last_run):
|
||||
os.symlink(self.dir.rel_path(self._run_dir.path), last_run)
|
||||
return self._run_dir
|
||||
|
||||
def verify(self):
|
||||
"verify checksums"
|
||||
|
||||
if not self.dir.exists():
|
||||
raise RuntimeError('Trial dir does not exist: %r' % self.dir)
|
||||
if not self.dir.isdir():
|
||||
raise RuntimeError('Trial dir is not a dir: %r' % self.dir)
|
||||
|
||||
checksums = self.dir.child(FILE_CHECKSUMS)
|
||||
if not self.dir.isfile(FILE_CHECKSUMS):
|
||||
raise RuntimeError('No checksums file in trial dir: %r', checksums)
|
||||
|
||||
with open(checksums, 'r') as f:
|
||||
line_nr = 0
|
||||
for line in [l.strip() for l in f.readlines()]:
|
||||
line_nr += 1
|
||||
if not line:
|
||||
continue
|
||||
md5, filename = line.split(' ')
|
||||
file_path = self.dir.child(filename)
|
||||
|
||||
if not self.dir.isfile(filename):
|
||||
raise RuntimeError('File listed in checksums file but missing in trials dir:'
|
||||
' %r vs. %r line %d' % (file_path, checksums, line_nr))
|
||||
|
||||
if md5 != util.md5_of_file(file_path):
|
||||
raise RuntimeError('Checksum mismatch for %r vs. %r line %d'
|
||||
% (file_path, checksums, line_nr))
|
||||
|
||||
if filename.endswith('.tgz'):
|
||||
self.bin_tars.append(filename)
|
||||
|
||||
def has_bin_tar(self, bin_name):
|
||||
bin_tar_start = '%s.' % bin_name
|
||||
matches = [t for t in self.bin_tars if t.startswith(bin_tar_start)]
|
||||
self.dbg(bin_name=bin_name, matches=matches)
|
||||
if not matches:
|
||||
return None
|
||||
if len(matches) > 1:
|
||||
raise RuntimeError('More than one match for bin name %r: %r' % (bin_name, matches))
|
||||
bin_tar = matches[0]
|
||||
bin_tar_path = self.dir.child(bin_tar)
|
||||
if not os.path.isfile(bin_tar_path):
|
||||
raise RuntimeError('Not a file or missing: %r' % bin_tar_path)
|
||||
return bin_tar_path
|
||||
|
||||
def get_inst(self, bin_name):
|
||||
bin_tar = self.has_bin_tar(bin_name)
|
||||
if not bin_tar:
|
||||
return None
|
||||
inst_dir = self.inst_dir.child(bin_name)
|
||||
|
||||
if os.path.isdir(inst_dir):
|
||||
# already unpacked
|
||||
return inst_dir
|
||||
|
||||
t = None
|
||||
try:
|
||||
os.makedirs(inst_dir)
|
||||
t = tarfile.open(bin_tar)
|
||||
t.extractall(inst_dir)
|
||||
return inst_dir
|
||||
|
||||
except:
|
||||
shutil.rmtree(inst_dir)
|
||||
raise
|
||||
finally:
|
||||
if t:
|
||||
try:
|
||||
t.close()
|
||||
except:
|
||||
pass
|
||||
|
||||
# vim: expandtab tabstop=4 shiftwidth=4
|
|
@ -0,0 +1,332 @@
|
|||
# osmo_gsm_tester: language snippets
|
||||
#
|
||||
# Copyright (C) 2016-2017 by sysmocom - s.f.m.c. GmbH
|
||||
#
|
||||
# Author: Neels Hofmeyr <neels@hofmeyr.de>
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as
|
||||
# published by the Free Software Foundation, either version 3 of the
|
||||
# License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import fcntl
|
||||
import hashlib
|
||||
import tempfile
|
||||
import shutil
|
||||
import atexit
|
||||
import threading
|
||||
import importlib.util
|
||||
import fcntl
|
||||
import tty
|
||||
import termios
|
||||
|
||||
|
||||
class listdict:
|
||||
'a dict of lists { "a": [1, 2, 3], "b": [1, 2] }'
|
||||
def __getattr__(ld, name):
|
||||
if name == 'add':
|
||||
return ld.__getattribute__(name)
|
||||
return ld.__dict__.__getattribute__(name)
|
||||
|
||||
def add(ld, name, item):
|
||||
l = ld.__dict__.get(name)
|
||||
if not l:
|
||||
l = []
|
||||
ld.__dict__[name] = l
|
||||
l.append(item)
|
||||
return l
|
||||
|
||||
def add_dict(ld, d):
|
||||
for k,v in d.items():
|
||||
ld.add(k, v)
|
||||
|
||||
def __setitem__(ld, name, val):
|
||||
return ld.__dict__.__setitem__(name, val)
|
||||
|
||||
def __getitem__(ld, name):
|
||||
return ld.__dict__.__getitem__(name)
|
||||
|
||||
def __str__(ld):
|
||||
return ld.__dict__.__str__()
|
||||
|
||||
|
||||
class DictProxy:
|
||||
'''
|
||||
allow accessing dict entries like object members
|
||||
syntactical sugar, adapted from http://stackoverflow.com/a/31569634
|
||||
so that e.g. templates can do ${bts.member} instead of ${bts['member']}
|
||||
'''
|
||||
def __init__(self, obj):
|
||||
self.obj = obj
|
||||
|
||||
def __getitem__(self, key):
|
||||
return dict2obj(self.obj[key])
|
||||
|
||||
def __getattr__(self, key):
|
||||
try:
|
||||
return dict2obj(getattr(self.obj, key))
|
||||
except AttributeError:
|
||||
try:
|
||||
return self[key]
|
||||
except KeyError:
|
||||
raise AttributeError(key)
|
||||
|
||||
class ListProxy:
|
||||
'allow nesting for DictProxy'
|
||||
def __init__(self, obj):
|
||||
self.obj = obj
|
||||
|
||||
def __getitem__(self, key):
|
||||
return dict2obj(self.obj[key])
|
||||
|
||||
def dict2obj(value):
|
||||
if isinstance(value, dict):
|
||||
return DictProxy(value)
|
||||
if isinstance(value, (tuple, list)):
|
||||
return ListProxy(value)
|
||||
return value
|
||||
|
||||
|
||||
class FileLock:
|
||||
def __init__(self, path, owner):
|
||||
self.path = path
|
||||
self.owner = owner
|
||||
self.f = None
|
||||
|
||||
def __enter__(self):
|
||||
if self.f is not None:
|
||||
return
|
||||
self.fd = os.open(self.path, os.O_CREAT | os.O_WRONLY | os.O_TRUNC)
|
||||
fcntl.flock(self.fd, fcntl.LOCK_EX)
|
||||
os.truncate(self.fd, 0)
|
||||
os.write(self.fd, str(self.owner).encode('utf-8'))
|
||||
os.fsync(self.fd)
|
||||
|
||||
def __exit__(self, *exc_info):
|
||||
#fcntl.flock(self.fd, fcntl.LOCK_UN)
|
||||
os.truncate(self.fd, 0)
|
||||
os.fsync(self.fd)
|
||||
os.close(self.fd)
|
||||
self.fd = -1
|
||||
|
||||
def lock(self):
|
||||
self.__enter__()
|
||||
|
||||
def unlock(self):
|
||||
self.__exit__()
|
||||
|
||||
|
||||
class Dir():
|
||||
LOCK_FILE = 'lock'
|
||||
|
||||
def __init__(self, path):
|
||||
self.path = path
|
||||
self.lock_path = os.path.join(self.path, Dir.LOCK_FILE)
|
||||
|
||||
def lock(self, origin_id):
|
||||
'''
|
||||
return lock context, usage:
|
||||
|
||||
with my_dir.lock(origin):
|
||||
read_from(my_dir.child('foo.txt'))
|
||||
write_to(my_dir.child('bar.txt'))
|
||||
'''
|
||||
self.mkdir()
|
||||
return FileLock(self.lock_path, origin_id)
|
||||
|
||||
@staticmethod
|
||||
def ensure_abs_dir_exists(*path_elements):
|
||||
l = len(path_elements)
|
||||
if l < 1:
|
||||
raise RuntimeError('Cannot create empty path')
|
||||
if l == 1:
|
||||
path = path_elements[0]
|
||||
else:
|
||||
path = os.path.join(*path_elements)
|
||||
if not os.path.isdir(path):
|
||||
os.makedirs(path)
|
||||
|
||||
def child(self, *rel_path):
|
||||
if not rel_path:
|
||||
return self.path
|
||||
return os.path.join(self.path, *rel_path)
|
||||
|
||||
def mk_parentdir(self, *rel_path):
|
||||
child = self.child(*rel_path)
|
||||
child_parent = os.path.dirname(child)
|
||||
Dir.ensure_abs_dir_exists(child_parent)
|
||||
return child
|
||||
|
||||
def mkdir(self, *rel_path):
|
||||
child = self.child(*rel_path)
|
||||
Dir.ensure_abs_dir_exists(child)
|
||||
return child
|
||||
|
||||
def children(self):
|
||||
return os.listdir(self.path)
|
||||
|
||||
def exists(self, *rel_path):
|
||||
return os.path.exists(self.child(*rel_path))
|
||||
|
||||
def isdir(self, *rel_path):
|
||||
return os.path.isdir(self.child(*rel_path))
|
||||
|
||||
def isfile(self, *rel_path):
|
||||
return os.path.isfile(self.child(*rel_path))
|
||||
|
||||
def new_child(self, *rel_path):
|
||||
attempt = 1
|
||||
prefix, suffix = os.path.splitext(self.child(*rel_path))
|
||||
rel_path_fmt = '%s%%s%s' % (prefix, suffix)
|
||||
while True:
|
||||
path = rel_path_fmt % (('_%d'%attempt) if attempt > 1 else '')
|
||||
if not os.path.exists(path):
|
||||
break
|
||||
attempt += 1
|
||||
continue
|
||||
Dir.ensure_abs_dir_exists(os.path.dirname(path))
|
||||
return path
|
||||
|
||||
def rel_path(self, path):
|
||||
return os.path.relpath(path, self.path)
|
||||
|
||||
def touch(self, *rel_path):
|
||||
touch_file(self.child(*rel_path))
|
||||
|
||||
def new_file(self, *rel_path):
|
||||
path = self.new_child(*rel_path)
|
||||
touch_file(path)
|
||||
return path
|
||||
|
||||
def new_dir(self, *rel_path):
|
||||
path = self.new_child(*rel_path)
|
||||
Dir.ensure_abs_dir_exists(path)
|
||||
return path
|
||||
|
||||
def __str__(self):
|
||||
return self.path
|
||||
def __repr__(self):
|
||||
return self.path
|
||||
|
||||
def touch_file(path):
|
||||
with open(path, 'a') as f:
|
||||
f.close()
|
||||
|
||||
def is_dict(l):
|
||||
return isinstance(l, dict)
|
||||
|
||||
def is_list(l):
|
||||
return isinstance(l, (list, tuple))
|
||||
|
||||
|
||||
def dict_add(a, *b, **c):
|
||||
for bb in b:
|
||||
a.update(bb)
|
||||
a.update(c)
|
||||
return a
|
||||
|
||||
def _hash_recurse(acc, obj, ignore_keys):
|
||||
if is_dict(obj):
|
||||
for key, val in sorted(obj.items()):
|
||||
if key in ignore_keys:
|
||||
continue
|
||||
_hash_recurse(acc, val, ignore_keys)
|
||||
return
|
||||
|
||||
if is_list(obj):
|
||||
for item in obj:
|
||||
_hash_recurse(acc, item, ignore_keys)
|
||||
return
|
||||
|
||||
acc.update(str(obj).encode('utf-8'))
|
||||
|
||||
def hash_obj(obj, *ignore_keys):
|
||||
acc = hashlib.sha1()
|
||||
_hash_recurse(acc, obj, ignore_keys)
|
||||
return acc.hexdigest()
|
||||
|
||||
|
||||
def md5(of_content):
|
||||
if isinstance(of_content, str):
|
||||
of_content = of_content.encode('utf-8')
|
||||
return hashlib.md5(of_content).hexdigest()
|
||||
|
||||
def md5_of_file(path):
|
||||
with open(path, 'rb') as f:
|
||||
return md5(f.read())
|
||||
|
||||
_tempdir = None
|
||||
|
||||
def get_tempdir(remove_on_exit=True):
|
||||
global _tempdir
|
||||
if _tempdir is not None:
|
||||
return _tempdir
|
||||
_tempdir = tempfile.mkdtemp()
|
||||
if remove_on_exit:
|
||||
atexit.register(lambda: shutil.rmtree(_tempdir))
|
||||
return _tempdir
|
||||
|
||||
|
||||
if hasattr(importlib.util, 'module_from_spec'):
|
||||
def run_python_file(module_name, path):
|
||||
spec = importlib.util.spec_from_file_location(module_name, path)
|
||||
spec.loader.exec_module( importlib.util.module_from_spec(spec) )
|
||||
else:
|
||||
from importlib.machinery import SourceFileLoader
|
||||
def run_python_file(module_name, path):
|
||||
SourceFileLoader(module_name, path).load_module()
|
||||
|
||||
def msisdn_inc(msisdn_str):
|
||||
'add 1 and preserve leading zeros'
|
||||
return ('%%0%dd' % len(msisdn_str)) % (int(msisdn_str) + 1)
|
||||
|
||||
class polling_stdin:
|
||||
def __init__(self, stream):
|
||||
self.stream = stream
|
||||
self.fd = self.stream.fileno()
|
||||
def __enter__(self):
|
||||
self.original_stty = termios.tcgetattr(self.stream)
|
||||
tty.setcbreak(self.stream)
|
||||
self.orig_fl = fcntl.fcntl(self.fd, fcntl.F_GETFL)
|
||||
fcntl.fcntl(self.fd, fcntl.F_SETFL, self.orig_fl | os.O_NONBLOCK)
|
||||
def __exit__(self, *args):
|
||||
fcntl.fcntl(self.fd, fcntl.F_SETFL, self.orig_fl)
|
||||
termios.tcsetattr(self.stream, termios.TCSANOW, self.original_stty)
|
||||
|
||||
def input_polling(poll_func, stream=None):
|
||||
if stream is None:
|
||||
stream = sys.stdin
|
||||
unbuffered_stdin = os.fdopen(stream.fileno(), 'rb', buffering=0)
|
||||
try:
|
||||
with polling_stdin(unbuffered_stdin):
|
||||
acc = []
|
||||
while True:
|
||||
poll_func()
|
||||
got = unbuffered_stdin.read(1)
|
||||
if got and len(got):
|
||||
try:
|
||||
# this is hacky: can't deal with multibyte sequences
|
||||
got_str = got.decode('utf-8')
|
||||
except:
|
||||
got_str = '?'
|
||||
acc.append(got_str)
|
||||
sys.__stdout__.write(got_str)
|
||||
sys.__stdout__.flush()
|
||||
if '\n' in got_str:
|
||||
return ''.join(acc)
|
||||
time.sleep(.1)
|
||||
finally:
|
||||
unbuffered_stdin.close()
|
||||
|
||||
# vim: expandtab tabstop=4 shiftwidth=4
|
|
@ -1,118 +0,0 @@
|
|||
# osmo_gsm_tester: language snippets
|
||||
#
|
||||
# Copyright (C) 2016-2017 by sysmocom - s.f.m.c. GmbH
|
||||
#
|
||||
# Author: Neels Hofmeyr <neels@hofmeyr.de>
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as
|
||||
# published by the Free Software Foundation, either version 3 of the
|
||||
# License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import os
|
||||
import fcntl
|
||||
|
||||
class listdict:
|
||||
'a dict of lists { "a": [1, 2, 3], "b": [1, 2] }'
|
||||
def __getattr__(ld, name):
|
||||
if name == 'add':
|
||||
return ld.__getattribute__(name)
|
||||
return ld.__dict__.__getattribute__(name)
|
||||
|
||||
def add(ld, name, item):
|
||||
l = ld.__dict__.get(name)
|
||||
if not l:
|
||||
l = []
|
||||
ld.__dict__[name] = l
|
||||
l.append(item)
|
||||
return l
|
||||
|
||||
def add_dict(ld, d):
|
||||
for k,v in d.items():
|
||||
ld.add(k, v)
|
||||
|
||||
def __setitem__(ld, name, val):
|
||||
return ld.__dict__.__setitem__(name, val)
|
||||
|
||||
def __getitem__(ld, name):
|
||||
return ld.__dict__.__getitem__(name)
|
||||
|
||||
def __str__(ld):
|
||||
return ld.__dict__.__str__()
|
||||
|
||||
|
||||
class DictProxy:
|
||||
'''
|
||||
allow accessing dict entries like object members
|
||||
syntactical sugar, adapted from http://stackoverflow.com/a/31569634
|
||||
so that e.g. templates can do ${bts.member} instead of ${bts['member']}
|
||||
'''
|
||||
def __init__(self, obj):
|
||||
self.obj = obj
|
||||
|
||||
def __getitem__(self, key):
|
||||
return dict2obj(self.obj[key])
|
||||
|
||||
def __getattr__(self, key):
|
||||
try:
|
||||
return dict2obj(getattr(self.obj, key))
|
||||
except AttributeError:
|
||||
try:
|
||||
return self[key]
|
||||
except KeyError:
|
||||
raise AttributeError(key)
|
||||
|
||||
class ListProxy:
|
||||
'allow nesting for DictProxy'
|
||||
def __init__(self, obj):
|
||||
self.obj = obj
|
||||
|
||||
def __getitem__(self, key):
|
||||
return dict2obj(self.obj[key])
|
||||
|
||||
def dict2obj(value):
|
||||
if isinstance(value, dict):
|
||||
return DictProxy(value)
|
||||
if isinstance(value, (tuple, list)):
|
||||
return ListProxy(value)
|
||||
return value
|
||||
|
||||
|
||||
class FileLock:
|
||||
def __init__(self, path, owner):
|
||||
self.path = path
|
||||
self.owner = owner
|
||||
self.f = None
|
||||
|
||||
def __enter__(self):
|
||||
if self.f is not None:
|
||||
return
|
||||
self.fd = os.open(self.path, os.O_CREAT | os.O_WRONLY | os.O_TRUNC)
|
||||
fcntl.flock(self.fd, fcntl.LOCK_EX)
|
||||
os.truncate(self.fd, 0)
|
||||
os.write(self.fd, str(self.owner).encode('utf-8'))
|
||||
os.fsync(self.fd)
|
||||
|
||||
def __exit__(self, *exc_info):
|
||||
#fcntl.flock(self.fd, fcntl.LOCK_UN)
|
||||
os.truncate(self.fd, 0)
|
||||
os.fsync(self.fd)
|
||||
os.close(self.fd)
|
||||
self.fd = -1
|
||||
|
||||
def lock(self):
|
||||
self.__enter__()
|
||||
|
||||
def unlock(self):
|
||||
self.__exit__()
|
||||
|
||||
|
||||
# vim: expandtab tabstop=4 shiftwidth=4
|
112
src/run_once.py
112
src/run_once.py
|
@ -21,28 +21,124 @@
|
|||
|
||||
'''osmo_gsm_tester: invoke a single test run.
|
||||
|
||||
./run_once.py ~/path/to/test_package/
|
||||
Examples:
|
||||
|
||||
Upon launch, a 'test_package/run-<date>' directory will be created.
|
||||
When complete, a symbolic link 'test_package/last_run' will point at this dir.
|
||||
The run dir then contains logs and test results.
|
||||
./run_once.py ~/my_trial_package/ -s osmo_trx
|
||||
./run_once.py ~/my_trial_package/ -c sms_tests:dyn_ts+eu_band+bts_sysmo
|
||||
./run_once.py ~/my_trial_package/ -c sms_tests/mo_mt_sms:bts_trx
|
||||
|
||||
(The names for test suite, scenario and series names used in these examples
|
||||
must be defined by the osmo-gsm-tester configuration.)
|
||||
|
||||
A trial package contains binaries (usually built by a jenkins job) of GSM
|
||||
software, including the core network programs as well as binaries for the
|
||||
various BTS models.
|
||||
|
||||
A test suite defines specific actions to be taken and verifies their outcome.
|
||||
Such a test suite may leave certain aspects of a setup undefined, e.g. it may
|
||||
be BTS model agnostic or does not care which voice codecs are chosen.
|
||||
|
||||
A test scenario completes the picture in that it defines which specific choices
|
||||
shall be made to run a test suite. Any one test suite may thus run on any
|
||||
number of different scenarios, e.g. to test various voice codecs.
|
||||
|
||||
Test scenarios may be combined. For example, one scenario may define a timeslot
|
||||
configuration to use, while another scenario may define the voice codec
|
||||
configuration.
|
||||
|
||||
There may still be aspects that are neither required by a test suite nor
|
||||
strictly defined by a scenario, which will be resolved automatically, e.g. by
|
||||
choosing the first available item that matches the other constraints.
|
||||
|
||||
A test run thus needs to define: a trial package containing built binaries, a
|
||||
combination of scenarios to run a suite in, and a test suite to launch in the
|
||||
given scenario with the given binaries.
|
||||
|
||||
The osmo-gsm-tester configuration may define one or more series as a number of
|
||||
suite:scenario combinations. So instead of a specific suite:scenario
|
||||
combination, the name of such a series can be passed.
|
||||
|
||||
If neither a combination or series is specified, the default series will be run
|
||||
as defined in the osmo-gsm-tester configuration.
|
||||
|
||||
The scenarios and suites run for a given trial will be recorded in a trial
|
||||
package's directory: Upon launch, a 'test_package/run.<date>' directory will be
|
||||
created, which will collect logs and reports.
|
||||
'''
|
||||
|
||||
import osmo_gsm_tester
|
||||
from osmo_gsm_tester import trial, suite, log, __version__
|
||||
|
||||
if __name__ == '__main__':
|
||||
import argparse
|
||||
|
||||
parser = argparse.ArgumentParser()
|
||||
parser = argparse.ArgumentParser(epilog=__doc__, formatter_class=argparse.RawTextHelpFormatter)
|
||||
parser.add_argument('-V', '--version', action='store_true',
|
||||
help='Show version')
|
||||
parser.add_argument('test_package', nargs='*',
|
||||
parser.add_argument('trial_package', nargs='+',
|
||||
help='Directory containing binaries to test')
|
||||
parser.add_argument('-s', '--suite-scenario', dest='suite_scenario', action='append',
|
||||
help='A suite-scenarios combination like suite:scenario+scenario')
|
||||
parser.add_argument('-S', '--series', dest='series', action='append',
|
||||
help='A series of suite-scenarios combinations as defined in the'
|
||||
' osmo-gsm-tester configuration')
|
||||
parser.add_argument('-t', '--test', dest='test', action='append',
|
||||
help='Run only tests matching this name')
|
||||
parser.add_argument('-l', '--log-level', dest='log_level', choices=log.LEVEL_STRS.keys(),
|
||||
default=None,
|
||||
help='Set logging level for all categories (on stdout)')
|
||||
parser.add_argument('-T', '--traceback', dest='trace', action='store_true',
|
||||
help='Enable logging of tracebacks')
|
||||
args = parser.parse_args()
|
||||
|
||||
if args.version:
|
||||
print(osmo_gsm_tester.__version__)
|
||||
print(__version__)
|
||||
exit(0)
|
||||
|
||||
print('combinations:', repr(args.suite_scenario))
|
||||
print('series:', repr(args.series))
|
||||
print('trials:', repr(args.trial_package))
|
||||
print('tests:', repr(args.test))
|
||||
|
||||
if args.log_level:
|
||||
log.set_all_levels(log.LEVEL_STRS.get(args.log_level))
|
||||
log.style_change(origin_width=32)
|
||||
if args.trace:
|
||||
log.style_change(trace=True)
|
||||
|
||||
combination_strs = list(args.suite_scenario or [])
|
||||
# for series in args.series:
|
||||
# combination_strs.extend(config.get_series(series))
|
||||
|
||||
if not combination_strs:
|
||||
raise RuntimeError('Need at least one suite:scenario or series to run')
|
||||
|
||||
suite_scenarios = []
|
||||
for combination_str in combination_strs:
|
||||
suite_scenarios.append(suite.load_suite_scenario_str(combination_str))
|
||||
|
||||
test_names = []
|
||||
for test_name in (args.test or []):
|
||||
found = False
|
||||
for suite_run in suite_runs:
|
||||
for test in suite_run.definition.tests:
|
||||
if test_name in test.name():
|
||||
found = True
|
||||
test_names.append(test.name())
|
||||
if not found:
|
||||
raise RuntimeError('No test found for %r' % test_name)
|
||||
if test_names:
|
||||
print(repr(test_names))
|
||||
|
||||
trials = []
|
||||
for trial_package in args.trial_package:
|
||||
t = trial.Trial(trial_package)
|
||||
t.verify()
|
||||
trials.append(t)
|
||||
|
||||
for current_trial in trials:
|
||||
with current_trial:
|
||||
for suite_def, scenarios in suite_scenarios:
|
||||
suite_run = suite.SuiteRun(current_trial, suite_def, scenarios)
|
||||
suite_run.run_tests(test_names)
|
||||
|
||||
# vim: expandtab tabstop=4 shiftwidth=4
|
||||
|
|
|
@ -1,46 +0,0 @@
|
|||
{'bts': [{'addr': '10.42.42.114',
|
||||
'name': 'sysmoBTS 1002',
|
||||
'trx': [{'band': 'GSM-1800',
|
||||
'timeslots': ['CCCH+SDCCH4',
|
||||
'SDCCH8',
|
||||
'TCH/F_TCH/H_PDCH',
|
||||
'TCH/F_TCH/H_PDCH',
|
||||
'TCH/F_TCH/H_PDCH',
|
||||
'TCH/F_TCH/H_PDCH',
|
||||
'TCH/F_TCH/H_PDCH',
|
||||
'TCH/F_TCH/H_PDCH']},
|
||||
{'band': 'GSM-1900',
|
||||
'timeslots': ['SDCCH8',
|
||||
'PDCH',
|
||||
'PDCH',
|
||||
'PDCH',
|
||||
'PDCH',
|
||||
'PDCH',
|
||||
'PDCH',
|
||||
'PDCH']}],
|
||||
'type': 'sysmobts'}],
|
||||
'modems': [{'dbus_path': '/sierra_0',
|
||||
'imsi': '901700000009001',
|
||||
'ki': 'D620F48487B1B782DA55DF6717F08FF9',
|
||||
'msisdn': '7801'},
|
||||
{'dbus_path': '/sierra_1',
|
||||
'imsi': '901700000009002',
|
||||
'ki': 'D620F48487B1B782DA55DF6717F08FF9',
|
||||
'msisdn': '7802'}]}
|
||||
- expect validation success:
|
||||
Validation: OK
|
||||
- unknown item:
|
||||
--- - ERR: ValueError: config item not known: 'bts[].unknown_item'
|
||||
Validation: Error
|
||||
- wrong type modems[].imsi:
|
||||
--- - ERR: ValueError: config item is dict but should be a leaf node of type 'str': 'modems[].imsi'
|
||||
Validation: Error
|
||||
- invalid key with space:
|
||||
--- - ERR: ValueError: invalid config key: 'imsi '
|
||||
Validation: Error
|
||||
- list instead of dict:
|
||||
--- - ERR: ValueError: config item not known: 'a_dict[]'
|
||||
Validation: Error
|
||||
- unknown band:
|
||||
--- (item='bts[].trx[].band') ERR: ValueError: Unknown GSM band: 'what'
|
||||
Validation: Error
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue