initial import
The original osmo-gsm-tester was an internal development at sysmocom, mostly
by D. Laszlo Sitzer <dlsitzer@sysmocom.de>, of which this public
osmo-gsm-tester is a refactoring / rewrite. This imports an early state of the
refactoring and is not functional yet. Bits from the earlier osmo-gsm-tester
will be added as needed. The earlier commit history is not imported.
parent 0f2f19e9aa
commit dae3d3c479
@@ -0,0 +1,15 @@
all: deps version check

.PHONY: version check

deps:
	./check_dependencies.py

version:
	./update_version.sh

check:
	$(MAKE) -C test check
	@echo "make check: success"

# vim: noexpandtab tabstop=8 shiftwidth=8
@@ -0,0 +1,26 @@
#!/usr/bin/env python3

# just import all python3 modules used by osmo-gsm-tester to make sure they are
# installed.

from inspect import getframeinfo, stack
from mako.lookup import TemplateLookup
from mako.template import Template
import argparse
import contextlib
import copy
import difflib
import fcntl
import inspect
import io
import os
import pprint
import re
import subprocess
import sys
import tempfile
import time
import traceback
import yaml

print('ok')
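
The Makefile's 'deps' target runs this check; it can also be invoked directly
and prints 'ok' once all modules import (a missing package shows up as an
ImportError traceback):

  ./check_dependencies.py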
@@ -0,0 +1,140 @@
set -e -x

prefix_base="`pwd`"
prefix_dirname="inst-openbsc"
prefix="$prefix_base/$prefix_dirname"

reposes="
libosmocore
libosmo-abis
libosmo-netif
openggsn
libsmpp34
libosmo-sccp
openbsc/openbsc
"

osmo_gsm_tester_host=root@10.9.1.190
osmo_gsm_tester_dir="/var/tmp/osmo-gsm-tester"
tmp_dir="/var/tmp/prep-osmo-gsm-tester"
arch="x86_64"
archive_name="openbsc-$arch-build-$BUILD_NUMBER"
archive="$archive_name.tgz"
manifest="manifest.txt"
test_report="test-report.xml"
test_timeout_sec=120

rm -rf $prefix
mkdir -p $prefix

opt_prefix=""
if [ -n "$prefix" ]; then
  export LD_LIBRARY_PATH="$prefix"/lib
  export PKG_CONFIG_PATH="$prefix"/lib/pkgconfig
  opt_prefix="--prefix=$prefix"
fi

for r in $reposes; do
  make -C "$r" clean || true
done

for r in $reposes; do

  cd "$r"

  echo "$(git rev-parse HEAD) $r" >> "$prefix/openbsc_git_hashes.txt"

  autoreconf -fi

  opt_enable=""
  if [ "$r" = 'openbsc/openbsc' ]; then
    opt_enable="--enable-smpp --enable-osmo-bsc --enable-nat"
  fi

  ./configure "$opt_prefix" $opt_enable

  make -j || make || make
  if [ "$r" != asn1c ]; then
    if [ "$r" = 'libosmo-netif' ]; then
      # skip clock dependent test in libosmo-netif
      make check TESTSUITEFLAGS='-k !osmux_test'
    else
      make check
    fi
  fi
  make install
  cd ..
done

# create test session directory, archive and manifest

cd $prefix_base

ts_name="$NODE_NAME-$BUILD_TAG"
local_ts_base="./compose_ts"
local_ts_dir="$local_ts_base/$ts_name"

rm -rf "$local_ts_base" || true
mkdir -p "$local_ts_dir"

# create archive of openbsc build
tar czf "$local_ts_dir/$archive" "$prefix_dirname"/*
# move archived bts builds into test session directory
mv $WORKSPACE/osmo-bts-*.tgz "$local_ts_dir"
cd "$local_ts_dir"
md5sum *.tgz > $manifest
cd -

# transfer test session directory to temporary dir on osmo-gsm-tester host.
# when transfer is complete, move the directory to its final location (where
# the osmo-gsm-tester will recognize the session directory and start the session)

ssh $osmo_gsm_tester_host "mkdir -p $tmp_dir"
scp -r "$local_ts_dir" $osmo_gsm_tester_host:$tmp_dir/
ssh $osmo_gsm_tester_host "mv $tmp_dir/$ts_name $osmo_gsm_tester_dir"

# poll for test status
ts_dir="$osmo_gsm_tester_dir/$ts_name"

set +x
ts_log=$ts_dir/test-session.log
echo "Waiting for test session log to be created"
while /bin/true; do
  if ssh $osmo_gsm_tester_host "test -e $ts_log"; then
    break
  fi
  sleep 1
done

echo "Following test session log"
# NOTE this will leave a dead ssh session with tail running
ssh $osmo_gsm_tester_host "tail -f $ts_log" &

echo "Waiting for test session to complete"
while /bin/true; do
  # if [ "$test_timeout_sec" = "0" ]; then
  #   echo "TIMEOUT test execution timeout ($test_timeout_sec seconds) exceeded!"
  #   exit 1
  # fi
  if ssh $osmo_gsm_tester_host "test -e $ts_dir/$test_report"; then
    break
  fi
  sleep 1
  # test_timeout_sec="$(($test_timeout_sec - 1))"
done
set -x

# use pgrep to terminate the ssh/tail (if it still exists)
remote_tail_pid=`ssh $osmo_gsm_tester_host "pgrep -fx 'tail -f $ts_log'"`
echo "remote_tail_pid = $remote_tail_pid"
ssh $osmo_gsm_tester_host "kill $remote_tail_pid"

# copy contents of test session directory back and remove it from the osmo-gsm-tester host

rsync -av -e ssh --exclude='inst-*' --exclude='tmp*' $osmo_gsm_tester_host:$ts_dir/ "$local_ts_dir/"

ssh $osmo_gsm_tester_host "/usr/local/src/osmo-gsm-tester/contrib/ts-dir-cleanup.sh"

# touch test-report.xml (to make up for clock drift between jenkins and build slave)

touch "$local_ts_dir/$test_report"
@@ -0,0 +1,94 @@
#!/bin/sh

set -e

OPTION_DO_CLONE=0
OPTION_DO_CLEAN=0
OPTION_DO_TEST=1

PREFIX=`pwd`/inst-osmo-bts-octphy

# NOTE Make sure either 'octphy-2g-headers' (preferred) or
# 'octsdr-2g' is listed among the repositories

octbts_repos="libosmocore
libosmo-abis
openbsc/openbsc
octphy-2g-headers
osmo-bts"

clone_repos() {
  repos="$1"
  for repo in $repos; do
    if [ -e $repo ]; then
      continue
    fi
    if [ "$repo" = "libosmocore" ]; then
      url="git://git.osmocom.org/libosmocore.git"
    elif [ "$repo" = "libosmo-abis" ]; then
      url="git://git.osmocom.org/libosmo-abis.git"
    elif [ "$repo" = "libosmo-netif" ]; then
      url="git://git.osmocom.org/libosmo-netif.git"
    elif [ "$repo" = "openbsc/openbsc" ]; then
      url="git://git.osmocom.org/openbsc"
    elif [ "$repo" = "octphy-2g-headers" ]; then
      url="git://git.osmocom.org/octphy-2g-headers"
    elif [ "$repo" = "octsdr-2g" ]; then
      # NOTE actually we only need the headers from the octphy-2g-headers
      # repository but this (private) repository contains more recent versions
      url="ssh://git@git.admin.sysmocom.de/octasic/octsdr-2g"
    elif [ "$repo" = "osmo-bts" ]; then
      url="git://git.osmocom.org/osmo-bts.git"
    else
      exit 2
    fi
    git clone $url
  done
}

main() {
  repos="$1"
  if [ $OPTION_DO_CLONE -eq 1 ]; then clone_repos "$repos"; fi
  rm -rf $PREFIX
  mkdir -p $PREFIX
  for repo in $repos; do
    if [ "$repo" = "openbsc/openbsc" ]; then
      continue
    fi
    if [ "$repo" = "octphy-2g-headers" ]; then
      OCTPHY_INCDIR=`pwd`/octphy-2g-headers
      continue
    fi
    if [ "$repo" = "octsdr-2g" ]; then
      cd $repo
      git checkout 5c7166bab0a0f2d8a9664213d18642ae305e7004
      cd -
      OCTPHY_INCDIR=`pwd`/octsdr-2g/software/include
      continue
    fi
    cd $repo
    if [ $OPTION_DO_CLEAN -eq 1 ]; then git clean -dxf; fi
    echo "$(git rev-parse HEAD) $repo" >> "$PREFIX/osmo-bts-octphy_git_hashes.txt"
    autoreconf -fi
    if [ "$repo" != "libosmocore" ]; then
      export PKG_CONFIG_PATH=$PREFIX/lib/pkgconfig
      export LD_LIBRARY_PATH=$PREFIX/lib:/usr/local/lib
    fi
    config_opts=""
    case "$repo" in
      'osmo-bts') config_opts="$config_opts --enable-octphy --with-octsdr-2g=$OCTPHY_INCDIR"
    esac
    ./configure --prefix=$PREFIX $config_opts
    make -j8
    if [ $OPTION_DO_TEST -eq 1 ]; then make check; fi
    make install
    cd ..
  done
}

set -x
main "$octbts_repos"

# build the archive that is going to be copied to the tester and then to the BTS
rm -f $WORKSPACE/osmo-bts-octphy*.tgz
tar czf $WORKSPACE/osmo-bts-octphy-build-$BUILD_NUMBER.tgz inst-osmo-bts-octphy
@@ -0,0 +1,68 @@
set -e -x

deps="
libosmocore
libosmo-abis
osmo-bts
"

base="$PWD"

have_repo() {
  repo="$1"
  cd "$base"
  if [ ! -e "$repo" ]; then
    set +x
    echo "MISSING REPOSITORY: $repo"
    echo "should be provided by the jenkins workspace"
    exit 1
  fi
  cd "$repo"
  git clean -dxf
  cd "$base"
}

for dep in $deps; do
  have_repo "$dep"
done

# for gsm_data_shared.h
have_repo openbsc

. /opt/poky/1.5.4/environment-setup-armv5te-poky-linux-gnueabi

export DESTDIR=/opt/poky/1.5.4/sysroots/armv5te-poky-linux-gnueabi

prefix_base="/usr/local/jenkins-build"
prefix_base_real="$DESTDIR$prefix_base"
rm -rf "$prefix_base_real"

prefix="$prefix_base/inst-osmo-bts-sysmo"
prefix_real="$DESTDIR$prefix"
mkdir -p "$prefix_real"

for dep in $deps; do
  cd "$base/$dep"

  echo "$(git rev-parse HEAD) $dep" >> "$prefix_real/osmo-bts-sysmo_git_hashes.txt"

  autoreconf -fi

  config_opts=""
  case "$dep" in
    'libosmocore') config_opts="--disable-pcsc" ;;
    'osmo-bts') config_opts="--enable-sysmocom-bts --with-openbsc=$base/openbsc/openbsc/include" ;;
  esac

  ./configure --prefix="$prefix" $CONFIGURE_FLAGS $config_opts
  make -j8
  make install
done

# build the archive that is going to be copied to the tester and then to the BTS
tar_name="osmo-bts-sysmo-build-"
if ls "$base/$tar_name"* ; then
  rm -f "$base/$tar_name"*
fi
cd "$prefix_base_real"
tar cvzf "$base/$tar_name${BUILD_NUMBER}.tgz" *
@@ -0,0 +1,61 @@
set -x -e

base="$PWD"
inst="inst-osmo-bts-trx"
prefix="$base/$inst"

deps="
libosmocore
libosmo-abis
osmo-trx
osmo-bts
"

have_repo() {
  repo="$1"
  cd "$base"
  if [ ! -e "$repo" ]; then
    set +x
    echo "MISSING REPOSITORY: $repo"
    echo "should be provided by the jenkins workspace"
    exit 1
  fi
  cd "$repo"
  git clean -dxf
  cd "$base"
}

# for gsm_data_shared.*
have_repo openbsc


rm -rf "$prefix"
mkdir -p "$prefix"

export PKG_CONFIG_PATH="$prefix/lib/pkgconfig"
export LD_LIBRARY_PATH="$prefix/lib"

for dep in $deps; do
  have_repo "$dep"
  cd "$dep"

  echo "$(git rev-parse HEAD) $dep" >> "$prefix/osmo-bts-trx_osmo-trx_git_hashes.txt"

  autoreconf -fi

  config_opts=""

  case "$dep" in
    'osmo-bts') config_opts="--enable-trx --with-openbsc=$base/openbsc/openbsc/include" ;;
    'osmo-trx') config_opts="--without-sse" ;;
  esac

  ./configure --prefix="$prefix" $config_opts
  make -j8
  make install
done

# build the archive that is going to be copied to the tester
cd "$base"
rm -f osmo-bts-trx*.tgz
tar czf "osmo-bts-trx-build-${BUILD_NUMBER}.tgz" "$inst"
@@ -0,0 +1,30 @@
#!/bin/sh
# Remove all but the N newest test run dirs (that have been started)

ts_rx_dir="$1"
ts_prep_dir="$2"
if [ -z "$ts_rx_dir" ]; then
  ts_rx_dir="/var/tmp/osmo-gsm-tester"
fi
if [ -z "$ts_prep_dir" ]; then
  ts_prep_dir="/var/tmp/prep-osmo-gsm-tester"
fi

mkdir -p "$ts_prep_dir"

rm_ts() {
  ts_dir="$1"
  ts_name="$(basename "$ts_dir")"
  echo "Removing: $(ls -ld "$ts_dir")"
  # ensure atomic removal, so that the gsm-tester doesn't take it as a
  # newly added dir (can happen when the 'SEEN' marker is removed first).
  mv "$ts_dir" "$ts_prep_dir/"
  rm -rf "$ts_prep_dir/$ts_name"
}

# keep the N newest test session dirs that have been started: find all that
# have been started sorted by time, then discard all but the N newest ones.

for seen in $(ls -1t "$ts_rx_dir"/*/SEEN | tail -n +31); do
  rm_ts "$(dirname "$seen")"
done
@@ -0,0 +1,59 @@
SETTING UP sysmobts

PACKAGE VERSIONS

Depending on the code to be tested, select the stable, testing or nightly opkg
feed.

To change the feed and packages installed on the sysmobts, edit the following
files in /etc/opkg/:

* all-feed.conf
* armv5te-feed.conf
* sysmobts-v2-feed.conf

and adjust the URL. For example, to move to the testing feeds:

  sed -i 's/201310/201310-testing/g' /etc/opkg/*.conf

Then run 'opkg update', 'opkg upgrade' and finally 'reboot'.


DISABLE SERVICES

To use the sysmobts together with the tester, the following systemd services
must be disabled. Use 'mask' rather than 'disable', so the services cannot be
started at all (not even as a dependency of another unit):

  systemctl mask osmo-nitb
  systemctl mask sysmobts
  systemctl mask sysmobts-mgr


SSH ACCESS

Copy the SSH public key from the system/user that runs the tester to the BTS
authorized keys file so the tester will be able to deploy binaries.
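
For example, from the tester main unit (assuming the BTS is reachable at
10.42.42.114 as in the static IP example below, and that ssh-copy-id is
available):

  ssh-copy-id root@10.42.42.114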

It is also advisable to configure the eth0 network interface of the BTS to a
static IP address instead of using DHCP. To do so, adjust
/etc/network/interfaces and change the line

  iface eth0 inet dhcp

to

  iface eth0 inet static
    address 10.42.42.114
    netmask 255.255.255.0
    gateway 10.42.42.1

Set the name server in /etc/resolv.conf (most likely to the IP of the
gateway).


ALLOW CORE FILES

In case a binary run by the test crashes, we allow it to write a core file, so
that the crash can be analyzed later. This requires a limit rule:

  scp install/osmo-gsm-tester-limits.conf sysmobts:/etc/security/limits.d/
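
One way to verify that the rule is in effect is to log in to the BTS again
after copying the file and to check the core file size limit; assuming
pam_limits is applied to SSH logins (the usual default), it should report
'unlimited':

  ulimit -c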
@@ -0,0 +1,92 @@
INSTALLATION

So far, the osmo-gsm-tester directory is manually placed in /usr/local/src.


DEPENDENCIES

Packages required to run the osmo-gsm-tester:

  dbus
  python3
  python3-dbus
  python3-pip
  python3-mako
  tcpdump
  smpplib (pip install git+git://github.com/podshumok/python-smpplib.git)
  ofono

To build ofono:
  libglib2.0-dev
  libdbus-1-dev
  libudev-dev
  mobile-broadband-provider-info


INSTALLATION

Place a copy of the osmo-gsm-tester repository in /usr/local/src/

  cp install/osmo-gsm-tester-limits.conf /etc/security/limits.d/
  cp install/*.service /lib/systemd/system/
  cp install/org.ofono.conf /etc/dbus-1/system.d/
  systemctl daemon-reload

To run:

  systemctl enable ofono
  systemctl start ofono
  systemctl status ofono

  systemctl enable osmo-gsm-tester
  systemctl start osmo-gsm-tester
  systemctl status osmo-gsm-tester


To stop:

  systemctl stop osmo-gsm-tester

After ofonod has been started and modems have been connected to the system,
you can run the 'list-modems' script located in /usr/local/src/ofono/test to
get a list of the modems that have been detected by ofono.


CONFIGURATION

Host System configuration

Create the /var/tmp/osmo-gsm-tester directory. It will be used to accept new
test jobs.

Test resources (NITB, BTS and modems) are currently configured in
test_manager.py.

For every nitb resource that can be allocated, one alias IP address needs to
be set up in /etc/network/interfaces on the interface that is connected to the
BTSes. Add the following lines for each nitb instance that can be allocated
(making sure each interface alias and IP is unique):

  auto eth1:0
  allow-hotplug eth1:0
  iface eth1:0 inet static
    address 10.42.42.2
    netmask 255.255.255.0

Also make sure the user executing the tester is allowed to run tcpdump. If the
user is not root, we have used the following lines to get proper permissions:

  groupadd pcap
  addgroup <your-user-name> pcap
  setcap cap_net_raw,cap_net_admin=eip /usr/sbin/tcpdump
  chgrp pcap /usr/sbin/tcpdump
  chmod 0750 /usr/sbin/tcpdump

The tester main unit must be able to ssh without password to the sysmobts (and
possibly other) hardware: place the main unit's public SSH key on the
sysmoBTS. Log in via SSH at least once to accept the BTS' host key.


LAUNCHING A TEST RUN

osmo-gsm-tester watches /var/tmp/osmo-gsm-tester for instructions to launch
test runs. A test run is triggered by a subdirectory containing binaries and a
manifest file, typically created by jenkins using the enclosed scripts.
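
For illustration, a freshly received test session directory as produced by the
enclosed jenkins scripts looks roughly like this (names vary with the jenkins
node, build tag and build number; the SEEN marker, test-session.log and
test-report.xml are added by the tester itself while the session runs):

  /var/tmp/osmo-gsm-tester/<node>-<build-tag>/
    openbsc-x86_64-build-1.tgz
    osmo-bts-sysmo-build-1.tgz
    manifest.txt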
@@ -0,0 +1,11 @@
# systemd service file for the ofono daemon
[Unit]
Description=oFono

[Service]
ExecStart=/usr/local/src/ofono/src/ofonod -n
Restart=always
StartLimitInterval=0

[Install]
WantedBy=multi-user.target
@@ -0,0 +1,28 @@
<!-- This configuration file specifies the required security policies
     for oFono core daemon to work. It lives in /etc/dbus-1/system.d/ -->

<!DOCTYPE busconfig PUBLIC "-//freedesktop//DTD D-BUS Bus Configuration 1.0//EN"
 "http://www.freedesktop.org/standards/dbus/1.0/busconfig.dtd">
<busconfig>

  <!-- ../system.conf have denied everything, so we just punch some holes -->

  <policy user="root">
    <allow own="org.ofono"/>
    <allow send_destination="org.ofono"/>
    <allow send_interface="org.ofono.SimToolkitAgent"/>
    <allow send_interface="org.ofono.PushNotificationAgent"/>
    <allow send_interface="org.ofono.SmartMessagingAgent"/>
    <allow send_interface="org.ofono.PositioningRequestAgent"/>
    <allow send_interface="org.ofono.HandsfreeAudioAgent"/>
  </policy>

  <policy at_console="true">
    <allow send_destination="org.ofono"/>
  </policy>

  <policy context="default">
    <deny send_destination="org.ofono"/>
  </policy>

</busconfig>
@@ -0,0 +1,4 @@
# place this file in /etc/security/limits.d to allow core files when a program
# crashes; for osmo-gsm-tester.
root - core unlimited
* - core unlimited
@@ -0,0 +1,11 @@
# systemd service file for the osmo-gsm-tester daemon
[Unit]
Description=Osmocom GSM Tester

[Service]
ExecStart=/usr/local/src/osmo-gsm-tester/osmo-gsm-tester
Restart=on-abort
StartLimitInterval=0

[Install]
WantedBy=multi-user.target
@@ -0,0 +1,29 @@
# osmo_gsm_tester: automated cellular network hardware tests
#
# Copyright (C) 2016-2017 by sysmocom - s.f.m.c. GmbH
#
# Authors: D. Lazlo Sitzer <dlsitzer@sysmocom.de>
#          Neels Hofmeyr <neels@hofmeyr.de>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

__version__ = 'UNKNOWN'

try:
    from ._version import _version
    __version__ = _version
except:
    pass

# vim: expandtab tabstop=4 shiftwidth=4
@@ -0,0 +1,161 @@
# osmo_gsm_tester: read and validate config files
#
# Copyright (C) 2016-2017 by sysmocom - s.f.m.c. GmbH
#
# Author: Neels Hofmeyr <neels@hofmeyr.de>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

# discussion for choice of config file format:
#
# Python syntax is insane, because it allows the config file to run arbitrary
# python commands.
#
# INI file format is nice and simple, but it doesn't allow having the same
# section numerous times (e.g. to define several modems or BTS models) and does
# not support nesting.
#
# JSON has too many braces and quotes to be easy to type.
#
# YAML formatting is lean, but too powerful. The normal load() allows arbitrary
# code execution. There is safe_load(). But YAML also allows several
# alternative ways of formatting, better to have just one authoritative style.
# Also it would be better to receive every setting as simple string rather than
# e.g. an IMSI as an integer.
#
# The Python ConfigParserShootout page has numerous contestants, but we want
# to use widely used, standardized parsing code without re-inventing the wheel.
# https://wiki.python.org/moin/ConfigParserShootout
#
# The optimum would be a stripped down YAML format.
# Lacking that, we shall go with yaml.safe_load() + a round trip
# (feeding back to itself), converting keys to lowercase and values to string.

import yaml
import re
import os

from . import log

def read(path, schema=None):
    with log.Origin(path):
        with open(path, 'r') as f:
            config = yaml.safe_load(f)
        config = _standardize(config)
        if schema:
            validate(config, schema)
        return config

def tostr(config):
    return _tostr(_standardize(config))

def _tostr(config):
    return yaml.dump(config, default_flow_style=False)

def _standardize_item(item):
    if isinstance(item, (tuple, list)):
        return [_standardize_item(i) for i in item]
    if isinstance(item, dict):
        return dict([(key.lower(), _standardize_item(val)) for key,val in item.items()])
    return str(item)

def _standardize(config):
    config = yaml.safe_load(_tostr(_standardize_item(config)))
    return config


KEY_RE = re.compile('[a-zA-Z][a-zA-Z0-9_]*')

def band(val):
    if val in ('GSM-1800', 'GSM-1900'):
        return
    raise ValueError('Unknown GSM band: %r' % val)

INT = 'int'
STR = 'str'
BAND = 'band'
SCHEMA_TYPES = {
    INT: int,
    STR: str,
    BAND: band,
}

def is_dict(l):
    return isinstance(l, dict)

def is_list(l):
    return isinstance(l, (list, tuple))

def validate(config, schema):
    '''Make sure the given config dict adheres to the schema.
    The schema is a dict of 'dict paths' in dot-notation with permitted
    value type. All leaf nodes are validated, nesting dicts are implicit.

        validate( { 'a': 123, 'b': { 'b1': 'foo', 'b2': [ 1, 2, 3 ] } },
                  { 'a': 'int',
                    'b.b1': 'str',
                    'b.b2[]': 'int' } )

    Raise a ValueError in case the schema is violated.
    '''

    def validate_item(path, value, schema):
        want_type = schema.get(path)

        if is_list(value):
            if want_type:
                raise ValueError('config item is a list, should be %r: %r' % (want_type, path))
            path = path + '[]'
            want_type = schema.get(path)

        if not want_type:
            if is_dict(value):
                nest(path, value, schema)
                return
            if is_list(value) and value:
                for list_v in value:
                    validate_item(path, list_v, schema)
                return
            raise ValueError('config item not known: %r' % path)

        if want_type not in SCHEMA_TYPES:
            raise ValueError('unknown type %r at %r' % (want_type, path))

        if is_dict(value):
            raise ValueError('config item is dict but should be a leaf node of type %r: %r'
                             % (want_type, path))

        if is_list(value):
            for list_v in value:
                validate_item(path, list_v, schema)
            return

        with log.Origin(item=path):
            type_validator = SCHEMA_TYPES.get(want_type)
            type_validator(value)

    def nest(parent_path, config, schema):
        if parent_path:
            parent_path = parent_path + '.'
        else:
            parent_path = ''
        for k,v in config.items():
            if not KEY_RE.fullmatch(k):
                raise ValueError('invalid config key: %r' % k)
            path = parent_path + k
            validate_item(path, v, schema)

    nest(None, config, schema)

# vim: expandtab tabstop=4 shiftwidth=4
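
A quick way to see the normalization described in the comments above (keys
lowercased, values turned into strings) is a round trip through tostr(); run
from the repository root with PyYAML installed, the dict being arbitrary
example data:

  python3 -c "from osmo_gsm_tester import config; print(config.tostr({'IMSI': 901700000009031}))"

which prints:

  imsi: '901700000009031'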
@@ -0,0 +1,405 @@
# osmo_gsm_tester: global logging
#
# Copyright (C) 2016-2017 by sysmocom - s.f.m.c. GmbH
#
# Author: Neels Hofmeyr <neels@hofmeyr.de>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import os
import sys
import time
import traceback
import contextlib
from inspect import getframeinfo, stack

L_ERR = 30
L_LOG = 20
L_DBG = 10
L_TRACEBACK = 'TRACEBACK'

C_NET = 'net'
C_RUN = 'run'
C_TST = 'tst'
C_CNF = 'cnf'
C_DEFAULT = '---'

LONG_DATEFMT = '%Y-%m-%d_%H:%M:%S'
DATEFMT = '%H:%M:%S'

class LogTarget:
    do_log_time = None
    do_log_category = None
    do_log_level = None
    do_log_origin = None
    do_log_traceback = None
    do_log_src = None
    origin_width = None
    origin_fmt = None

    # redirected by logging test
    get_time_str = lambda self: time.strftime(self.log_time_fmt)

    # sink that gets each complete logging line
    log_sink = sys.stderr.write

    category_levels = None

    def __init__(self):
        self.category_levels = {}
        self.style()

    def style(self, time=True, time_fmt=DATEFMT, category=True, level=True, origin=True, origin_width=0, src=True, trace=False):
        '''
        set all logging format aspects, to defaults if not passed:
        time: log timestamps;
        time_fmt: format of timestamps;
        category: print the logging category (three letters);
        level: print the logging level, unless it is L_LOG;
        origin: print which object(s) the message originated from;
        origin_width: fill up the origin string with whitespace to this width;
        src: log the source file and line number the log comes from;
        trace: on exceptions, log the full stack trace;
        '''
        self.log_time_fmt = time_fmt
        self.do_log_time = bool(time)
        if not self.log_time_fmt:
            self.do_log_time = False
        self.do_log_category = bool(category)
        self.do_log_level = bool(level)
        self.do_log_origin = bool(origin)
        self.origin_width = int(origin_width)
        self.origin_fmt = '{:>%ds}' % self.origin_width
        self.do_log_src = src
        self.do_log_traceback = trace

    def style_change(self, time=None, time_fmt=None, category=None, level=None, origin=None, origin_width=None, src=None, trace=None):
        'modify only the given aspects of the logging format'
        self.style(
            time=(time if time is not None else self.do_log_time),
            time_fmt=(time_fmt if time_fmt is not None else self.log_time_fmt),
            category=(category if category is not None else self.do_log_category),
            level=(level if level is not None else self.do_log_level),
            origin=(origin if origin is not None else self.do_log_origin),
            origin_width=(origin_width if origin_width is not None else self.origin_width),
            src=(src if src is not None else self.do_log_src),
            trace=(trace if trace is not None else self.do_log_traceback),
            )

    def set_level(self, category, level):
        'set global logging log.L_* level for a given log.C_* category'
        self.category_levels[category] = level

    def is_enabled(self, category, level):
        if level == L_TRACEBACK:
            return self.do_log_traceback
        is_level = self.category_levels.get(category)
        if is_level is None:
            is_level = L_LOG
        if level < is_level:
            return False
        return True

    def log(self, origin, category, level, src, messages, named_items):
        if category and len(category) != 3:
            self.log_sink('WARNING: INVALID LOG SUBSYSTEM %r\n' % category)
            self.log_sink('origin=%r category=%r level=%r\n' % (origin, category, level))

        if not category:
            category = C_DEFAULT
        if not self.is_enabled(category, level):
            return

        log_pre = []
        if self.do_log_time:
            log_pre.append(self.get_time_str())

        if self.do_log_category:
            log_pre.append(category)

        if self.do_log_origin:
            if origin is None:
                name = '-'
            elif isinstance(origin, str):
                name = origin or None
            elif hasattr(origin, '_name'):
                name = origin._name
            if not name:
                name = str(origin.__class__.__name__)
            log_pre.append(self.origin_fmt.format(name))

        if self.do_log_level and level != L_LOG:
            log_pre.append(level_str(level) or ('loglevel=' + str(level)) )

        log_line = [str(m) for m in messages]

        if named_items:
            # unfortunately needs to be sorted to get deterministic results
            log_line.append('{%s}' %
                            (', '.join(['%s=%r' % (k,v)
                                        for k,v in sorted(named_items.items())])))

        if self.do_log_src and src:
            log_line.append(' [%s]' % str(src))

        log_str = '%s%s%s' % (' '.join(log_pre),
                              ': ' if log_pre else '',
                              ' '.join(log_line))

        self.log_sink(log_str.strip() + '\n')


targets = [ LogTarget() ]

def level_str(level):
    if level == L_TRACEBACK:
        return L_TRACEBACK
    if level <= L_DBG:
        return 'DBG'
    if level <= L_LOG:
        return 'LOG'
    return 'ERR'

def _log_all_targets(origin, category, level, src, messages, named_items=None):
    global targets
    if isinstance(src, int):
        src = get_src_from_caller(src + 1)
    for target in targets:
        target.log(origin, category, level, src, messages, named_items)

def get_src_from_caller(levels_up=1):
    caller = getframeinfo(stack()[levels_up][0])
    return '%s:%d' % (os.path.basename(caller.filename), caller.lineno)

def get_src_from_tb(tb, levels_up=1):
    ftb = traceback.extract_tb(tb)
    f,l,m,c = ftb[-levels_up]
    f = os.path.basename(f)
    return '%s:%s: %s' % (f, l, c)


class Origin:
    '''
    Base class for all classes that want to log,
    and to add an origin string to a code path:
    with log.Origin('my name'):
        raise Problem()
    This will log 'my name' as an origin for the Problem.
    '''

    _log_category = None
    _src = None
    _name = None
    _log_line_buf = None
    _prev_stdout = None

    _global_current_origin = None
    _parent_origin = None

    def __init__(self, *name_items, category=None, **detail_items):
        self.set_log_category(category)
        self.set_name(*name_items, **detail_items)

    def set_name(self, *name_items, **detail_items):
        if name_items:
            name = '-'.join([str(i) for i in name_items])
        elif not detail_items:
            name = self.__class__.__name__
        else:
            name = ''
        if detail_items:
            details = '(%s)' % (', '.join([("%s=%r" % (k,v))
                                           for k,v in sorted(detail_items.items())]))
        else:
            details = ''
        self._name = name + details

    def name(self):
        return self._name

    def set_log_category(self, category):
        self._log_category = category

    def _log(self, level, messages, named_items=None, src_levels_up=3, origins=None):
        src = self._src or src_levels_up
        origin = origins or self.gather_origins()
        _log_all_targets(origin, self._log_category, level, src, messages, named_items)

    def dbg(self, *messages, **named_items):
        self._log(L_DBG, messages, named_items)

    def log(self, *messages, **named_items):
        self._log(L_LOG, messages, named_items)

    def err(self, *messages, **named_items):
        self._log(L_ERR, messages, named_items)

    def log_exn(self, exc_info=None):
        log_exn(self, self._log_category, exc_info)

    def __enter__(self):
        if self._parent_origin is not None:
            return
        if Origin._global_current_origin == self:
            return
        self._parent_origin, Origin._global_current_origin = Origin._global_current_origin, self

    def __exit__(self, *exc_info):
        rc = None
        if exc_info[0] is not None:
            rc = exn_add_info(exc_info, self)
        Origin._global_current_origin, self._parent_origin = self._parent_origin, None
        return rc

    def redirect_stdout(self):
        return contextlib.redirect_stdout(self)

    def write(self, message):
        'to redirect stdout to the log'
        lines = message.splitlines()
        if not lines:
            return
        if self._log_line_buf:
            lines[0] = self._log_line_buf + lines[0]
            self._log_line_buf = None
        if not message.endswith('\n'):
            self._log_line_buf = lines[-1]
            lines = lines[:-1]
        origins = self.gather_origins()
        for line in lines:
            self._log(L_LOG, (line,), origins=origins)

    def flush(self):
        pass

    def gather_origins(self):
        origins = Origins()
        origin = self
        while origin:
            origins.add(origin)
            origin = origin._parent_origin
        return str(origins)


def dbg(origin, category, *messages, **named_items):
    _log_all_targets(origin, category, L_DBG, 2, messages, named_items)

def log(origin, category, *messages, **named_items):
    _log_all_targets(origin, category, L_LOG, 2, messages, named_items)

def err(origin, category, *messages, **named_items):
    _log_all_targets(origin, category, L_ERR, 2, messages, named_items)

def trace(origin, category, exc_info):
    _log_all_targets(origin, category, L_TRACEBACK, None,
                     traceback.format_exception(*exc_info))

def resolve_category(origin, category):
    if category is not None:
        return category
    if not hasattr(origin, '_log_category'):
        return None
    return origin._log_category

def exn_add_info(exc_info, origin, category=None):
    etype, exception, tb = exc_info
    if not hasattr(exception, 'origins'):
        exception.origins = Origins()
    if not hasattr(exception, 'category'):
        # only remember the deepest category
        exception.category = resolve_category(origin, category)
    if not hasattr(exception, 'src'):
        exception.src = get_src_from_tb(tb)
    exception.origins.add(origin)
    return False


def log_exn(origin=None, category=None, exc_info=None):
    if not (exc_info is not None and len(exc_info) == 3):
        exc_info = sys.exc_info()
        if not (exc_info is not None and len(exc_info) == 3):
            raise RuntimeError('invalid call to log_exn() -- no valid exception info')

    etype, exception, tb = exc_info

    # if there are origins recorded with the Exception, prefer that
    if hasattr(exception, 'origins'):
        origin = str(exception.origins)

    # if there is a category recorded with the Exception, prefer that
    if hasattr(exception, 'category'):
        category = exception.category

    if hasattr(exception, 'msg'):
        msg = exception.msg
    else:
        msg = str(exception)

    if hasattr(exception, 'src'):
        src = exception.src
    else:
        src = 2

    trace(origin, category, exc_info)
    _log_all_targets(origin, category, L_ERR, src,
                     ('%s:' % str(etype.__name__), msg))


class Origins(list):
    def __init__(self, origin=None):
        if origin is not None:
            self.add(origin)
    def add(self, origin):
        if hasattr(origin, '_name'):
            origin_str = origin._name
        else:
            origin_str = str(origin)
        self.insert(0, origin_str)
    def __str__(self):
        return '->'.join(self)


def set_level(category, level):
    global targets
    for target in targets:
        target.set_level(category, level)

def style(**kwargs):
    global targets
    for target in targets:
        target.style(**kwargs)

def style_change(**kwargs):
    global targets
    for target in targets:
        target.style_change(**kwargs)

class TestsTarget(LogTarget):
    'LogTarget producing deterministic results for regression tests'
    def __init__(self, out=sys.stdout):
        super().__init__()
        self.style(time=False, src=False)
        self.log_sink = out.write

def run_logging_exceptions(func, *func_args, return_on_failure=None, **func_kwargs):
    try:
        return func(*func_args, **func_kwargs)
    except:
        log_exn()
        return return_on_failure

# vim: expandtab tabstop=4 shiftwidth=4
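
For a quick look at the default log format produced by the plain LogTarget
above (one line on stderr with time, category, origin and source location; the
message text is arbitrary and the command assumes the repository root is the
current directory):

  python3 -c "from osmo_gsm_tester import log; log.log(None, log.C_TST, 'hello')"

which prints something like:

  12:34:56 tst -: hello  [<string>:1]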
@@ -0,0 +1,23 @@
# osmo_gsm_tester: process management
#
# Copyright (C) 2016-2017 by sysmocom - s.f.m.c. GmbH
#
# Author: Neels Hofmeyr <neels@hofmeyr.de>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.




# vim: expandtab tabstop=4 shiftwidth=4
@@ -0,0 +1,51 @@
# osmo_gsm_tester: manage resources
#
# Copyright (C) 2016-2017 by sysmocom - s.f.m.c. GmbH
#
# Author: Neels Hofmeyr <neels@hofmeyr.de>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import os

from . import log
from . import config
from .utils import listdict, FileLock

class Resources(log.Origin):

    def __init__(self, config_path, lock_dir):
        self.config_path = config_path
        self.lock_dir = lock_dir
        self.set_name(conf=self.config_path, lock=self.lock_dir)

    def ensure_lock_dir_exists(self):
        if not os.path.isdir(self.lock_dir):
            os.makedirs(self.lock_dir)


global_resources = listdict()

def register(kind, instance):
    global global_resources
    global_resources.add(kind, instance)

def reserve(user, config):
    raise NotImplementedError('resource reservation is not implemented yet')

def read_conf(path):
    with open(path, 'r') as f:
        conf = f.read()
    return conf

# vim: expandtab tabstop=4 shiftwidth=4
@@ -0,0 +1,150 @@
# osmo_gsm_tester: test suite
#
# Copyright (C) 2016-2017 by sysmocom - s.f.m.c. GmbH
#
# Author: Neels Hofmeyr <neels@hofmeyr.de>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import os
from . import config, log, template, utils

class Suite(log.Origin):
    '''A test suite reserves resources for a number of tests.
    Each test requires a specific number of modems, BTSs etc., which are
    reserved beforehand by a test suite. This way several test suites can be
    scheduled dynamically without resource conflicts arising halfway through
    the tests.'''

    CONF_FILENAME = 'suite.conf'

    CONF_SCHEMA = {
        'resources.nitb_iface': config.INT,
        'resources.nitb': config.INT,
        'resources.bts': config.INT,
        'resources.msisdn': config.INT,
        'resources.modem': config.INT,
        'defaults.timeout': config.STR,
    }

    class Results:
        def __init__(self):
            self.passed = []
            self.failed = []
            self.all_passed = None

        def add_pass(self, test):
            self.passed.append(test)

        def add_fail(self, test):
            self.failed.append(test)

        def conclude(self):
            self.all_passed = bool(self.passed) and not bool(self.failed)
            return self

    def __init__(self, suite_dir):
        self.set_log_category(log.C_CNF)
        self.suite_dir = suite_dir
        self.set_name(os.path.basename(self.suite_dir))
        self.read_conf()

    def read_conf(self):
        with self:
            if not os.path.isdir(self.suite_dir):
                raise RuntimeError('No such directory: %r' % self.suite_dir)
            self.conf = config.read(os.path.join(self.suite_dir,
                                                 Suite.CONF_FILENAME),
                                    Suite.CONF_SCHEMA)
            self.load_tests()

    def load_tests(self):
        with self:
            self.tests = []
            for basename in os.listdir(self.suite_dir):
                if not basename.endswith('.py'):
                    continue
                self.tests.append(Test(self, basename))

    def add_test(self, test):
        with self:
            if not isinstance(test, Test):
                raise ValueError('add_test(): pass a Test() instance, not %s' % type(test))
            if test.suite is None:
                test.suite = self
            if test.suite is not self:
                raise ValueError('add_test(): test already belongs to another suite')
            self.tests.append(test)

    def run_tests(self):
        results = Suite.Results()
        for test in self.tests:
            self._run_test(test, results)
        return results.conclude()

    def run_tests_by_name(self, *names):
        results = Suite.Results()
        for name in names:
            basename = name
            if not basename.endswith('.py'):
                basename = name + '.py'
            for test in self.tests:
                if basename == test.basename:
                    self._run_test(test, results)
                    break
        return results.conclude()

    def _run_test(self, test, results):
        try:
            with self:
                test.run()
                results.add_pass(test)
        except:
            results.add_fail(test)
            self.log_exn()

class Test(log.Origin):

    def __init__(self, suite, test_basename):
        self.suite = suite
        self.basename = test_basename
        self.set_name(self.basename)
        self.set_log_category(log.C_TST)
        self.path = os.path.join(self.suite.suite_dir, self.basename)
        with self:
            with open(self.path, 'r') as f:
                self.script = f.read()

    def run(self):
        with self:
            self.code = compile(self.script, self.path, 'exec')
            with self.redirect_stdout():
                exec(self.code, self.test_globals())
            self._success = True

    def test_globals(self):
        test_globals = {
            'this': utils.dict2obj({
                'suite': self.suite.suite_dir,
                'test': self.basename,
            }),
            'resources': utils.dict2obj({
            }),
        }
        return test_globals

def load(suite_dir):
    return Suite(suite_dir)

# vim: expandtab tabstop=4 shiftwidth=4
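
For reference, a minimal suite.conf matching the CONF_SCHEMA above could look
like the following (values are only illustrative; all resource counts are
validated as 'int', defaults.timeout as 'str'):

  resources:
    nitb: 1
    bts: 1
    modem: 2
  defaults:
    timeout: 60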
@@ -0,0 +1,56 @@
# osmo_gsm_tester: automated cellular network hardware tests
# Proxy to templating engine to handle files
#
# Copyright (C) 2016-2017 by sysmocom - s.f.m.c. GmbH
#
# Author: Neels Hofmeyr <neels@hofmeyr.de>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import os, sys
from mako.template import Template
from mako.lookup import TemplateLookup

from . import log
from .utils import dict2obj

_lookup = None
_logger = log.Origin('no templates dir set')

def set_templates_dir(*templates_dirs):
    global _lookup
    global _logger
    if not templates_dirs:
        # default templates dir is relative to this source file
        templates_dirs = [os.path.join(os.path.dirname(__file__), 'templates')]
    for d in templates_dirs:
        if not os.path.isdir(d):
            raise RuntimeError('templates dir is not a dir: %r'
                               % os.path.abspath(d))
    _lookup = TemplateLookup(directories=templates_dirs)
    _logger = log.Origin('Templates', category=log.C_CNF)

def render(name, values):
    '''feed values dict into template and return rendered result.
    ".tmpl" is added to the name to look it up in the templates dir.'''
    global _lookup
    if _lookup is None:
        set_templates_dir()
    with _logger:
        tmpl_name = name + '.tmpl'
        template = _lookup.get_template(tmpl_name)
        _logger.dbg('rendering', tmpl_name)
        return template.render(**dict2obj(values))

# vim: expandtab tabstop=4 shiftwidth=4