First commit

Signed-off-by: Jan Lindemann <jan@janware.com>
Jan Lindemann 2022-11-01 13:28:53 +01:00
commit 4b912741cb
73 changed files with 3753 additions and 0 deletions

4
src/python/Makefile Normal file
View file

@@ -0,0 +1,4 @@
TOPDIR = ../..
include $(TOPDIR)/make/proj.mk
include $(JWBDIR)/make/py-mods.mk

View file

@@ -0,0 +1,4 @@
TOPDIR = ../../..
include $(TOPDIR)/make/proj.mk
include $(JWBDIR)/make/py-mod.mk

View file

@@ -0,0 +1,127 @@
# -*- coding: utf-8 -*-
import asyncio
import sys
import traceback
import jwutils
from jwutils.log import *
from .Machine import Machine
from .MachineCmd import MachineCmd
from .TestPhases import TestPhases
from .TestCases import TestCases
from .TestResults import TestResults
class CmdTestOs(MachineCmd): # export
def __init__(self):
super().__init__('test-os', help='Run batches of tests against a machine')
self.__results = TestResults()
# -- phase trigger methods
async def __trigger_boot(self, env, machine):
slog(INFO, "requesting power-on")
await machine.request_power_on(env)
async def __trigger_up(self, env, machine):
await machine.wait_up(env)
async def __trigger_shutdown(self, env, machine):
await machine.request_shutdown(env)
async def __trigger_post(self, env, machine):
await machine.wait_poweroff(env)
# -- phase filtered batches
async def __establish_connections(self, phase, env, machine):
for conn in env.connections:
if not phase in conn.info.phases:
if conn.instance is None:
continue
slog(INFO, 'Phase "{}": closing connection "{}" with phases [{}]'.format(phase.name, conn.info, conn.info.phases_str))
await machine.unregister_connection(conn.instance)
conn.instance = None
continue
slog(DEBUG, " phase matches")
if conn.instance is not None:
continue
slog(INFO, 'Phase "{}": opening connection "{}" with phases [{}]'.format(phase.name, conn.info, conn.info.phases_str))
conn.instance = machine.register_connection(env, conn.info)
async def __run_test_cases(self, phase, env, machine, cases):
connections = [ c.instance for c in env.connections if c.instance ]
for case in cases:
if not phase in case.phases:
continue
delim = '--- running test case "{}"'.format(case)
prio = NOTICE
prefix = ' {}|{}'.format(*console_color_chars(prio))
slog(prio, ',' + delim)
jwutils.log.append_to_prefix(prefix)
try:
if not machine.clear_for_tests():
raise Exception("machine is not clear for running tests")
rr = await case.run(env, machine, phase)
if rr:
slog(ERR, "FAIL: ", rr)
except Exception:
rr = sys.exc_info()[1]
slog(ERR, "FAIL: ", rr)
slog_m(ERR, traceback.format_exc())
self.__results.add(case, phase, rr)
jwutils.log.remove_from_prefix(len(prefix))
slog(prio, '`' + delim)
@classmethod
def __get_trigger(cls, phase):
return getattr(cls, '_{}__trigger_{}'.format(cls.__name__, phase.name.lower()), None)
# override MachineCmd._run()
async def _run(self, env):
machine = None
try:
machine = await Machine.create(env)
test_cases = TestCases(env.args.test_case_path, dummies=env.args.dummy_tests)
for phase in TestPhases.Phase:
if not machine.clear_for_tests():
raise Exception("machine is not clear for running tests")
delim = '-' * 60 + " Phase {} -- ".format(phase.name)
prio = NOTICE
prefix = ' {}|{}'.format(*console_color_chars(prio))
slog(prio, ',' + delim + '>')
jwutils.log.append_to_prefix(prefix)
try:
trigger_method = self.__get_trigger(phase)
if trigger_method is not None:
await trigger_method(phase, env, machine)
await self.__establish_connections(phase, env, machine)
await self.__run_test_cases(phase, env, machine, test_cases)
finally:
jwutils.log.remove_from_prefix(len(prefix))
slog(prio, '`' + delim + '<')
except Exception as e:
slog(ERR, "failed to run tests against machine {}: {}".format(machine, e))
slog_m(ERR, traceback.format_exc())
raise
finally:
if machine is not None:
await machine.request_shutdown(env)
await machine.wait_poweroff(env)
await machine.cleanup(env)
for conn in env.connections:
if conn.instance:
await machine.unregister_connection(conn.instance)
conn.instance = None
del machine
self.__results.dump(env.args)
n_failed = self.__results.n_failed()
if (n_failed):
slog(ERR, "%d test%s failed" % (n_failed, "" if n_failed == 1 else "s"))
return -1
return 0
def add_parser(self, parsers):
p = super().add_parser(parsers)
self.__results.add_arguments(p)
return p

View file

@@ -0,0 +1,136 @@
# -*- coding: utf-8 -*-
from abc import ABC, abstractmethod
from enum import Enum, IntFlag
import asyncio
import types
from jwutils.log import *
from .TestPhases import TestPhases
# TODO: Make this a valid abstract python stream
class Connection(ABC): # export
class Proto(Enum):
Serial = 1
Console = 2
CAN = 3
HTTP = 4
Fifos = 5
class Flags(IntFlag):
FailOnTimeout = 0x01
ReadStripNewline = 0x02
ReadDecodeToString = 0x04
class Info:
@staticmethod
def _parse_attr_proto(rhs):
if not rhs in Connection.Proto._member_names_:
raise Exception("Unknown connection protocol >" + rhs + "<")
return Connection.Proto[rhs]
@staticmethod
def _parse_attr_phases(rhs):
return TestPhases.parse(rhs)
def __getitem__(self, key_):
key, default = (key_[0], key_[1]) if isinstance(key_, tuple) else (key_, None)
if key in self.__spec.keys():
return self.__spec[key]
return default
#p.add_argument("-C", "--connection", help="Connection specification of the form 'proto=console,phase=pre|up-shutdown'", action='append', default=[])
def __init(self, spec):
self.spec = spec
self.phases = set()
self.proto = None
tokens = spec.split(',')
if not len(tokens):
raise Exception("Empty connection spec")
if tokens[0].find("=") == -1:
tokens[0] = 'proto=' + tokens[0]
self.__spec = {}
for tok in tokens:
assignment = tok.split('=')
lhs = assignment[0]
rhs = assignment[1] if len(assignment) > 1 else True
func = getattr(Connection.Info, '_parse_attr_' + lhs, None)
if not isinstance(func, types.FunctionType):
#setattr(self, lhs, rhs) # FIXME: This is overkill
self.__spec[lhs] = rhs
else:
setattr(self, lhs, func(rhs))
self.__spec[lhs] = func(rhs)
def __init__(self, spec):
self.__init(spec)
def __format__(self, fmt):
return self.spec
@staticmethod
def parse_spec(spec):
return Connection.Info(spec)
@property
def phases_names(self):
return [p.name for p in self.phases]
@property
def phases_str(self):
return ', '.join(self.phases_names)
def __init__(self, env, spec):
self.env = env
if isinstance(spec, self.Info):
self.__info = spec
elif isinstance(spec, str):
self.__info = Connection.Info(spec)
else:
raise TypeError('Invalid type "{}" of spec "{}", should be string.'.format(type(spec), spec))
def __format__(self, fmt):
return self.__info.spec
@abstractmethod
async def _read(self, act_timeout, flags):
pass
@abstractmethod
async def _readline(self, timeout):
pass
@abstractmethod
async def _write(self, data):
pass
@abstractmethod
async def _close(self):
pass
@property
def info(self):
return self.__info
@info.setter
def info(self, value):
raise Exception("Tried to set immutable attribute info")
async def read(self, act_timeout=None, flags=Flags.ReadDecodeToString):
r = await self._read(act_timeout, flags)
if r is not None:
if flags & self.Flags.ReadDecodeToString:
r = r.decode('utf-8')
if flags & self.Flags.ReadStripNewline:
if len(r) and r[-1] == '\n':
r = r[0:-1]
return r
async def readline(self, timeout=None):
return await self._readline(timeout)
async def write(self, data):
return await self._write(data)
async def close(self):
return await self._close()

View file

@@ -0,0 +1,43 @@
# -*- coding: utf-8 -*-
from .Connection import Connection
class Connections: # export
class Instance:
def __init__(self, info, conn = None):
self.info = info
self.instance = conn
def __init__(self, rhs=None):
if rhs:
self.__contents = rhs.__contents
else:
self.__contents = []
self.__idx = 0
def __iter__(self):
return self
def __next__(self):
self.__idx += 1
try:
return self.__contents[self.__idx-1]
except IndexError:
self.__idx = 0
raise StopIteration
def __len__(self):
return len(self.__contents)
def append(self, stuff):
if isinstance(stuff, Connection.Info):
self.__contents.append(Connections.Instance(stuff, None))
elif isinstance(stuff, Connection):
self.__contents.append(Connections.Instance(stuff.info, stuff))
elif isinstance(stuff, Connections.Instance):
self.__contents.append(stuff)
elif isinstance(stuff, str):
self.__contents.append(Connections.Instance(Connection.Info(stuff), None))
else:
raise Exception("tried to append connection of incompatible type ", stuff)

View file

@@ -0,0 +1,18 @@
# -*- coding: utf-8 -*-
from .Connections import Connections
from jwutils import Options
class Environment:
def __init__(self, cmd, args):
self.args = args
self.cmd = cmd
self.connections = Connections()
self.features = Options(args.features)
for spec in args.connection:
self.connections.append(spec)
@property
def eloop(self):
return self.cmd.cmds.eloop

View file

@@ -0,0 +1,129 @@
# -*- coding: utf-8 -*-
import os
import sys
import re
from abc import ABC, abstractmethod
from enum import Enum
import tempfile
import shutil
import asyncio
import importlib
from jwutils.log import *
from jwutils.misc import *
from .Connection import Connection
from .conn.Fifos import Fifos as ConnFifos
class Machine(ABC): # export
def __init__(self, env):
# -- readonly stuff
self.__env = env
self.__name = env.args.platform + '@' + env.args.backend
self.__exe_path = None
self.__tmpdir = None
self.__connections = {}
def __format__(self, fmt):
return self.__name
def _register_connection(self, conn):
if conn.info.spec in self.__connections.keys():
raise Exception("Connection {} is already up".format(conn))
self.__connections[conn.info.spec] = conn
async def _unregister_connection(self, conn):
spec = conn.info.spec
if spec in self.__connections.keys():
del self.__connections[conn.info.spec]
await conn.close()
@classmethod
async def create(cls, env):
backend = env.args.backend
if backend.find('.') == -1:
backend = 'devtest.os.be.' + backend
slog(NOTICE, "loading machine ", backend)
mod = importlib.import_module(backend)
r = mod.Machine(env)
await r.init()
return r
@property
def name(self):
return self.__name
@name.setter
def name(self, value):
self.__name = value
@property
def env(self):
return self.__env
@property
def exe_path(self):
if not self.__exe_path:
self.__exe_path = os.path.abspath(sys.argv[0])
return self.__exe_path
@property
def exe_basename(self):
return re.sub(r'\.py$', '', os.path.basename(self.exe_path))
@property
def tmpdir(self):
if self.env.args.persistent_tmpdir:
if not self.__tmpdir:
self.__tmpdir = self.env.args.persistent_tmpdir
shutil.rmtree(self.__tmpdir, ignore_errors=True)
os.mkdir(self.__tmpdir)
return self.__tmpdir
if not self.__tmpdir:
self.__tmpdir = tempfile.TemporaryDirectory(prefix=self.exe_basename+'-')
return self.__tmpdir.name
@abstractmethod
async def init(self):
pass
async def cleanup(self, env):
pass
def clear_for_tests(self):
return True
def register_connection(self, env, info):
slog(NOTICE, 'opening connection "{}" to platform "{}"'.format(info, self.env.args.platform))
try:
mod = importlib.import_module('devtest.os.conn.' + info.proto.name) # untested
classes = get_derived_classes(mod, Connection)
if not len(classes):
raise Exception("Tried to establish connection {} of unsupported type {}".format(info, info.proto))
slog(DEBUG, 'constructing connection with info "{}"'.format(info))
r = classes[0](env, info)
slog(DEBUG, "found connection definition class >%s<" % type(r))
self._register_connection(r)
except Exception as e:
slog(ERR, "failed to open connection {}: {}".format(info, e))
raise
return r
async def unregister_connection(self, conn):
await self._unregister_connection(conn)
await conn.close()
@abstractmethod
async def request_power_on(self, env):
pass
async def wait_up(self, env):
return True
@abstractmethod
async def request_shutdown(self, env):
pass
async def wait_poweroff(self, env):
return True

View file

@@ -0,0 +1,44 @@
# -*- coding: utf-8 -*-
from abc import abstractmethod
import jwutils
from jwutils.log import *
from .Environment import Environment
class MachineCmd(jwutils.Cmd): # export
def __init__(self, name, help):
super().__init__(name, help=help)
def add_parser(self, parsers):
p = super().add_parser(parsers)
p.add_argument("-B", "--backend", help="Machine backend", default='qemu')
p.add_argument("-P", "--platform", help="Target platform type", required=True)
p.add_argument("-C", "--connection", help="Connection specification of the form 'proto=console,phase=pre|up-shutdown'", action='append', default=[])
p.add_argument("-T", "--test-case-path", help="Test case search path, directories separated by colons", required=True)
p.add_argument( "--shutdown-timeout", help="Timeout for a machine to complete an ACPI shutdown request in ms", default=60000)
p.add_argument( "--poweroff-timeout", help="Timeout for a machine to complete a poweroff request in ms", default=100)
p.add_argument( "--features", help="Comma-separated list of features that are passed to the test cases for interpretation", default='')
# TODO: Don't pollute list of generic machine options with Qemu specific stuff
p.add_argument("-R", "--root-part-number", help="Number of root partition (starting from 1)")
p.add_argument("--medium", help="Boot medium (file system path or \"net\")")
p.add_argument( "--persistent-tmpdir", help="Temporary directory for storing runtime data and scripts, by default a random one is created", default=None)
p.add_argument( "--dummy-tests", help="Add hard-coded dummy test cases to the test run", action='store_true', default=False)
# TODO: Don't pollute list of generic machine options with Qemu specific stuff
# TODO: Remove short options, currently only there for qemu.sh compatibility
p.add_argument("-S", "--qe-network-script", help="Script to be called for network bringup / teardown, builtin version is used by default", default=None)
p.add_argument("-k", "--qe-kernel", help="Kernel for Qemu to use, none by default", default=None)
p.add_argument("-D", "--qe-dtb", help="Device-tree blob for the kernel to use, requires --qe-kernel, none by default", default=None)
p.add_argument("-o", "--qe-opts", help="Additional arguments to Qemu", default=None)
return p
@abstractmethod
async def _run(self, env):
pass
async def run(self, args):
env = Environment(self, args)
return await self._run(env)
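# Illustrative invocation only -- the entry-point script is not shown in this
# commit, so its name and the paths below are placeholders:
#   ./devtest.py test-os -P versatilepb --medium rootfs.img \
#       -C "Console,phases=up-shutdown" -T tests:extra-tests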

View file

@@ -0,0 +1,4 @@
TOPDIR = ../../../..
include $(TOPDIR)/make/proj.mk
include $(JWBDIR)/make/py-mod.mk

View file

@@ -0,0 +1,16 @@
# -*- coding: utf-8 -*-
from jwutils.log import *
from .TestCase import TestCase
class TcDummy(TestCase):
@property
def is_dummy(self):
return True
async def _run(self, env, machine, phase):
slog(NOTICE, "running dummy test case in phase >%s<" % phase.name)
#return None
# return an error message string if this test fails
return "Stumbled over chair"

View file

@@ -0,0 +1,49 @@
# -*- coding: utf-8 -*-
from abc import abstractmethod, ABC
from jwutils.misc import *
from .TestPhases import TestPhases
# could be derived from Python unittest.TestCase, I currently don't see the
# benefit, though
class TestCase: # export
def __init__(self):
self.__name = object_builtin_name(self)
self.__factory = None
self.__phases = { TestPhases.Phase.Up }
def __format__(self, fmt):
return self.name
@abstractmethod
async def _run(self, env, machine, phase):
pass
@property
def name(self):
return self.__name
@name.setter
def name(self, value):
self.__name = value
@property
def factory(self):
return self.__factory
@factory.setter
def factory(self, value):
self.__factory = value
@property
def phases(self):
return self.__phases
@phases.setter
def phases(self, value):
self.__phases = value
@property
def is_dummy(self):
return False
async def run(self, env, machine, phase):
return await self._run(env, machine, phase)

View file

@@ -0,0 +1,12 @@
# -*- coding: utf-8 -*-
from abc import abstractmethod, ABC
class TestCaseFactory(ABC): # export
def __init__(self, path):
self.path = path
@abstractmethod
def test_cases(self):
pass
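# Illustrative sketch only (not part of this commit): a concrete factory
# returns TestCase instances and is picked up automatically by TestCases,
# which scans sys.modules for non-abstract TestCaseFactory subclasses, e.g.
#
#   class TcfStatic(TestCaseFactory):
#       def test_cases(self):
#           from .TcDummy import TcDummy
#           return [TcDummy()]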

View file

@@ -0,0 +1,60 @@
# -*- coding: utf-8 -*-
from abc import abstractmethod, ABC
import sys
import re
import inspect
from jwutils.log import *
from jwutils.misc import *
from .TestCase import TestCase
from .TestCaseFactory import TestCaseFactory
from .tcf import *
class TestCases: # export
def __init__(self, path, dummies=False):
self.__path = path.split(':')
self.__dummies = dummies
self.__idx = 0
self.__cases = []
factories = set()
# use list() to avoid expansion of sys.modules during iteration
for name, mod in list(sys.modules.items()):
members = inspect.getmembers(mod, inspect.isclass)
for name, c in members:
if inspect.isabstract(c):
continue
if TestCaseFactory in inspect.getmro(c):
factories.add(c)
continue
slog(NOTICE, "using test case factories:", ', '.join([f.__name__ for f in factories]))
for factory in factories:
slog(INFO, "checking test case factory", str(factory))
cases = factory(self.__path).test_cases()
for c in cases:
if c.is_dummy and not self.__dummies:
continue
c.factory = factory
self.__cases.append(c)
# -- fancy log found test cases
delim = "-" * 60 + " found test cases"
fmt = "| {:>30} | {:<10} | {}"
slog(NOTICE, "," + delim + " >")
slog(NOTICE, fmt.format('Factory', 'Phases', 'Test Case'))
slog(NOTICE, "+" + re.sub('[^-]', '-', delim) + "--")
for c in self.__cases:
slog(NOTICE, fmt.format(c.factory.__name__, ', '.join([p.name for p in c.phases]), c.name))
slog(NOTICE, "`" + delim + " <")
def __iter__(self):
return self
def __next__(self):
self.__idx += 1
try:
return self.__cases[self.__idx-1]
except IndexError:
self.__idx = 0
raise StopIteration # Done iterating.

View file

@@ -0,0 +1,39 @@
#!/usr/bin/python3
# -*- coding: utf-8 -*-
from enum import IntEnum
import itertools
class TestPhases: # export
class Phase(IntEnum):
Pre = 1
Boot = 2
Up = 3
Shutdown = 4
Post = 5
@staticmethod
def __parse(st):
return TestPhases.Phase[st.capitalize()]
@staticmethod
def parse(spec):
r = set()
terms = spec.split('|')
for t in terms:
interval = t.split('-')
if len(interval) == 1:
r.add(TestPhases.__parse(interval[0]))
continue
for p in itertools.islice(
TestPhases.Phase,
TestPhases.__parse(interval[0])-1,
TestPhases.__parse(interval[1])
):
r.add(p)
return r
if __name__ == '__main__':
for p in [ 'Pre-Post', 'Pre', 'Boot', 'Shutdown-Post', 'Blah' ]:
print("{} = {}".format(p, TestPhases.parse(p)))

View file

@@ -0,0 +1,143 @@
# -*- coding: utf-8 -*-
from enum import IntEnum
from termcolor import colored
from texttable import Texttable
from jwutils.log import *
class TestResults: # export
class Field(IntEnum):
Case = 0
Phase = 1
Result = 2
def __init__(self):
self.__deco = {
'border': Texttable.BORDER,
'header': Texttable.HEADER,
'hlines': Texttable.HLINES,
'vlines': Texttable.VLINES
}
self.__cols = {
'Case': {
'bits': set(['case']),
'fmt': self.__fmt_case
},
'Phase': {
'bits': set(['phase']),
'fmt': self.__fmt_phase
},
'Result': {
'bits': set(['result']),
'fmt': self.__fmt_result
},
'Message': {
'bits': set(['msg']),
'fmt': self.__fmt_msg
},
}
self.__col_aliases = {
'all': [
'Case',
'Phase',
'Result',
'Message',
]
}
self.__records = []
def ___cols(self, args):
cols = args.result_cols
if cols in self.__col_aliases:
return self.__col_aliases[cols]
return [c.strip(' ') for c in cols.split(',')]
def ___records(self, args):
# hook to apply filters defined in args
return self.__records
def __fmt_case(self, record):
return record[TestResults.Field.Case].name
def __fmt_phase(self, record):
return record[TestResults.Field.Phase].name
def __fmt_result(self, record):
rr = record[TestResults.Field.Result]
if rr is None:
prio = NOTICE
color = 'green'
r = "PASS"
else:
prio = ERR
color = 'red'
r = "FAIL"
return r
# currently disabled: Texttable doesn't support colored cell contents
#c_on, c_off = console_color_chars(prio)
#return c_on + r + c_off
#return colored(r, color)
def __fmt_msg(self, record):
rr = record[TestResults.Field.Result]
if rr is None:
return ""
return rr
def add_arguments(self, p):
p.add_argument("--result-cols", help="Comma-separated list of output columns, " +
"possible columns are: " + ', '.join(self.__cols.keys()), default="all")
p.add_argument("--result-deco", help="Comma-separated list of output table decoration flags, " +
"possible values are: " + ', '.join(self.__deco.keys()), default="all")
return p
def add(self, case, phase, result):
self.__records.append([case, phase, result])
def n_failed(self):
return len(list(filter(None, [rec[TestResults.Field.Result] for rec in self.__records])))
def dump(self, args):
t = Texttable(max_width=160)
# -- deco
deco_keys = args.result_deco
if deco_keys == 'all':
deco_keys = list(self.__deco.keys())
else:
deco_keys = [d for d in deco_keys.split(',') if d]
deco = 0x0
for d in deco_keys:
deco |= self.__deco[d]
t.set_deco(deco)
cols = self.___cols(args)
#slog(DEBUG, 'Columns: ' + ' '.join(cols))
records = self.___records(args)
bits = set()
for c in cols:
bits |= self.__cols[c]['bits']
#info = dict()
#out = dict()
rows = []
row = []
if 'header' in deco_keys:
for c in cols:
row.append(c)
rows.append(row)
#slog(DEBUG, 'Header: ' + ' '.join(row))
for record in records:
row = []
for c in cols:
row.append(self.__cols[c]['fmt'](record))
#slog(DEBUG, 'case-row: ' + ' '.join(row))
rows.append(row)
#slog(DEBUG, rows)
t.add_rows(rows)
print(t.draw())

View file

@@ -0,0 +1,4 @@
TOPDIR = ../../../../..
include $(TOPDIR)/make/proj.mk
include $(JWBDIR)/make/py-mod.mk

View file

@@ -0,0 +1,235 @@
# -*- coding: utf-8 -*-
import os
import importlib
from abc import ABC
from abc import abstractmethod
import shutil
from jwutils.log import *
from ...Connection import Connection
class Invocation(ABC):
# -- private stuff
def __init__(self, machine):
self.__cl = None
self.__cf = ""
self.__mach = machine
@property
def mach(self):
return self.__mach
@property
def env(self):
return self.__mach.env
def __val(self, func_name, defval=None):
func_name = '_impl_' + func_name
method = getattr(self, func_name, None)
if not method:
if defval is not None:
return defval
raise Exception("tried to invoke unknown method >%s<" % func_name)
return method()
def __do_add_cl(self, stuff):
if isinstance(stuff, list):
self.__cl.extend(stuff)
else:
self.__cl.append(stuff)
def __add_cl(self, func_name, opt_name=None):
val = self.__val(func_name)
if not val:
return
arr = []
if opt_name:
arr.append(opt_name)
if isinstance(val, list):
arr.extend(val)
else:
arr.append(val)
self.__do_add_cl(arr)
def __add_cf(self, func_name, key):
val = self.__val(func_name)
if not val:
return
self.__cf += '{}="{}"\n'.format(key, val)
# -- protected API methods
#@abstractmethod
def _impl_brctl_exe(self):
r = shutil.which("brctl", path="/bin:/usr/bin:/sbin:/usr/sbin")
if r is None:
raise Exception("no brctl executable found")
return r
#@abstractmethod
def _impl_bridge_name(self):
return "brdev0"
#@abstractmethod
def _impl_bridge_net(self):
return "192.168.100.0/24"
#@abstractmethod
def _impl_bridge_addr(self):
return "192.168.100.1"
#@abstractmethod
def _impl_mac_addr(self):
return "00:0B:DC:9B:D6:DA"
#@abstractmethod
def _impl_nic_model(self):
pass
@abstractmethod
def _impl_qemu_exe(self):
pass
@abstractmethod
def _impl_machine(self):
pass
#@abstractmethod
def _impl_memory(self):
return "1G"
#@abstractmethod
def _impl_opts_interactive(self):
console = False
r = []
for conn in self.env.connections:
if conn.info.proto == Connection.Proto.Console:
slog(INFO, "found console: {}".format(conn.info))
#r.extend(['-serial pipe:/tmp/qemu-{}-console'.format(self.env.args.platform), '-display none'])
r.extend([
'-chardev', 'pipe,id=cpipe,path={},delay=off,wait=off,mux=off'.format(self.mach.fifo_base("console")),
'-serial', 'chardev:cpipe',
'-display', 'none'
])
console = True
r.extend([
'-chardev', 'pipe,id=cmon,path={},delay=off,wait=off'.format(self.mach.fifo_base("monitor")),
'-mon', 'chardev=cmon,mode=readline',
#'-mon', 'chardev=cmon,mode=control,pretty=off',
#'-monitor', 'stdio',
'-S', # start with CPU stopped, need "cont" command on monitor to start
])
if not console:
r.extend(["-nographic"])
return r
#@abstractmethod
def _impl_kernel(self):
if self.env.args.qe_kernel:
return self.env.args.qe_kernel
return None
#@abstractmethod
def _impl_qemu_opts(self):
if self.env.args.qe_opts:
slog(NOTICE, "qemu_opts=>" + self.env.args.qe_opts + "<")
#os.exit(1)
return self.env.args.qe_opts.split()
return None
#@abstractmethod
def _impl_dtb(self):
if self.env.args.qe_dtb:
return self.env.args.qe_dtb
return None
#@abstractmethod
def _impl_append(self):
pass
def _impl_append_wrapper(self):
r = self._impl_append()
ct = "devtest.qemu=1"
if not r or not len(r):
return r
if isinstance(r, list):
r.append(ct)
return r
return r + ' ' + ct
##@abstractmethod
def _impl_audio_dev(self):
return "none,id=blah"
#@abstractmethod
def _impl_disks(self):
pass
# -- public API
def typename(self):
return self.__class__.__name__
@classmethod
def create(cls, platform):
mod_name = "Invocation_" + platform
mod = importlib.import_module("devtest.os.be.qemu." + mod_name)
@property
def cmdline(self):
if self.__cl is None:
self.__cl = []
self.__do_add_cl([shutil.which("sudo"), "-E"])
self.__add_cl('qemu_exe')
self.__add_cl('opts_interactive')
self.__add_cl('machine', '-M')
self.__add_cl('kernel', '-kernel')
self.__add_cl('append_wrapper', '-append')
self.__add_cl('dtb', '-dtb')
self.__add_cl('memory', '-m')
self.__add_cl('audio_dev', '-audiodev')
self.__do_add_cl(['-rtc', 'base=utc'])
self.__add_cl('disks')
nic="nic"
val = self.__val('nic_model', '')
if val is not None and len(val):
nic += ',model=' + val
val = self.__val('mac_addr', '')
if val is not None and len(val):
nic += ',macaddr=' + val
self.__do_add_cl(["-net", nic, "-net", "tap,id=blah0,ifname=tap11"
+ ",script=" + self.__mach.net_helper_path(True)
+ ",downscript=" + self.__mach.net_helper_path(False)
])
self.__add_cl('qemu_opts')
return self.__cl
@property
def helper_config(self):
if not self.__cf:
keys = [
"append",
#"base",
"brctl_exe",
"bridge_addr",
"bridge_name",
"bridge_net",
#"dns_domain",
#"hostname",
#"short_hostname",
"kernel",
"dtb",
"mac_addr",
"machine",
#"nic_model",
"qemu_exe",
"qemu_opts",
#"root_dev",
#"root_part_number",
#"dev_platform"
]
for k in keys:
self.__add_cf(k, k)
return self.__cf

View file

@@ -0,0 +1,23 @@
# -*- coding: utf-8 -*-
from abc import abstractmethod
import shutil
from .Invocation import Invocation
class Invocation_q3beige(Invocation): # export
def __init__(self, machine):
super().__init__(machine)
self.__qemu_exe = shutil.which("qemu-system-ppc")
def _impl_qemu_exe(self):
return self.__qemu_exe
def _impl_machine(self):
return "g3beige"
def _impl_append(self):
return "console=ttyS0 root=/dev/hda2"
def _impl_disks(self):
return ["-hda", self.mach.env.args.medium]

View file

@@ -0,0 +1,23 @@
# -*- coding: utf-8 -*-
from abc import abstractmethod
import shutil
from .Invocation import Invocation
class Invocation_sabrelite(Invocation): # export
def __init__(self, machine):
super().__init__(machine)
self.__qemu_exe = shutil.which("qemu-system-arm")
def _impl_qemu_exe(self):
return self.__qemu_exe
def _impl_machine(self):
return "sabrelite"
def _impl_append(self):
return "root=/dev/mmcblk3p2 console=ttymxc0,115200 nomodeset vga=current video=vesafb:off locale.LANG=C locale.LANGUAGE=C locale.LC_MESSAGES=C locale.LC_CTYPE=C TERM=dumb"
def _impl_disks(self):
return ["-drive", "if=none,id=mydrive,file={},format=raw".format(self.mach.env.args.medium), "-device", "sd-card,id=sd0,drive=mydrive"]

View file

@@ -0,0 +1,26 @@
# -*- coding: utf-8 -*-
from abc import abstractmethod
import shutil
from .Invocation import Invocation
class Invocation_versatilepb(Invocation): # export
def __init__(self, machine):
super().__init__(machine)
self.__qemu_exe = shutil.which("qemu-system-arm")
def _impl_qemu_exe(self):
return self.__qemu_exe
def _impl_machine(self):
return "versatilepb"
def _impl_append(self):
return "console=ttyAMA0 root=/dev/mtdblock0 rootfstype=jffs2 rootdelay=20 block2mtd.block2mtd=/dev/sda3,131072"
def _impl_memory(self):
return "256M"
def _impl_disks(self):
return ["-hda", self.mach.env.args.medium ]

View file

@@ -0,0 +1,383 @@
# -*- coding: utf-8 -*-
import os
import asyncio
import subprocess
import importlib
import re
import io
import shutil
import functools
from jwutils.log import *
from jwutils.misc import get_derived_classes
from ...misc import *
from ...test import *
from ...Machine import Machine as MachineBase
from ...Connection import Connection
from ...conn.Fifos import Fifos as ConnFifos
from . import Invocation
_ifupdown_script="""#!/bin/bash
#echo exit for test purposes; exit 1
goodbye()
{
:
rm -rf $tmp_files
}
usage()
{
cat << EOT >&2
$myname -h
$myname {net|<image-file>} [-b hostname]
EOT
[ "$1" ] && exit $1
exit 0
}
log()
{
local tag=`whoami`@$myname
echo "$log_delim [$tag] $*"
/usr/bin/logger -t "$tag" "$*"
}
err()
{
log "$@" >&2
}
fatal()
{
err "Fatal: $@ ... giving up"
exit 1
}
run()
{
log running $@
"$@"
return $?
}
bridge()
{
case "$1" in
start)
die() {
log $@
bridge stop
exit 1
}
failed_to() {
die "failed to $@"
}
log setting up network on $bridge_name
local -r net_prefix_len=$(echo $bridge_net | sed 's:.*/::')
[ $net_prefix_len -lt 24 ] && fatal currently only class-C networks are supported
local -r net_addr=$(echo $bridge_net | sed 's:/.*::')
local -r last_octet_net_addr=$(echo $net_addr | sed 's/.*\.//')
if [ "$bridge_ip" ]; then
local_bridge_ip=$bridge_ip
else
local -r last_octet_bridge_ip=$(( $last_octet_net_addr + 1 ))
local_bridge_ip=$(echo $net_addr | sed "s/\.[0-9]\+$/.$last_octet_bridge_ip/")
fi
local -r last_octet_broadcast=$(( $last_octet_net_addr + 2 ** (32 - $net_prefix_len) - 1 ))
local -r broadcast=$(echo $net_addr | sed "s/\.[0-9]\+$/.$last_octet_broadcast/")
run $brctl_exe addbr $bridge_name || failed_to "add bridge $bridge_name"
#run $brctl_exe stp $bridge_name off || failed_to "disable spanning tree protocol on bridge $bridge_name"
run $brctl_exe setfd $bridge_name 0 || failed_to "set forward delay of bridge $bridge_name"
run $ip_exe address add $local_bridge_ip broadcast $broadcast dev $bridge_name \
|| failed_to "add IP address to bridge $bridge_name"
run $ip_exe link set $bridge_name up arp on || failed_to "switch on arp on bridge $bridge_name"
run $ip_exe route add to $bridge_net dev $bridge_name || failed_to "add route over bridge $bridge_name"
;;
stop)
log shutting down network on $bridge_name
run $ip_exe link set $bridge_name down
run $brctl_exe delbr $bridge_name
;;
restart)
bridge stop
sleep 1
bridge start
;;
check)
LANG=POSIX $brctl_exe show $bridge_name 2>&1 | grep -q "$bridge_name.*No such device" && return 1
$brctl_exe show $bridge_name >/dev/null 2>&1
return $?
;;
*)
echo "Usage: bridge {start|stop|restart|check}"
exit 1
;;
esac
}
ref_bridge()
{
bridge check || bridge start
}
unref_bridge()
{
bridge check || return 0
$brctl_exe show $bridge_name | awk "/^$bridge_name/ {print \$4}" | grep -q . || {
log bridge $bridge_name is unused, cleaning up
bridge stop
}
return 0
}
# -- here we go
myname=`basename $0`
log_delim="---------------"
log running $0 $@
[ -x $0 ] || chmod u+x $0 || fatal "$0 is not executable"
exe=`readlink -f $0`
dirname=`dirname $exe`
bridge_name=brdev0
#bridge_name=in1
base=dc=priv,dc=lcl
brctl_exe=`PATH=/usr/sbin:/sbin /usr/bin/which brctl`
ip_exe=`PATH=/usr/sbin:/sbin /usr/bin/which ip`
tmp_files=""
tmpdir=`dirname $0`
config=$tmpdir/config.sh
# -- qemu default options
bridge_net=192.168.100.0/24
mac_addr=,macaddr=00:0B:DC:9B:D6:DA
[ -r "$config" ] && . $config
trap goodbye INT QUIT EXIT KILL TERM PIPE
case $myname in
*ifup*)
ref_bridge
virt_if=$1
run $ip_exe link set $virt_if up
run $brctl_exe addif $bridge_name $virt_if
;;
*ifdown*)
virt_if=$1
run $brctl_exe delif $bridge_name $virt_if
run $ip_exe link set $virt_if down
unref_bridge
;;
*)
echo called as unknown executable name $0
;;
esac
"""
class SubprocessProtocol(asyncio.SubprocessProtocol):
def __init__(self, machine, name):
self.machine = machine
self.name = name
super().__init__()
def pipe_data_received(self, fd, data):
stream = "stdout" if fd == 1 else ("stderr" if fd == 2 else str(fd))
tag = stream + '@' + self.name
data = data.decode().rstrip('\n')
prio = WARNING if fd == 2 else NOTICE
for line in data.split('\n'):
slog(prio, "[%s] %s" % (tag, line.rstrip('\r\n')))
def process_exited(self):
slog(NOTICE, "[%s] process exited" % (self.name))
super().process_exited()
self.machine.qemu_exited()
class Machine(MachineBase): # export
def __init__(self, env):
super().__init__(env)
self.monitor = None
self.console = None
self.__running = False
self.__shutdown_requested = False
self.__clear_for_tests = True
self.__net_helper = None
self.__invocation = None
self.__transport = None
self.__protocol = None
self.__proc = None
self.__rc = None
self.__task = None
async def __await_monitor_prompt(self, act_timeout=2.):
if await expect(self.monitor, regex=r'\(qemu\) ', subject="Qemu monitor prompt",
act_timeout=act_timeout) is None:
raise Exception("timed out waiting for Qemu monitor prompt")
def __mkfifo(self, tp, out):
name = self.fifo_base(tp) + "." + ("out" if out else "in")
try:
st = os.stat(name)
except:
slog(DEBUG, "running mkfifo(%s)" % name)
os.mkfifo(name)
return name
def __mkfifos(self, tp):
return (self.__mkfifo(tp, True), self.__mkfifo(tp, False))
async def __exec_qemu(self, env):
def format_cmdline(arr):
r = ''
for tok in arr:
if re.search(' ', tok):
r += ' "%s"' % tok
continue
r += ' ' + tok
return r[1:]
try:
slog(INFO, "opening read-FIFOs to machine")
for name in [ "monitor", "console" ]:
c = ConnFifos(self.env, "Serial", paths=self.__mkfifos(name))
await c.fifo(ConnFifos.Dir.In).open(timeout=1.)
setattr(self, name, c)
slog(NOTICE, "==== invoking qemu: ", format_cmdline(self.invocation.cmdline))
self.__transport, self.__protocol = await env.eloop.subprocess_exec(
lambda: SubprocessProtocol(self, "qemu"),
*self.invocation.cmdline,
)
self.__proc = self.__transport.get_extra_info('subprocess') # Popen instance
for name in [ "monitor", "console" ]:
c = self.__dict__[name]
await c.fifo(ConnFifos.Dir.Out).open(retry=4, retry_log_level=INFO)
await self.__await_monitor_prompt()
except:
slog(ERR, "failed to run Qemu process")
raise
def __reap_qemu(self):
if self.__rc is None and self.__transport:
self.__transport = None
self.__rc = self.__proc.wait()
async def __cleanup_qemu(self):
self.__reap_qemu()
pid = self.__proc.pid if self.__proc else None
if self.__rc == 0 and self.__shutdown_requested:
slog(NOTICE, "the Qemu process (pid {}) has exited cleanly".format(self.__proc.pid))
self.monitor = self.console = self.__protocol = self.__task = None
return 0
self.__clear_for_tests = False
slog(ERR, "the Qemu process (pid {}) has exited {}with status code {}, aborting test".format(
pid, "" if self.__shutdown_requested else "prematurely ", self.__rc))
exit(1)
# ---- utilities
# to be called from SIGCHLD handler
def qemu_exited(self):
slog(INFO, "Qemu process exited")
self.__clear_for_tests = False
self.__reap_qemu()
#self.__cleanup_qemu()
@property
def invocation(self):
if not self.__invocation:
mod_name = "Invocation_" + self.env.args.platform
mod = importlib.import_module("devtest.os.be.qemu." + mod_name)
classes = get_derived_classes(mod, Invocation.Invocation)
if not len(classes):
raise Exception("unsupported platform >" + self.env.args.platform + "<")
self.__invocation = classes[0](self)
slog(DEBUG, "found invocation definition class >%s<" % self.__invocation.typename())
return self.__invocation
def net_helper_path(self, up):
if re.search(r'ifup|ifdown', self.exe_basename):
return None
if not self.__net_helper:
slog_m(INFO, "config = >%s<" % self.invocation.helper_config)
self.__net_helper = []
script_name = 'ifupdown.sh'
script_path = self.tmpdir + "/" + script_name
if self.env.args.qe_network_script:
shutil.copyfile(self.env.args.qe_network_script, script_path)
else:
with open(script_path, "w") as fd:
fd.write(_ifupdown_script)
os.chmod(script_path, 0o755)
with open(self.tmpdir + "/config.sh", "w") as fd:
fd.write(self.invocation.helper_config)
for name in [ "ifdown", "ifup" ]:
path = "{}/{}.sh".format(self.tmpdir, name)
os.symlink(script_name, path)
self.__net_helper.append(path)
return self.__net_helper[up]
def fifo_base(self, tp):
return self.tmpdir + "/qemu-" + self.env.args.platform + "-" + tp
# ---- reimplementation of class API methods
async def init(self):
self.__task = await self.env.eloop.create_task(self.__exec_qemu(self.env))
async def cleanup(self, env):
await self.__cleanup_qemu()
def clear_for_tests(self):
return self.__clear_for_tests
def register_connection(self, env, info):
slog(NOTICE, 'registering Qemu connection "{}" to platform "{}"'.format(
info, self.env.args.platform))
try:
if info.proto == Connection.Proto.Console:
r = self.console # TODO: info.spec ignored
else:
return super().register_connection(env, info)
self._register_connection(r)
except Exception as e:
slog(ERR, "failed to open connection {}: {}".format(info, e))
raise
return r
async def unregister_connection(self, conn):
return await super().unregister_connection(conn)
async def request_power_on(self, env):
if self.__running:
raise Exception("Tried to power on a running Qemu machine")
slog(NOTICE, "switching on CPU")
await self.monitor.write(b'cont\n')
await self.__await_monitor_prompt()
slog(NOTICE, "switched on CPU")
self.__running = True
async def wait_up(self, env):
return True
async def request_shutdown(self, env):
if not self.__shutdown_requested:
slog(NOTICE, "requesting shutdown")
self.__shutdown_requested = True
if self.monitor and self.__rc is None:
await self.monitor.write(b'quit\n')
async def wait_poweroff(self, env):
slog(NOTICE, "waiting on powerdown")
await self.__cleanup_qemu()
return True

View file

@@ -0,0 +1,4 @@
TOPDIR = ../../../../../..
include $(TOPDIR)/make/proj.mk
include $(JWBDIR)/make/py-mod.mk

View file

@@ -0,0 +1,5 @@
This directory contains a reference implementation of the abstract
devtest.os.Machine class using Qemu as the backend. If you want to
support your own machine backend devtest.os.be.foo, copy this
directory's Makefile to devtest/os/be/foo and implement your own
subclass in devtest/os/be/foo/Machine.py.
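A minimal sketch of such a subclass (illustrative only; the module path,
the no-op method bodies and the env-taking signatures are assumptions
based on the Qemu and shellcmd backends in this commit):

    # devtest/os/be/foo/Machine.py
    from ...Machine import Machine as MachineBase

    class Machine(MachineBase):  # export
        async def init(self):
            # backend-specific setup (abstract in the base class)
            pass

        async def request_power_on(self, env):
            # power on / start the target machine
            pass

        async def request_shutdown(self, env):
            # ask the target machine to shut down
            pass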

View file

@@ -0,0 +1,86 @@
# -*- coding: utf-8 -*-
import asyncio
from jwutils.log import *
from jwutils.asyncio import ShellCmd
from ...Machine import Machine as MachineBase
class Machine(MachineBase): # export
def __init__(self, env):
super().__init__(env)
self.__clear_for_tests = True
self.__running = False
self.__shutdown_requested = False
self.__cmds = {
'request-shutdown': "jw-switch-allesnix.sh off",
'wait-power-off' : "sleep 2",
'request-power-on': "jw-switch-allesnix.sh on",
'cleanup' : None
}
async def __run(self, phase):
cmd = None
if phase in self.__cmds.keys():
cmd = self.__cmds[phase]
if cmd is None:
slog(INFO, 'No command registered for phase "{}", not running'.format(phase))
return
if isinstance(cmd, str):
cmd = cmd.split(' ')
sc = ShellCmd(cmd)
await sc.run()
# ---- reimplementation of class API methods
async def init(self):
pass
#self.console = ConnSerial(self.env, spec='path=/dev/ttyS0')
#self.__task = await self.env.eloop.create_task(self.__exec_qemu(self.env))
async def cleanup(self, env):
await self.__run('cleanup')
def clear_for_tests(self):
return self.__clear_for_tests
def register_connection(self, env, info):
return super().register_connection(env, info)
async def unregister_connection(self, conn):
return await super().unregister_connection(conn)
async def request_power_on(self, env):
if self.__running:
raise Exception("Tried to power on a running shell command machine")
slog(NOTICE, "switching on CPU")
await self.__run('request-power-on')
slog(NOTICE, "switched on CPU")
#await asyncio.sleep(1)
self.__running = True
self.__clear_for_tests = True
async def wait_up(self, env):
count = 5
while not self.__running and count > 0:
await asyncio.sleep(1)
count -= 1
return self.__running
async def request_shutdown(self, env):
if not self.__shutdown_requested:
slog(NOTICE, "requesting shutdown")
await self.__run('request-shutdown')
self.__shutdown_requested = True
await asyncio.sleep(1)
self.__running = False
#self.__clear_for_tests = False
async def wait_poweroff(self, env):
slog(NOTICE, "waiting on powerdown")
count = 5
while self.__running and count > 0:
await asyncio.sleep(1)
count -= 1
return self.__running

View file

@@ -0,0 +1,4 @@
TOPDIR = ../../../../../..
include $(TOPDIR)/make/proj.mk
include $(JWBDIR)/make/py-mod.mk

View file

@@ -0,0 +1,202 @@
# -*- coding: utf-8 -*-
import os
import sys
import errno
import asyncio
from enum import IntEnum
import jwutils
from jwutils.log import *
from ..Connection import Connection
class Fifos(Connection): # export
class Dir(IntEnum):
In = 0
Out = 1
class Protocol(asyncio.Protocol):
def connection_made(self, transport):
slog(INFO, 'pipe opened', file=sys.stderr, flush=True)
super().connection_made(transport=transport)
def data_received(self, data):
slog(INFO, 'received: {!r}'.format(data), file=sys.stderr, flush=True)
slog(INFO, data.decode(), file=sys.stderr, flush=True)
super().data_received(data)
def connection_lost(self, exc):
slog(INFO, 'pipe closed', file=sys.stderr, flush=True)
super().connection_lost(exc)
class Fifo:
def __init__(self, pair, path, flags):
self.pair = pair
self.path = path
self.flags = flags
self.fd = None
self.stream = None
self.async_pipe = None
async def open(self, timeout=None, retry=4, retry_log_level=WARNING):
if self.stream is not None:
return
if self.async_pipe is not None:
return
try:
mode = 'wb' if self.flags & os.O_WRONLY else 'rb'
slog(INFO, "opening {}-fifo {}".format(mode, self.path))
loop = asyncio.get_event_loop()
max_retry = retry
while True:
try:
self.fd = os.open(self.path, self.flags | os.O_NONBLOCK)
if retry < max_retry:
slog(retry_log_level, "successfully opened", self.path, "in", max_retry - retry + 1, "attempts")
break
except OSError as e:
if e.errno != errno.ENXIO or retry <= 0:
raise
if timeout is not None:
raise # TODO: implement timeout counter
slog(retry_log_level, "failed to open {}, retrying {} more times".format(self.path, retry))
await asyncio.sleep(1, loop=loop)
retry -= 1
assert(self.fd is not None)
#self.fd = os.open(self.path, self.flags)
self.stream = os.fdopen(self.fd, mode=mode, buffering=1)
limit=asyncio.streams._DEFAULT_LIMIT
if self.flags & os.O_WRONLY:
#transp, proto = await self.pair.env.eloop.connect_write_pipe(Fifos.Protocol, self.stream)
#self.async_pipe = asyncio.streams.StreamWriter(transp, proto, None, loop)
transp, proto = await loop.connect_write_pipe(lambda: asyncio.streams.FlowControlMixin(loop=loop), self.stream)
self.async_pipe = asyncio.streams.StreamWriter(transp, proto, None, loop)
else:
#transp, proto = await self.pair.env.eloop.connect_read_pipe(Fifos.Protocol, self.stream)
#self.async_pipe = asyncio.streams.StreamReader(transp, proto, None, loop)
self.async_pipe = asyncio.StreamReader(limit=limit, loop=loop)
await loop.connect_read_pipe(lambda: asyncio.StreamReaderProtocol(self.async_pipe, loop=loop), self.stream)
except:
self.close()
raise
def close(self):
if self.fd is not None:
os.close(self.fd)
self.fd = None
self.async_pipe = None
self.stream = None
async def readline(self, timeout = None):
await self.open(timeout=timeout)
try:
if timeout is None:
#slog(WARNING, "--- running readline on", self.async_pipe)
r = await self.async_pipe.readline()
#self.close() # otherwise this sucker gives me readuntil() called while another coroutine is already waiting for incoming data
else:
use_timeout_task = False
if use_timeout_task:
slog(DEBUG, "readline({})".format(timeout))
#slog(WARNING, "--- running readline with timeout on", self.async_pipe)
task = asyncio.create_task(self.async_pipe.readline())
done, pending = await asyncio.wait({task}, timeout=timeout)
r = task.result() if task in done else None
for t in pending:
t.cancel()
else:
try:
r = await asyncio.wait_for(self.async_pipe.readline(), timeout=timeout)
except asyncio.TimeoutError:
r = None
slog(DEBUG, "read: >{}<".format(r))
return r
except:
slog(ERR, "failed to read from {}".format(self.path))
self.close()
raise
async def read(self, act_timeout, flags):
await self.open(timeout=act_timeout)
r = bytearray()
try:
try:
if act_timeout is None:
#slog(WARNING, "--- running read on", self.async_pipe)
r = await self.async_pipe.read()
else:
while True:
byte = await asyncio.wait_for(self.async_pipe.read(1), timeout=act_timeout)
if byte == b'\n' or len(byte) == 0:
break
r += byte
except asyncio.TimeoutError:
if r == None or len(r) == 0 or (flags & Connection.Flags.FailOnTimeout):
slog(DEBUG, "reading from pipe timed out after {}s, r={}".format(act_timeout, r))
r = None
slog(DEBUG, "read: >{}<".format(r))
return r
except:
slog(ERR, "failed to read from {}".format(self.path))
raise
async def write(self, data):
if isinstance(data, str):
data = data.encode("utf-8")
await self.open(timeout=None)
try:
slog(DEBUG, "writing to async pipe", self.async_pipe)
#return await self.async_pipe.write(data)
self.async_pipe.write(data)
await self.async_pipe.drain()
return len(data) # no way to get this from asyncio, WTF?
except Exception as e:
slog(ERR, "failed to write to {}: {}".format(self.path, e))
raise
def __init__(self, env, spec, paths=None):
slog(DEBUG, 'initializing fifo connection "{}" over {}'.format(spec, paths))
super().__init__(env, spec)
if not paths:
paths = [ self.info['rd'], self.info['wr'] ] # untested
self.__paths = paths
self.__fifos = []
for (n, mode, flags) in [(0, "r", os.O_RDONLY), (1, "w", os.O_WRONLY)]:
self.__fifos.append(Fifos.Fifo(self, paths[n], flags))
async def __readline(self, timeout=None):
r = await self.__fifos[0].readline(timeout)
if r is None:
return None
r = r.decode('utf-8').rstrip('\n\r')
return r
async def __read(self, act_timeout, flags):
r = await self.__fifos[0].read(act_timeout, flags)
if r is None:
return None
return r
async def __write(self, data):
slog(DEBUG, "writing data: type={}, contents=>{}<".format(type(data), data))
r = await self.__fifos[1].write(data)
#r = await self.__fifos[1].write(str.encode(data))
if r == 0:
slog(NOTICE, "reader closed")
async def _read(self, act_timeout, flags):
return await self.__read(act_timeout, flags)
async def _readline(self, timeout):
return await self.__readline(timeout)
async def _write(self, data):
return await self.__write(data)
async def _close(self):
slog(WARNING, "closing is not yet implemented (ignored)")
def fifo(self, d):
return self.__fifos[d]

View file

@@ -0,0 +1,4 @@
TOPDIR = ../../../../..
include $(TOPDIR)/make/proj.mk
include $(JWBDIR)/make/py-mod.mk

View file

@@ -0,0 +1,403 @@
# -*- coding: utf-8 -*-
import os
import sys
import errno
import asyncio
import termios
from enum import IntEnum
import jwutils
from jwutils.log import *
from ..Connection import Connection
class Serial(Connection): # export
class OutputProtocol(asyncio.Protocol):
def connection_made(self, transport):
self.transport = transport
print('port opened', transport)
transport.serial.rts = False # You can manipulate Serial object via transport
transport.write(b'echo "Hello, World!"\n') # Write serial data via transport
def data_received(self, data):
print('data received', repr(data))
#if b'\n' in data:
# self.transport.close()
def connection_lost(self, exc):
print('port closed')
self.transport.loop.stop()
def pause_writing(self):
print('pause writing')
print(self.transport.get_write_buffer_size())
def resume_writing(self):
print(self.transport.get_write_buffer_size())
print('resume writing')
class Protocol(asyncio.Protocol):
def connection_made(self, transport):
slog(INFO, 'serial opened', file=sys.stderr, flush=True)
super().connection_made(transport=transport)
def data_received(self, data):
slog(INFO, 'received: {!r}'.format(data), file=sys.stderr, flush=True)
slog(INFO, data.decode(), file=sys.stderr, flush=True)
super().data_received(data)
def connection_lost(self, exc):
slog(INFO, 'serial closed', file=sys.stderr, flush=True)
super().connection_lost(exc)
class Device:
# definitions courtesy of python-periphery by Vanya Sergeev
_DATABITS_TO_CFLAG = {
5: termios.CS5, 6: termios.CS6, 7: termios.CS7, 8: termios.CS8
}
_CFLAG_TO_DATABITS = {v: k for k, v in _DATABITS_TO_CFLAG.items()}
_BAUDRATE_TO_OSPEED = {
50: termios.B50, 75: termios.B75, 110: termios.B110, 134: termios.B134,
150: termios.B150, 200: termios.B200, 300: termios.B300,
600: termios.B600, 1200: termios.B1200, 1800: termios.B1800,
2400: termios.B2400, 4800: termios.B4800, 9600: termios.B9600,
19200: termios.B19200, 38400: termios.B38400, 57600: termios.B57600,
115200: termios.B115200, 230400: termios.B230400,
# Linux baudrates bits missing in termios module included below
460800: 0x1004, 500000: 0x1005, 576000: 0x1006,
921600: 0x1007, 1000000: 0x1008, 1152000: 0x1009,
1500000: 0x100A, 2000000: 0x100B, 2500000: 0x100C,
3000000: 0x100D, 3500000: 0x100E, 4000000: 0x100F,
}
_OSPEED_TO_BAUDRATE = {v: k for k, v in _BAUDRATE_TO_OSPEED.items()}
"""
Instantiate a Serial object and open the tty device at the specified
path with the specified baudrate, and the defaults of 8 data bits, no
parity, 1 stop bit, no software flow control (xonxoff), and no hardware
flow control (rtscts).
devpath (str): tty device path.
baudrate (int): baudrate.
databits (int): data bits, can be 5, 6, 7, 8.
parity (str): parity, can be "none", "even", "odd".
stopbits (int): stop bits, can be 1 or 2.
xonxoff (bool): software flow control.
rtscts (bool): hardware flow control.
"""
def __init__(self, parent, path, flags=os.O_RDWR, baudrate=115200, databits=8, parity='none', stopbits=1, xonxoff=False, rtscts=False):
parity = parity.lower()
if not isinstance(path, str):
raise TypeError('Invalid type of path "{}", should be string.'.format(path))
if not isinstance(baudrate, int):
raise TypeError('Invalid type of baud rate "{}", should be integer.'.format(baudrate))
if not isinstance(databits, int):
raise TypeError('Invalid type of data bits "{}", should be integer.'.format(databits))
if not isinstance(parity, str):
raise TypeError('Invalid type of parity "{}", should be string.'.format(parity))
if not isinstance(stopbits, int):
raise TypeError('Invalid type of stop bits "{}", should be integer.'.format(stopbits))
if not isinstance(xonxoff, bool):
raise TypeError('Invalid type of xonxoff "{}", should be boolean.'.format(xonxoff))
if not isinstance(rtscts, bool):
raise TypeError('Invalid type of rtscts "{}", should be boolean.'.format(rtscts))
if baudrate not in self._BAUDRATE_TO_OSPEED:
raise ValueError("Unknown baud rate: {:d}".format(baudrate))
if databits not in [5, 6, 7, 8]:
raise ValueError("Invalid data bits, can be 5, 6, 7, 8.")
if parity.lower() not in ["none", "even", "odd"]:
raise ValueError("Invalid parity, can be: \"none\", \"even\", \"odd\".")
if stopbits not in [1, 2]:
raise ValueError("Invalid stop bits, can be 1, 2.")
self._parent = parent
self._rd_stream = None
self._wr_stream = None
self._fd = None
self._stream_reader = None
self._stream_writer = None
self._path = path
self._flags = flags
self._baudrate = baudrate
self._databits = databits
self._parity = parity
self._stopbits = stopbits
self._xonxoff = xonxoff
self._rtscts = rtscts
self._use_termios_timeout = False
# serial_asyncio isn't available for all platforms
#self.coro = serial_asyncio.create_serial_connection(loop, OutputProtocol, '/dev/ttyS0', baudrate=115200)
async def _open_fd(self):
# Open tty
try:
fd = os.open(self._path, (self._flags & os.O_RDWR) | os.O_NOCTTY)
slog(DEBUG, 'os.open("{}", 0x{:02X}) = {}'.format(self._path, (self._flags & os.O_RDWR), fd))
except OSError as e:
raise Exception("Failed to open serial port: " + e.strerror)
(iflag, oflag, cflag, lflag, ispeed, ospeed, cc) = (0, 0, 0, 0, 0, 0, [0] * 32)
###
# iflag
# Ignore break characters
iflag = termios.IGNBRK
# Setup parity
if self._parity != "none":
iflag |= (termios.INPCK | termios.ISTRIP)
# Setup xonxoff
if self._xonxoff:
iflag |= (termios.IXON | termios.IXOFF)
#######
# oflag
oflag = 0
#######
# lflag
lflag = 0
#######
# cflag
# Enable receiver, ignore modem control lines
cflag = (termios.CREAD | termios.CLOCAL)
# Setup data bits
cflag |= self._DATABITS_TO_CFLAG[self._databits]
# Setup parity
if self._parity == "even":
cflag |= termios.PARENB
elif self._parity == "odd":
cflag |= (termios.PARENB | termios.PARODD)
# Setup stop bits
if self._stopbits == 2:
cflag |= termios.CSTOPB
# Setup rtscts
if self._rtscts:
cflag |= termios.CRTSCTS
# Setup baud rate
cflag |= self._BAUDRATE_TO_OSPEED[self._baudrate]
########
# ispeed
ispeed = self._BAUDRATE_TO_OSPEED[self._baudrate]
########
# ospeed
ospeed = self._BAUDRATE_TO_OSPEED[self._baudrate]
# Set tty attributes
try:
termios.tcsetattr(fd, termios.TCSANOW, [iflag, oflag, cflag, lflag, ispeed, ospeed, cc])
except termios.error as e:
raise Exception(e.errno, "Setting serial port attributes: " + e.strerror)
return fd
async def open(self, timeout=None, retry=4, retry_log_level=WARNING):
if self._rd_stream is not None:
return
if self._wr_stream is not None:
return
if self._stream_reader is not None:
return
if self._stream_writer is not None:
return
try:
mode = self._flags & os.O_RDWR
if mode == os.O_WRONLY:
mode_str = 'wb'
elif mode == os.O_RDONLY:
mode_str = 'rb'
else:
mode = os.O_RDWR
mode_str = 'r+b'
slog(INFO, "opening {} serial {}".format(mode_str, self._path))
loop = asyncio.get_event_loop()
max_retry = retry
while True:
try:
self._fd = await self._open_fd()
slog(DEBUG, "serial device open({}) = {}".format(self._path, self._fd))
if retry < max_retry:
slog(retry_log_level, "successfully opened", self._path, "in", max_retry - retry + 1, "attempts")
break
except OSError as e:
if e.errno != errno.ENXIO or retry <= 0:
raise
if timeout is not None:
raise # TODO: implement timeout counter
slog(retry_log_level, "failed to open {}, retrying {} more times".format(self._path, retry))
await asyncio.sleep(1, loop=loop)
retry -= 1
assert(self._fd is not None)
#self._fd = os.open(self._path, self._flags)
limit = asyncio.streams._DEFAULT_LIMIT
if mode == os.O_WRONLY or mode == os.O_RDWR:
self._wr_stream = os.fdopen(self._fd, mode='wb', buffering=1)
#transp, proto = await self.pair.env.eloop.connect_write_pipe(Fifos.Protocol, self._wr_stream)
#self._stream_writer = asyncio.streams.StreamWriter(transp, proto, None, loop)
transp, proto = await loop.connect_write_pipe(lambda: asyncio.streams.FlowControlMixin(loop=loop), self._wr_stream)
self._stream_writer = asyncio.streams.StreamWriter(transp, proto, None, loop)
if mode == os.O_RDONLY or mode == os.O_RDWR:
self._rd_stream = os.fdopen(self._fd, mode='rb', buffering=1)
#transp, proto = await self.pair.env.eloop.connect_read_pipe(Fifos.Protocol, self._rd_stream)
#self._stream_reader = asyncio.streams.StreamReader(transp, proto, None, loop)
self._stream_reader = asyncio.StreamReader(limit=limit, loop=loop)
await loop.connect_read_pipe(lambda: asyncio.StreamReaderProtocol(self._stream_reader, loop=loop), self._rd_stream)
except:
await self.close()
raise
async def close(self):
#if self._fd is not None:
# os.close(self._fd)
# self._fd = None
if self._wr_stream is not None:
self._wr_stream.close()
#await self._wr_stream.wait_closed()
self._wr_stream = None
if self._rd_stream is not None:
#self._rd_stream.close()
#await self._rd_stream.wait_closed()
self._rd_stream = None
self._stream_writer = None
self._stream_reader = None
self._fd = None
async def readline(self, timeout = None):
await self.open(timeout=timeout)
try:
if timeout is None:
#slog(WARNING, "--- running readline on", self._stream_reader)
r = await self._stream_reader.readline()
#self.close() # otherwise this sucker gives me readuntil() called while another coroutine is already waiting for incoming data
else:
use_timeout_task = False
if use_timeout_task:
slog(DEBUG, "readline({})".format(timeout))
#slog(WARNING, "--- running readline with timeout on", self._stream_reader)
task = asyncio.create_task(self._stream_reader.readline())
done, pending = await asyncio.wait({task}, timeout=timeout)
r = task.result() if task in done else None
for t in pending:
t.cancel()
else:
try:
r = await asyncio.wait_for(self._stream_reader.readline(), timeout=timeout)
except asyncio.TimeoutError:
r = None
slog(DEBUG, "read: >{}<".format(r))
return r
except:
slog(ERR, "failed to read from {}".format(self._path))
await self.close()
raise
async def read(self, act_timeout, flags):
await self.open(timeout=act_timeout)
r = bytearray()
try:
try:
if act_timeout is None:
#slog(WARNING, "--- running read on", self._stream_reader)
r = await self._stream_reader.read()
else:
while True:
byte = await asyncio.wait_for(self._stream_reader.read(1), timeout=act_timeout)
if byte == b'\n' or len(byte) == 0:
break
r += byte
except asyncio.TimeoutError:
if r == None or len(r) == 0 or (flags & Connection.Flags.FailOnTimeout):
slog(DEBUG, "reading from pipe timed out after {}s, r={}".format(act_timeout, r))
r = None
slog(DEBUG, "read: >{}<".format(r))
return r
except:
slog(ERR, "failed to read from {}".format(self._path))
raise
async def write(self, data):
if isinstance(data, str):
data = data.encode("utf-8")
await self.open(timeout=None)
try:
slog(DEBUG, "writing to async pipe", self._stream_writer)
#return await self._stream_writer.write(data)
self._stream_writer.write(data)
await self._stream_writer.drain()
            return len(data)  # asyncio's StreamWriter does not report the number of bytes written
except Exception as e:
slog(ERR, "failed to write to {}: {}".format(self._path, e))
raise
def __init__(self, env, spec, path=None):
#if not isinstance(spec, str):
# raise TypeError('Invalid type "{}" of spec "{}", should be string.'.format(type(spec), spec))
slog(DEBUG, 'initializing serial connection "{}" over {}'.format(spec, path))
super().__init__(env, spec)
if not path:
path = self.info['path']
self.__path = path
self.__device = self.Device(self, path,
flags=os.O_RDWR,
baudrate=int(self.info['baudrate', 115200]),
databits=int(self.info['databits', 8]),
parity=self.info['parity', 'none'],
stopbits=int(self.info['stopbits', 1]),
xonxoff=self.info['xonxoff', False],
rtscts=self.info['rtscts', False]
)
async def __readline(self, timeout=None):
r = await self.__device.readline(timeout)
if r is None:
return None
r = r.decode('utf-8').rstrip('\n\r')
return r
async def __read(self, act_timeout, flags):
r = await self.__device.read(act_timeout, flags)
if r is None:
return None
return r
async def __write(self, data):
slog(DEBUG, "writing data: type={}, contents=>{}<".format(type(data), data), only_printable=True)
r = await self.__device.write(data)
#r = await self.__device.write(str.encode(data))
if r == 0:
slog(NOTICE, "writer closed")
async def _read(self, act_timeout, flags):
return await self.__read(act_timeout, flags)
async def _readline(self, timeout):
return await self.__readline(timeout)
async def _write(self, data):
return await self.__write(data)
async def _close(self):
if self.__device is not None:
slog(NOTICE, "closing serial line {}".format(self.__path))
await self.__device.close()
self.__device = None

View file

@ -0,0 +1,75 @@
# -*- coding: utf-8 -*-
import importlib.util
import asyncio
import traceback
import re
import os
from datetime import datetime, timedelta
from jwutils.log import *
def consume_readbuffer(buf, carry, cmd, log_act=None, caller=None):
r = []
lines = buf.splitlines() if buf else []
if buf is None or not len(lines):
if carry:
return [carry], None
return [], None
if carry:
lines[0] = carry + lines[0]
carry = None
if log_act:
if caller is None:
caller = get_caller_pos(2)
for line in lines:
slog(log_act, line, caller=caller)
for line in lines:
        if line.find(cmd.rstrip('\n\r')) != -1:  # cmd carries a trailing newline, the split lines do not
slog(DEBUG, "ignoring echoed command >{}<".format(cmd))
else:
r.append(line)
    if r and not buf.endswith(('\n', '\r')):
        carry = r.pop(-1)
return r, carry
# returns
# None on error
# [] for an empty response
# [first line, second line]
async def cmd_exec(conn, cmd, act_timeout=0.1, total_timeout=None, newlines=False, log_act=None, caller=None, echo_cmd=True): # export
r = []
try:
if isinstance(cmd, str) and not cmd.endswith('\n'):
cmd += '\n'
        slog_m(DEBUG, 'connection {}, timeout {}/{}: writing command "{}"'.format(
conn, act_timeout, total_timeout, cmd), only_printable=True)
await conn.write(cmd)
end = datetime.now() + timedelta(seconds=total_timeout) if total_timeout else None
carry = ''
while True:
rest = min((end - datetime.now()).total_seconds(), act_timeout) if end else act_timeout
if rest >= 0:
buf = await conn.read(act_timeout=rest)
slog_m(DEBUG, 'connection {}: read response "{}"'.format(conn, buf))
if buf is not None and end and len(buf) == 0 and datetime.now() < end:
raise Exception("connection {} reset while reading command response".format(conn))
ret, carry = consume_readbuffer(buf, carry, cmd, log_act=log_act, caller=caller)
r.extend(ret)
if rest < 0 or not buf:
if not echo_cmd and cmd in r:
r.remove(cmd)
d = os.getenv("JW_DEVTEST_RESPONSE_DIR")
if d is not None:
path = d + '/' + re.sub('[^a-zA-Z0-9]+', '_', cmd)
path = path[:60] + '.txt'
slog(INFO, 'Command is: {}'.format(cmd), only_printable=True)
slog(INFO, 'Writing response to "{}"'.format(path))
with open(path, 'w') as fd:
fd.write('# ' + cmd)
fd.write('\n'.join(r))
return r
except Exception as e:
slog(ERR, "exception during cmd_exec({}): {}".format(cmd, e))
slog_m(ERR, traceback.format_exc())
return None
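# Illustrative usage (a sketch, not part of this module): cmd_exec() is normally
# handed a console connection obtained via devtest.os.test.get_console(), e.g.
#
#   output = await cmd_exec(console, "cat /proc/mounts",
#                           act_timeout=2, total_timeout=10, echo_cmd=False)
#   if output is None:
#       pass    # command failed, the exception has already been logged
#   else:
#       for line in output:
#           pass    # one entry per line of the command's response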

View file

@ -0,0 +1,4 @@
TOPDIR = ../../../../..
include $(TOPDIR)/make/proj.mk
include $(JWBDIR)/make/py-mod.mk

View file

@ -0,0 +1,4 @@
TOPDIR = ../../../../../..
include $(TOPDIR)/make/proj.mk
include $(JWBDIR)/make/py-mod.mk

View file

@ -0,0 +1,33 @@
# -*- coding: utf-8 -*-
import os
from os.path import basename, dirname, abspath
import asyncio
from jwutils.log import *
from devtest.os.tcf import CaseDir
from devtest.os.test import *
class TcLogin(CaseDir.Case): # export
def __init__(self, login_prompt, root_pw_env_var):
self.login_prompt = login_prompt
self.root_pw_env_var = root_pw_env_var
async def _run(self, env, machine, phase):
console = get_console(env)
if await expect(console, regex="Welcome to Buildroot", subject="prelogin message", act_timeout=60) is None:
return "timed out waiting for prelogin message"
if await expect(console, regex=self.login_prompt, subject="login prompt", act_timeout=3) is None:
return "timed out waiting for login prompt"
slog(NOTICE, "sending username")
await console.write("root\n")
if await expect(console, regex="Password:", subject="password prompt", act_timeout=3) is None:
return "timed out waiting for password prompt"
pw = os.getenv(self.root_pw_env_var, None)
if pw is None:
return "Environment variable " + self.root_pw_env_var + " is empty"
slog(NOTICE, "sending password")
await console.write(pw + "\n")
if await expect(console, regex="~ #", subject="shell prompt", act_timeout=3) is None:
return "timed out waiting for shell prompt"
return None

View file

@ -0,0 +1,63 @@
# -*- coding: utf-8 -*-
import glob
import importlib.util
import inspect
from abc import abstractmethod
import asyncio
from jwutils.log import *
from jwutils.misc import *
from ..TestCaseFactory import TestCaseFactory
from ..TestCase import TestCase as TestCaseBase
from ..TestPhases import TestPhases
from ..Connection import Connection
from pathlib import PurePath
class CaseDir(TestCaseFactory): # export
# just to identify test cases as to be instantiated by this factory
class Case(TestCaseBase):
def __init__(self):
super().__init__()
class CaseWrapper(Case):
def __init__(self, case):
self.__case = case
async def _run(self, env, machine, phase):
return await self.__case.run(env, machine, phase)
def __init__(self, path):
super().__init__(path)
self.__cases = []
for p in path:
slog(INFO, "importing test cases from {}".format(p))
for phase in TestPhases.Phase:
pattern = p + '/' + phase.name.lower() + '/*.py'
slog(INFO, " importing test cases for phase {:<8} from {}".format(phase.name, pattern))
sources = sorted(glob.glob(pattern))
for s in sources:
slog(DEBUG, " found source", s)
spec = importlib.util.spec_from_file_location(s, s) # args are (name, path)
mod = importlib.util.module_from_spec(spec)
spec.loader.exec_module(mod)
members = inspect.getmembers(mod, inspect.isclass)
for name, c in members:
slog(DEBUG, " found member", name)
if c.__module__ != s:
slog(DEBUG, " is not defined in", s, "ignoring")
continue
if inspect.isabstract(c):
slog(DEBUG, " is abstract, ignoring")
continue
if not CaseDir.Case in inspect.getmro(c):
slog(DEBUG, " is not a CaseDir.Case, ignoring")
continue
case = c()
case.phases = [phase]
case.source = s
case.name = name + ' from ' + s
case.short_name = name + '@' + PurePath(s).stem
self.__cases.append(case)
slog(INFO, "adding test cases:", ', '.join([c.name for c in self.__cases]))
def test_cases(self):
return self.__cases

View file

@ -0,0 +1,13 @@
# -*- coding: utf-8 -*-
from ..TestCaseFactory import TestCaseFactory
from ..TcDummy import TcDummy
class Dummy(TestCaseFactory): # export
def __init__(self, path):
super().__init__(path)
self.__cases = [ TcDummy() ]
def test_cases(self):
return self.__cases

View file

@ -0,0 +1,4 @@
TOPDIR = ../../../../..
include $(TOPDIR)/make/proj.mk
include $(JWBDIR)/make/py-mod.mk

View file

@ -0,0 +1,11 @@
This directory contains subclasses of devtest.os.TestCaseFactory. They are
meant to produce lists of test cases, i.e. instances of implementations of the
abstract devtest.os.TestCase class. Think of a TestCaseFactory subclass as a
hook that defines a specific file system / directory format for test case
definitions. You can either put your own factory implementation here and be
done with it, or place it anywhere in PYTHONPATH and add an import statement
to devtest.os.TestCases.py.

TestCaseFactory constructors are passed a list of file system paths to check
for compatible test case definitions. That path list can, for instance, be
populated from the --test-case-path command line option, a colon-separated
search path.
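
For illustration, a minimal factory might look roughly like the sketch below.
It only relies on the TestCaseFactory/TestCase interfaces already used by the
Dummy and CaseDir factories in this directory; TcNoop and NoopFactory are
made-up names:

    # -*- coding: utf-8 -*-
    from ..TestCase import TestCase
    from ..TestCaseFactory import TestCaseFactory

    class TcNoop(TestCase):
        # trivial test case: returning None means "passed"
        async def _run(self, env, machine, phase):
            return None

    class NoopFactory(TestCaseFactory):  # export
        def __init__(self, path):
            # "path" is the list of directories from --test-case-path;
            # this factory ignores it and always yields a single case
            super().__init__(path)
            self.__cases = [ TcNoop() ]

        def test_cases(self):
            return self.__cases

As a concrete example of a directory format, the CaseDir factory in this
directory scans each search path for <path>/<phase name, lower-cased>/*.py and
instantiates every non-abstract CaseDir.Case subclass it finds there.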

View file

@ -0,0 +1,298 @@
# -*- coding: utf-8 -*-
import re
import os
import asyncio
from operator import itemgetter
from jwutils.log import *
from jwutils import Options
from devtest.os import *
from devtest.os.test import *
from .. import TestCase
class ListCmd(TestCase): # export
# ------------------------------------- class Row
class Row:
def field(self, key, default=None):
if key in self.__fields.keys():
return self.__fields[key]
if default is not None:
return default
raise KeyError('No field "{}" in row "{}"'.format(key, self))
def attrib(self, key, default=None):
if self.__attribs is not None:
return self.__attribs.get(key, default)
if default is not None:
return default
raise KeyError('No attrib "{}" in row "{}"'.format(key, self))
# "needed": [ "dummyd", "v3.23" ]
# "key_": [ feature ]
def check_first_match(self, key_, features):
if self.__attribs is None:
return False
for feature in features:
for key, val in self.__attribs.items():
if type(val) == bool:
if key_ == key:
return val
continue
if type(val) != list:
raise Exception('Found attribute {}="{}" of unexpected value type {}'.format(key, val, type(val)))
if feature in val: # first match
return True
return False
def check_attrib(self, key, features):
val = self.attrib(key, False)
if type(val) == bool:
return val
if type(val) != list:
raise Exception('Found attribute {}="{}" of unexpected value type {}'.format(key, val, type(val)))
for feature in features:
if feature in val:
return True
return False
        def cmp(self, other):
            # three-way comparison over the decisive fields: -1, 0 or 1
            decisive = self.parent.decisive
            for field in decisive:
                a = self.field(field)
                b = other.field(field)
                if a < b:
                    return -1
                if a > b:
                    return 1
            return 0
def to_str(self, only_values=False, quotes=None, fields=['fields']):
use_fields = None
for f_set_name in fields:
use_fields = self.parent.row_info(f_set_name, default=False)
if use_fields != False:
break
if use_fields == False:
raise Exception("None of the fields wanted for formatting are available: {}".format(fields))
            q = '"' if quotes == True else ''
if only_values:
r = ', '.join(['{}{}{}'.format(q, self.field(f), q) for f in use_fields])
else:
r = ', '.join(['{}={}{}{}'.format(f, q, self.field(f), q) for f in use_fields])
if self.__attribs is not None and len(self.__attribs) > 0:
r += " | " + str(self.__attribs)
return r
@property
def name(self):
return '(' + self.to_str(fields=['name-fields', 'cmp-fields'], only_values=True) + ')'
def __lt__(self, other):
return self.cmp(other) < 0
def __le__(self, other):
return self.cmp(other) <= 0
def __eq__(self, other):
return self.cmp(other) == 0
def __ne__(self, other):
return self.cmp(other) != 0
def __gt__(self, other):
return self.cmp(other) > 0
def __ge__(self, other):
return self.cmp(other) >= 0
def __str__(self):
return self.to_str()
        def __repr__(self):
return self.to_str()
def __format__(self, fmt):
return self.to_str()
def __init__(self, parent, line, fields, attribs):
self.parent = parent
self.__line = line
self.__fields = fields
self.__attribs = attribs
def __getitem__(self, key):
return self.field(key)
def __hash__(self):
decisive = self.parent.decisive
return hash(tuple([self.field(field, '') for field in decisive]))
# ------------------------------------- class ListCmd methods
def __init__(self, refpath, act_timeout=2, total_timeout=None, write_response=None):
self.refpath = refpath
self.act_timeout = act_timeout
self.total_timeout = total_timeout
self.__decisive = None
self.__row_info = None
        self.__write_response = False
        if write_response is not None:
self.__write_response = write_response
else:
key = "JW_DEVTEST_WRITE_LIST_CMD_RESPONSE_DEFAULT"
val = os.getenv(key)
if val is not None:
val = val.lower()
if val in ['true', '1', 'y', 'yes' ]:
self.__write_response = True
elif val in ['false', '0', 'n', 'no' ]:
self.__write_response = False
else:
raise Exception('Invalid value "{}" of environment variable "{}"'.format(val, key))
# override this
def _row_info(self):
return {
'cmd': "/bin/ps",
# PID USER VSZ STAT COMMAND
# 38 root 0 SW [kjournald]
# 40 root 2300 S {rc.start} /bin/sh /etc/rc.start
'regex': "^ *([0-9]+) +([a-z_][a-z0-9_-]*) +([0-9]+) +([A-Z]+) +(.+)",
'fields': [ 'pid', 'user', 'size', 'stat', 'cmd'],
'cmp-fields': [ 'user', 'cmd'],
'name-fields': [ 'cmd'],
}
def _filter(self, output):
return output
def row_info(self, key, default=None):
        if self.__row_info is None:
            info = self._row_info()
            if isinstance(info, dict):
self.__row_info = info
else: # be backwards compatible and swallow any iterable container
self.__row_info = dict()
keys = ['cmd', 'regex', 'fields', 'cmp-fields', 'name-fields']
for i in range(0, len(info)):
self.__row_info[keys[i]] = info[i]
if not key in self.__row_info.keys():
if default is not None:
return default
raise Exception('Required row info "{}" missing'.format(key))
return self.__row_info[key]
@property
def decisive(self):
        if self.__decisive is None:
self.__decisive = self.row_info('cmp-fields', True)
return self.__decisive
def parse(self, lines):
def parse_line(line, fields):
slog(DEBUG, "parsing line >%s<" % line)
parsed = line.split('# ')
line = parsed[0].strip()
match = regex.search(line)
if not match:
slog(INFO, "Ignoring unparseable line >%s<" % line)
return None
attribs = None if len(parsed) <= 1 else Options(parsed[1])
for k in range(0, len(fields)):
slog(DEBUG, " match {} = >{}<".format(k+1, match.group(k+1)))
fields = {fields[k]: match.group(k+1) for k in range(0, len(fields)) }
return self.Row(self, line=line, fields=fields, attribs=attribs)
fields = self.row_info('fields')
re_str = self.row_info('regex')
try:
regex = re.compile(re_str)
except Exception as e:
raise Exception('Failed to compile regex ({}): >{}<'.format(e, str(re_str)))
return list(filter(None, [parse_line(l, fields) for l in lines]))
def _eval(self, output, features, header=None):
def format_rows(rows, quotes=False):
def cmp(r1, r2):
for k in sort_keys:
if r1[k] < r2[k]:
return True
return False
sort_keys = []
key_sets = ['name-fields', 'cmp-fields']
for s in key_sets:
for k in self.row_info(s, []):
if k not in sort_keys:
sort_keys.append(k)
#return sorted([ row.to_str(fields=['cmp-fields', 'fields'], only_values=True, quotes=quotes) for row in rows], key=cmp)
return [ row.to_str(fields=['cmp-fields', 'fields'], only_values=True, quotes=quotes) for row in sorted(rows, key=itemgetter(*sort_keys))]
if self.__write_response and not os.path.exists(self.refpath):
ref_lines = []
else:
with open(self.refpath, "r") as f:
ref_lines = f.readlines()
output = self._filter(output)
if self.__write_response:
response_path = self.refpath + '.last'
with open(response_path, "w") as f:
slog(INFO, 'Writing response to "{}"'.format(response_path))
if header:
f.write(header)
f.write('\n'.join(output))
reference = set(self.parse(ref_lines))
actual = set(self.parse(output))
# -- Attributes:
        # default: error-if-not-present (no matching feature, same as "needed", see below)
# ignore: no-error-if-present and no-error-if-not-present
# bad: error-if-present
ignore = set([row for row in reference if row.check_first_match('ignore', features)])
bad = set([row for row in reference if row.check_first_match('bad', features)])
slog_m(DEBUG, "--- reference:\n", format_rows(reference))
slog_m(DEBUG, "--- ignore:\n", format_rows(ignore))
slog_m(DEBUG, "--- bad:\n", format_rows(bad))
# the "needed" attribute is actually just as good as any string: it
# gets ignored, leading to the line being seen as mandatory
needed = reference - ignore - bad
missing = needed - actual
too_many = actual - needed - ignore
ignored = ignore - (ignore - actual)
slog_m(NOTICE, "--- needed:\n", format_rows(needed))
slog_m(NOTICE, "--- actual:\n", format_rows(actual))
slog_m(NOTICE, "--- ignored:\n", format_rows(ignored))
r = []
if len(missing):
slog_m(ERR, "--- should be there but are not:\n", format_rows(missing, quotes=True))
r.append("missing:" + ', '.join([row.name for row in missing]))
if len(too_many):
slog_m(ERR, "--- too many:\n", format_rows(too_many, quotes=True))
r.append("too many: " + ', '.join([row.name for row in too_many]))
if not len(r):
return None
return ' and '.join(r)
async def _run(self, env, machine, phase):
console = get_console(env)
cmd = self.row_info('cmd')
output = await cmd_exec(console, cmd, act_timeout=self.act_timeout,
total_timeout=self.total_timeout, echo_cmd=False)
if output is None:
return "Failed to run command: " + cmd
header = '# ' + cmd + '\n' if self.__write_response else None
return self._eval(output, env.features, header=header)
def dump(self, prio, *args, **kwargs):
caller = kwargs['caller'] if 'caller' in kwargs.keys() else get_caller_pos(1)
with open(self.refpath, "r") as f:
reference = self.parse(f.readlines())
for l in reference:
slog(NOTICE, "{}".format(l), caller=caller)
def test(self, output, features):
        return self._eval(output, features)

View file

@ -0,0 +1,4 @@
TOPDIR = ../../../../..
include $(TOPDIR)/make/proj.mk
include $(JWBDIR)/make/py-mod.mk

View file

@ -0,0 +1 @@
This submodule contains utilities to be used by test case implementations.

View file

@ -0,0 +1,71 @@
# -*- coding: utf-8 -*-
import re
import asyncio
from jwutils.log import *
from devtest.os.test import *
from devtest.os.misc import *
from ..TestCase import TestCase
class TcScript(TestCase): # export
def __init__(self, script=None, inpath=None, tmppath=None, support_files=None, show_files=False, *args):
self.script = script
if inpath is not None:
with open(inpath, 'r') as f:
                self.script = f.read()
if not self.script:
raise Exception("tried to run script command without script")
self.args = args
self.tmppath = tmppath
self.act_timeout = 2
self.total_timeout = 10
self.support_files = support_files
self.show_files = show_files
async def __create(self, console, path, contents, mode=None):
lines = [
'cat > "{}"\n'.format(path),
contents,
'\003' # ctrl-c
]
for l in lines:
#await console.write(l)
await cmd_exec(console, l)
if mode:
await cmd_exec(console, 'chmod {:o} "{:s}"\n'.format(mode, path))
#await console.write('chmod {:o} "{:s}"\n'.format(mode, path))
async def _run(self, env, machine, phase):
console = get_console(env)
tmppath = self.tmppath
if not tmppath:
tmppath = "/tmp/script.tmp"
#raise Exception("tried to run script without temporary path")
files = [ (0o755, tmppath, self.script) ]
if self.support_files:
files.extend(self.support_files)
for f in files:
await self.__create(console, f[1], f[2], f[0])
if self.show_files:
slog_m(WARNING, await cmd_exec(console, "cat {}\n".format(f[1]), act_timeout=self.act_timeout, total_timeout=self.total_timeout))
cmd = "if {}; then echo PASS; else echo FAIL; fi # ignore me\n".format(tmppath)
rr = await cmd_exec(console, cmd,
act_timeout=self.act_timeout, total_timeout=self.total_timeout)
if not rr:
return "failed to run command >{}<".format(cmd)
for f in files:
await cmd_exec(console, 'unlink {}\n'.format(f[1]))
#slog(WARNING, "rr=", rr)
failures = []
for l in rr:
slog(INFO, "{}".format(l))
if re.search("# ignore me", l):
continue
if re.search("FAIL", l):
l = re.sub("FAIL[: ]*", "", l)
if not len(failures):
failures.append("script test failed")
if len(l):
failures.append(l)
return '\n'.join(failures) if len(failures) else None

View file

@ -0,0 +1,12 @@
# -*- coding: utf-8 -*-
from .GrepLog import GrepLog
class CheckMounts(GrepLog): # export
def __init__(self, mounts, act_timeout=2, total_timeout=None):
regexes = []
for mount in mounts:
            regexes.append(r'^{} \+{} \+'.format(*mount))
super().__init__(regex=regexes, log_glob='/proc/mounts',
act_timeout=act_timeout, total_timeout=total_timeout)

View file

@ -0,0 +1,35 @@
# -*- coding: utf-8 -*-
from os.path import *
from jwutils.log import *
from devtest.os import misc, test, tcf, TestCase
class CompareCmdOutput(TestCase): # export
def __init__(self, cmd=None, ref=None, refpath=None, act_timeout=5, total_timeout=5, fail_missing=False, fail_unknown=False):
self._cmd = cmd
self._act_timeout = act_timeout
self._total_timeout = total_timeout
self._fail_missing = fail_missing
self._fail_unknown = fail_unknown
        self._refpath = refpath
        self._ref = None
        if refpath is not None:
            with open(refpath, 'r') as fd:
                self._ref = fd.read().splitlines()
        elif isinstance(ref, (list, set)):
            self._ref = ref
        elif isinstance(ref, str):
            self._ref = ref.splitlines()
if self._ref is None:
raise Exception('No reference to compare output of command to: {}'.format(self._cmd))
def _filter(self, output):
return output
async def _run(self, env, machine, phase):
console = test.get_console(env)
ret = await misc.cmd_exec(console, self._cmd, log_act=INFO,
act_timeout=self._act_timeout, total_timeout=self._total_timeout)
        if ret is None:
            return "Failed to run command: " + self._cmd
        if self._cmd in ret:
            ret.remove(self._cmd)
ret = self._filter(ret)
return test.diff_iterables(self._ref, ret, fail_missing=self._fail_missing, fail_unknown=self._fail_unknown)

View file

@ -0,0 +1,10 @@
# -*- coding: utf-8 -*-
from .GrepLog import GrepLog
class ConfigUpdater(GrepLog): # export
def __init__(self, regex = None, log_glob=None, act_timeout=2, total_timeout=None):
if regex is None:
            regex = r'Configuration updated (version=.*)\|Configuration (version=.*) is up-to-date (target-version=.*)'
super().__init__(regex=regex, log_glob=log_glob, act_timeout=act_timeout, total_timeout=total_timeout)

View file

@ -0,0 +1,34 @@
# -*- coding: utf-8 -*-
import asyncio
from jwutils.log import *
from devtest.os import TestCase
from devtest.os.test import *
from devtest.os.Connection import Connection
from datetime import datetime, timedelta
class Delay(TestCase): # export
def __init__(self, seconds, drain_console=True):
self.__seconds = seconds
self.__drain_console = drain_console
async def _run(self, env, machine, phase):
rest = self.__seconds
slog(NOTICE, "waiting %0.2f seconds" % rest)
if not self.__drain_console:
await asyncio.sleep(rest)
return None
console = get_console(env)
end = datetime.now() + timedelta(seconds=rest)
flags = Connection.Flags.ReadStripNewline|Connection.Flags.ReadDecodeToString
while True:
rest = (end - datetime.now()).total_seconds()
if rest <= 0:
return None
            buf = await console.read(rest, flags=flags)
            if buf is None:
                continue
            slog(INFO, "read from console: >%s<" % buf, only_printable=True)
            if len(buf) == 0:
                return "reading from console failed"

View file

@ -0,0 +1,23 @@
# -*- coding: utf-8 -*-
import asyncio
from jwutils.log import *
from devtest.os import TestCase
from devtest.os.test import *
from devtest.os.misc import *
class DisableIPv6(TestCase): # export
async def _run(self, env, machine, phase):
path = '/proc/sys/net/ipv6/conf/all/disable_ipv6'
cmd = 'cat ' + path
ret = await cmd_exec(get_console(env), cmd, log_act=INFO, act_timeout=1)
if ret is None:
return 'Failed to run "{}"'.format(cmd)
if len(ret) < 2:
return 'Failed to read "{}"'.format(path)
if ret[-2] != '1': # -1 = prompt
for s in ret:
slog(ERR, 'read "{}"'.format(s))
return '{} contains "{}"'.format(path, ret[-2])
return None

View file

@ -0,0 +1,21 @@
# -*- coding: utf-8 -*-
from devtest.os import TestCase
from devtest.os.test import *
class Eth1(TestCase): # export
def __init__(self, regex = None, act_timeout=30, total_timeout=None):
self.regex = regex
self.act_timeout = act_timeout
self.total_timeout = total_timeout
async def _run(self, env, machine, phase):
console = get_console(env)
regex = self.regex
if regex is None:
regex = "Searching for eth1 driver: found"
if await expect(console, regex=regex, subject="load eth1 driver",
act_timeout=self.act_timeout, total_timeout=self.total_timeout) is None:
            return "timed out waiting for the eth1 driver to load"
return None

View file

@ -0,0 +1,24 @@
# -*- coding: utf-8 -*-
from devtest.os import TestCase
from devtest.os.test import *
class ExpectClock(TestCase): # export
def __init__(self, regex = None, act_timeout=30, total_timeout=None):
self.regex = regex
self.act_timeout = act_timeout
self.total_timeout = total_timeout
async def _run(self, env, machine, phase):
console = get_console(env)
regex = self.regex
if regex is None:
            # TODO: This doesn't say much besides the fact that the script ran.
            #       It doesn't get much better on Qemu either: some virtual
            #       boards don't have an RTC (so far).
regex = "Checking clock settings:"
if await expect(console, regex=regex, subject="set system time",
act_timeout=self.act_timeout, total_timeout=self.total_timeout) is None:
return "timed out waiting for setting of system time"
return None

View file

@ -0,0 +1,17 @@
# -*- coding: utf-8 -*-
from ..ListCmd import ListCmd
class Files(ListCmd): # export
# Override as needed.
def _row_info(self):
return (
r"find /etc/ | xargs ls -ld --color=never 2>/dev/null",
# -rw------- 1 root root 10819 Dec 16 2019 /etc/ssl/openssl.cnf
# -rw-r--r-- 1 root www 1479 May 15 18:22 /etc/comsystem/db2js_svcpumpdta.Config
            r"^ *([cdlbrwxsS-]+) +([0-9]+) +([^ ]+) +([^ ]+) +([0-9]+) +([^ ]+) +([0-9]+) +([0-9:]+) +(.+)",
[ 'mode', 'inodes', 'user', 'group', 'size', 'month', 'day', 'year_time', 'path'],
[ 'mode', 'user', 'group', 'path'],
[ 'path'],
)

View file

@ -0,0 +1,31 @@
# -*- coding: utf-8 -*-
import asyncio
from jwutils.log import *
from devtest.os import TestCase, misc
from devtest.os.test import *
class GrepLog(TestCase): # export
def __init__(self, regex, log_glob=None, act_timeout=2, total_timeout=None):
self.regex = regex
self.log_glob = log_glob if log_glob is not None else '/var/log/messages'
self.act_timeout = act_timeout
self.total_timeout = total_timeout
async def _run(self, env, machine, phase):
console = get_console(env)
rx_list = self.regex if type(self.regex) == list else [ self.regex ]
for rx in rx_list:
if self.log_glob == 'journal':
ret = await misc.cmd_exec(console, 'journalctl -b | grep "{}"'.format(rx),
act_timeout=self.act_timeout, total_timeout=self.total_timeout)
else:
ret = await grep_log(console, rx, log_glob=self.log_glob,
act_timeout=self.act_timeout, total_timeout=self.total_timeout)
if ret is None:
return "{}: failed to grep for regex >{}<".format(self.log_glob, rx)
if len(ret) == 0:
return "{}: no match for regex >{}<".format(self.log_glob, rx)
slog_m(INFO, 'found match "{}"'.format('\n'.join(ret)))
return None

View file

@ -0,0 +1,10 @@
# -*- coding: utf-8 -*-
from .GrepLog import GrepLog
class LedUtils(GrepLog): # export
def __init__(self, regex = None, log_glob=None, act_timeout=2, total_timeout=None):
if regex is None:
regex = 'Successfully turned off LED after boot'
super().__init__(regex=regex, log_glob=log_glob, act_timeout=act_timeout, total_timeout=total_timeout)

View file

@ -0,0 +1,4 @@
TOPDIR = ../../../../../..
include $(TOPDIR)/make/proj.mk
include $(JWBDIR)/make/py-mod.mk

View file

@ -0,0 +1,10 @@
# -*- coding: utf-8 -*-
from .GrepLog import GrepLog
class PostStartup(GrepLog): # export
def __init__(self, regex = None, log_glob=None, act_timeout=2, total_timeout=None):
if regex is None:
regex = 'done with post-startup start'
super().__init__(regex=regex, log_glob=log_glob, act_timeout=act_timeout, total_timeout=total_timeout)

View file

@ -0,0 +1,17 @@
# -*- coding: utf-8 -*-
from ..ListCmd import ListCmd
class MountList(ListCmd): # export
# Override as needed.
def _row_info(self):
return (
#r"/bin/cat /proc/mounts | sed 's/,*size=[0-9]\+[a-z]*//g; s/,*nr_inodes=[0-9]\+//g; s/,,*/,/g'",
r"/bin/cat /proc/mounts",
# proc /proc proc rw,relatime 0 0
"^ *([a-zA-Z0-9/]+) (/[a-zA-Z0-9/]+) +(\S+) +(\S+) +([0-9]+) +([0-9]+)",
[ 'dev', 'mp', 'type','opts','dump', 'fsck'],
[ 'dev', 'mp', 'type','opts' ],
[ 'mp', ]
)

View file

@ -0,0 +1,18 @@
# -*- coding: utf-8 -*-
from ..ListCmd import ListCmd
class ProcNetTcp(ListCmd): # export
# Override as needed.
def _row_info(self):
return (
r"/bin/cat /proc/net/tcp",
            #   0: 00000000:0050 00000000:0000 0A 00000000:00000000 00:00000000 00000000     0        0 614 1 ce0a3580 300 0 0 2 -1
# sl local_address rem_address st tx_queue rx_queue tr:tm->when retrnsmt uid timeout inode rest
"^ *([0-9]+): ([0-9A-F]{8}):([0-9A-F]{4}) ([0-9A-F]{8}):([0-9A-F]{4}) (0A) ([0-9A-F]{8}):([0-9A-F]{8}) ([0-9A-F]{2}:[0-9A-F]{8}) ([0-9A-F]{8}) +([0-9]+) +([0-9]+) +([0-9]+) +(.*)",
            [ 'sl', 'local_address','local_port','rem_address','rem_port','st','tx_queue', 'rx_queue', 'tr_tm_when', 'retrnsmt', 'uid', 'timeout','inode', 'rest' ],
[ 'local_address','local_port' ],
[ 'local_port' ]
)

View file

@ -0,0 +1,18 @@
# -*- coding: utf-8 -*-
from ..ListCmd import ListCmd
class ProcSysvipcShm(ListCmd): # export
# Override as needed.
def _row_info(self):
return (
r"/bin/cat /proc/sysvipc/shm",
# key shmid perms size cpid lpid nattch uid gid cuid cgid atime dtime ctime rss swap
# 0 20 777 8294400 2575 3440 2 824 207 824 207 1589785156 1589785156 1589785149 8294400 0
r"^ *([0-9]+) +([0-9]+) +([0-9]+) +([0-9]+) +([0-9]+) +([0-9]+) +([0-9]+) +([0-9]+) +([0-9]+) +([0-9]+) +([0-9]+) +([0-9]+) +([0-9]+) +([0-9]+)",
[ 'key', 'shmid', 'perms', 'size', 'cpid', 'lpid', 'nattch', 'uid', 'gid', 'cuid', 'cgid', 'atime', 'dtime', 'ctime'],
[ 'perms', 'size' ]
)

View file

@ -0,0 +1,18 @@
# -*- coding: utf-8 -*-
from ..ListCmd import ListCmd
class ProcessList(ListCmd): # export
# Override as needed.
def _row_info(self):
return (
"/bin/ps",
# PID USER VSZ STAT COMMAND
# 38 root 0 SW [kjournald]
# 40 root 2300 S {rc.start} /bin/sh /etc/rc.start
"^ *([0-9]+) +([a-z_][a-z0-9_-]*) +([0-9]+) +([A-Z]+) +(.+)",
[ 'pid', 'user', 'size', 'stat', 'cmd'],
[ 'user', 'cmd']
)

View file

@ -0,0 +1,24 @@
# -*- coding: utf-8 -*-
import asyncio
from jwutils.log import *
from devtest.os import TestCase
from devtest.os.test import *
from devtest.os.misc import *
class RegenSSL(TestCase): # export
def __init__(self, rcpath=None, act_timeout=2, total_timeout=None):
self.rcpath = rcpath if rcpath is not None else '/usr/sbin/rccomsystem-ssl'
self.act_timeout = act_timeout
self.total_timeout = total_timeout
async def _run(self, env, machine, phase):
console = get_console(env)
pattern = 'FAIL'
cmd = '{} regenerate || echo {}\n'.format(self.rcpath, pattern)
ret = await cmd_exec(console, cmd, log_act=INFO,
act_timeout=self.act_timeout, total_timeout=self.total_timeout)
if ret and len(ret) > 1 and ret[-1].find(pattern) != -1:
return "failed to regenerate SSL keys"
return None

View file

@ -0,0 +1,87 @@
# -*- coding: utf-8 -*-
import asyncio
from ..TcScript import TcScript
_script = """#!/bin/sh
_cat() {
sed 's/^[ ]*|//'
}
config() {
_cat <<-EOT
|[global]
|blah = blub
|thing = thang
|special = ^+=234 ewep
|
|[not.so.global]
| # comment = nothing
|another = "value"
|
|[complete_section]
|give me text
|# this is a comment
|
|another = value after empty line
EOT
}
reference() {
_cat <<-EOT
|global.blah|blub|0
	|global.thing|thang|0
|global.special|^+=234 ewep|0
|not.so.global||1
|not.so.global.||1
|not.so.global.comment||1
|not.so.global.nothing||1
|not.so.global.another|value|0
|complete_section||1
|complete_section.another|value after empty line|0
EOT
}
run_tests() {
pass() {
echo "PASS: $@"
}
fail() {
echo "FAIL: $@"
}
local tkey tval tret label val
IFS="|"
reference | while read tkey tval tret; do
label="test '$tkey' = '$tval' -> $tret"
val=`sc_config -f $conf -e "$tkey"`
[ $? = "$tret" ] || {
fail "wrong return code for $label, expected $tret"
continue
}
[ "$val" = "$tval" ] || {
fail "wrong value '$val' for $label, expected '$tval'"
continue
}
pass $label
done | tee /dev/stderr | grep -q FAIL && return 1
return 0
}
. /usr/sbin/scutils
conf=/tmp/test.conf
config > $conf
run_tests 2>&1
r=$?
rm -f $conf
exit $r
"""
class ScUtilsIni(TcScript): # export
def __init__(self):
super().__init__(script=_script, show_files=True)

View file

@ -0,0 +1,10 @@
# -*- coding: utf-8 -*-
from .GrepLog import GrepLog
class WifiLed(GrepLog): # export
def __init__(self, regex = None, log_glob=None, act_timeout=2, total_timeout=None):
if regex is None:
regex = "done, successfully started wifi-led application"
super().__init__(regex=regex, log_glob=log_glob, act_timeout=act_timeout, total_timeout=total_timeout)

View file

@ -0,0 +1,33 @@
# -*- coding: utf-8 -*-
import re
import asyncio
from jwutils.log import *
from devtest.os.misc import *  # assumed location of cmd_exec(), used by run() below
class ListCmd: # export
class Row:
def __init__(self, **fields):
self.__dict__.update(fields)
def __init__(self, cmd, regex, fields):
self.cmd = cmd
self.regex = re.compile(regex)
self.fields = fields
def parse(self, lines, row_class = Row):
def parse_line(line):
slog(DEBUG, "parsing line >%s<" % line)
match = self.regex.search(line)
if not match:
return None
for k in range(0, len(self.fields)):
slog(DEBUG, " match {} = >{}<".format(k+1, match.group(k+1)))
return row_class(line=line, **{self.fields[k]: match.group(k+1)
for k in range(0, len(self.fields))})
return list(filter(None, [parse_line(l) for l in lines]))
# untested
async def run(self, console, act_timeout=None, total_timeout=None):
return self.parse(await cmd_exec( console, self.cmd + ' # ignore me\n',
act_timeout=act_timeout, total_timeout=total_timeout))

View file

@ -0,0 +1,4 @@
TOPDIR = ../../../../../..
include $(TOPDIR)/make/proj.mk
include $(JWBDIR)/make/py-mod.mk

View file

@ -0,0 +1,147 @@
# -*- coding: utf-8 -*-
import asyncio
import time
import re
from jwutils.log import *
from ..misc import *
from ..Connection import Connection
from ..Connections import Connections
async def expect(conn, regex=None, subject=None, act_timeout=0.1, total_timeout=None): # export
# regex is a keyword-argument so that other arguments to support other
# checks can be added later
if regex is None:
raise Exception("passed empty pattern to test.expect()")
deadline = None
if total_timeout is not None:
deadline = time.time() + total_timeout
    if subject is None:
subject = ">" + regex + "<"
while True:
flags = Connection.Flags.ReadStripNewline|Connection.Flags.ReadDecodeToString
timeout = act_timeout
now = time.time()
if deadline is not None:
timeout = min(deadline - now, timeout)
buf = await conn.read(timeout, flags=flags)
if buf is None:
if deadline is not None and now > deadline:
slog(INFO, "reading from console timed out after %d seconds total" % total_timeout)
else:
slog(INFO, "reading from console timed out after %d seconds of inactivity" % act_timeout)
return None
slog(INFO, "read from console: >%s<" % buf, only_printable=True)
if len(buf) == 0:
raise Exception('connection "{}" reset while waiting on {}'.format(conn, subject))
if re.search(regex, buf):
slog(NOTICE, "found %s" % subject)
return buf
raise Exception("never reached")
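# Illustrative usage (sketch only; this mirrors how test cases such as TcLogin
# call it):
#
#   if await expect(console, regex="login:", subject="login prompt",
#                   act_timeout=3, total_timeout=60) is None:
#       return "timed out waiting for login prompt"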
async def cmd_expect(conn, cmd, subject=None, regex=None, act_timeout=0.1): # export
slog(NOTICE, 'sending command "{}"'.format(str(cmd).rstrip("\n")))
res = await cmd_exec(conn, cmd)
if not isinstance(res, list):
msg = 'failed to send command over connection {}'.format(conn)
slog(ERR, 'FAIL: ' + msg)
return msg
    if await expect(conn, regex=regex, subject=subject, act_timeout=act_timeout):
slog(NOTICE, 'PASS: found pattern "{}" in command output'.format(regex))
return None
r = "FAIL: failed to find {} in command output".format(regex)
slog(ERR, r)
return r
async def grep_log(conn, regex, log_glob='/var/log/messages', act_timeout=0.1, total_timeout=None): # export
cmd = "ls {} | tail -1".format(log_glob)
res = await cmd_exec(conn, cmd, act_timeout=act_timeout, total_timeout=total_timeout)
if res is None or len(res) < 2:
raise Exception("failed to get log file {}".format(log_glob))
log = res[1]
slog(DEBUG, 'found log file path "{}"'.format(log))
cmd = 'grep "{}" {}'.format(regex, log)
res = await cmd_exec(conn, cmd, act_timeout=act_timeout, total_timeout=total_timeout)
if res is None or len(res) < 2:
raise Exception("{} failed".format(cmd))
r = []
# remove everything up to and including "grep .* log" from array, assume last line is a prompt
for line in reversed(res[:-1]):
if line.find(log) != -1:
return r
r.insert(0, line)
for line in res:
slog(ERR, "read >{}<".format(line))
raise Exception("{} returned garbage".format(cmd))
def diff_iterables(reference, actual, ignore = None, unneeded = None, fail_unknown = False, fail_missing = True): # export
reference = set(reference)
actual = set(actual)
needed = reference
if ignore is not None:
ignore = set(ignore)
needed -= ignore
actual -= ignore
if unneeded is not None:
unneeded = set(unneeded)
needed -= unneeded
both = needed | actual
only_n = needed - actual
only_a = actual - needed
needed = sorted(list(needed))
actual = sorted(list(actual))
only_n = sorted(list(only_n))
only_a = sorted(list(only_a))
slog_m(NOTICE, "--- needed:\n", sorted(list(needed)))
if ignore is not None:
slog_m(NOTICE, "--- ignored:\n", sorted(list(ignore)))
slog_m(NOTICE, "--- actual:\n", sorted(list(actual)))
r = []
if fail_missing and len(only_n):
slog_m(ERR, "--- should be there but are not:\n", only_n)
r.append("missing (" + ', '.join(only_n) + ')')
if fail_unknown and len(only_a):
slog_m(ERR, "--- too many:\n", only_a)
r.append("too many (" + ', '.join(only_a) + ')')
if not len(r):
return None
return ' and '.join(r)
def get_console(env, name=None, require=True): # export
# need to copy this because otherwise the iterator is not reset
connections = Connections(env.connections)
for c in connections:
if name is not None and hasattr(c.info, 'name') and c.info.name == name:
return c.instance
if c.info.proto == Connection.Proto.Console:
return c.instance
# FIXME: This whole proto == Console thing is BS: "Console" is not a protocol
if c.info.proto == Connection.Proto.Serial:
return c.instance
if require:
raise Exception("no console connection among connections: {}".format(', '.join(
[ '{}'.format(c.instance) for c in env.connections ])))
return None
def get_connection(env, name, require=True): # export
# need to copy this because otherwise the iterator is not reset
connections = Connections(env.connections)
for c in connections:
if hasattr(c.info, 'name') and c.info.name == name:
return c.instance
if require:
raise Exception('no connection named "{}" among connections: {}'.format(
name, ', '.join([ '{}'.format(c.instance) for c in env.connections ])))
return None
# untested
def get_connections(env, proto=None, require=True): # export
r = env.connections if proto is None else [c.instance for c in env.connections if c.info.proto == proto]
if require and len(r) == 0:
        raise Exception("No connection of %s type" % (proto.name if proto else "any"))
return r