mirror of
ssh://git.janware.com/srv/git/janware/proj/jw-devtest
synced 2026-01-15 02:22:56 +01:00
Add test.cases.Audit
Signed-off-by: Jan Lindemann <jan@janware.com>
This commit is contained in:
parent
dc40decb96
commit
ca3b597341
1 changed files with 330 additions and 0 deletions
330
src/python/devtest/os/test/cases/Audit.py
Normal file
330
src/python/devtest/os/test/cases/Audit.py
Normal file
|
|
@ -0,0 +1,330 @@
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
import copy
import json
import os
import re
from collections import OrderedDict

from jwutils import multi_regex_edit, Options
from jwutils.log import *

from ..ListCmd import ListCmd
|
||||||
|
|
||||||
|
class Audit(ListCmd): # export
|
||||||
|
|
||||||
|
class Row(ListCmd.Row):
|
||||||
|
|
||||||
|
def __init__(self, parent, line, fields, attribs, types):
    """Audit row: a ListCmd.Row carrying, in addition, the list of audit
    record types (e.g. SYSCALL, PATH, PROCTITLE) merged into this row."""
    super().__init__(parent, line, fields, attribs)
    # Record types seen in the chunk this row was parsed from.
    # NOTE: name-mangles to _Row__types; works because the parent class
    # is also named Row.
    self.__types = types
    #assert(len(self.__types) < 7)
|
||||||
|
|
||||||
|
# override this, because some rows don't have all required keys
|
||||||
|
# override this, because some rows don't have all required keys
def to_str(self, only_values=False, quotes=None, fields=('fields',)):
    """Format the row as 'key: value' pairs (or bare values).

    :param only_values: emit only the values, no keys.
    :param quotes: if True, wrap string values in double quotes.
    :param fields: iterable of field-set names; the first set the parent
        knows about (via row_info) restricts which keys are printed.
        None means print all keys.
    :return: formatted string; '' when no key survived the filter.
    """
    # Pick the first field set the parent actually provides.
    use_fields = None
    if fields is not None:
        for f_set_name in fields:
            use_fields = self.parent.row_info(f_set_name, throw=False)
            if use_fields is not None:
                break
    #if use_fields is None:
    #    raise Exception("None of the fields wanted for formatting are available: {}".format(fields))
    # Quote string values only when explicitly requested.  (The original
    # expression had two alternative branches that both yielded ''.)
    qq = '"' if quotes == True else ''
    parts = []
    # NOTE: self.__fields / self.__attribs mangle to _Row__fields /
    # _Row__attribs, set by the parent class (also named Row).
    for key, val in self.__fields.items():
        if use_fields is not None and key not in use_fields:
            continue
        q = qq if isinstance(val, str) else ''
        if only_values:
            parts.append('{}{}{}'.format(q, self.field(key), q))
        else:
            parts.append('{}{}{}: {}{}{}'.format(q, key, q, q, self.__fields[key], q))
    if not parts:
        # FIX: removed debug leftover `raise Exception("Here I am")`,
        # which made the intended empty-string fallback unreachable.
        self.dump(WARNING, "Returning empty string for row")
        return ''
    r = ', '.join(parts)
    if self.__attribs is not None and len(self.__attribs) > 0:
        r += " # " + str(self.__attribs)
    return r
|
||||||
|
|
||||||
|
def persist_keys(self):
    """Return the list of field keys worth persisting for this row.

    Starts from the parent's 'cmp-fields' set and adapts it to the
    record types / syscall of this row (e.g. 'exe' is re-added for
    ANOM_ABEND and setsockopt records).
    """
    #return self.field_keys()
    ret = copy.deepcopy(self.parent.row_info('cmp-fields'))
    try:
        ret.remove('exe')
    except ValueError:  # 'exe' not in the cmp-field set — fine
        pass
    t = self['types']
    if 'ANOM_ABEND' in t:
        ret.append('exe')
        return ret
    if 'SYSCALL' in t:
        if 'ANOM_PROMISCUOUS' in t:
            return ret
        sc = self['syscall']
        if 'name' in self.field_keys():
            if sc in ['fchown32', 'fchown', 'chown', 'chown32']:
                return ['types', 'proctitle', 'syscall', 'name', 'a0', 'a1', 'a2', 'exit', 'euid']
            # FIX: a missing comma ('chmod32' 'fchmod32') silently
            # concatenated to the bogus literal 'chmod32fchmod32', so
            # neither chmod32 nor fchmod32 ever matched here.
            if sc in ['chmod', 'chmod32', 'fchmod32', 'fchmod']:
                return ['types', 'proctitle', 'syscall', 'name', 'a0', 'a1', 'a2', 'exit', 'euid']
        if sc in ['setsockopt']:  # compensate missing proctitle field
            ret.append('exe')
    return ret
|
||||||
|
|
||||||
|
def __init__(self, *args, **kwargs):
    """Set up the edit rules for SYSCALL records, then defer to ListCmd.

    All positional and keyword arguments are passed through to the
    parent constructor unchanged.  (A dead local read of
    kwargs['filter_retvals'] was removed; kwargs were never modified.)
    """
    self.__multi_regex = [
        ('match', r'type=SYSCALL'),
        # normalize pointers so runs are comparable
        ('sub', r'0x[0-9a-f]+', '0x<ptr>'),
        # FIX: raw string, and '[^)+]' -> '[^)]+'.  The original class
        # matched exactly one character that is neither ')' nor '+',
        # so it could never match a real 'audit(<timestamp:serial>)'.
        ('sub', r'type=SYSCALL msg=audit\([^)]+\) *:', ''),
    ]
    super().__init__(*args, **kwargs)
|
||||||
|
|
||||||
|
def __parse_ausearch(self, output):
    """Parse raw `ausearch --interpret` output into sorted Row objects.

    :param output: iterable of output lines; they are re-joined and then
        split into per-event chunks at the '----' separator lines that
        ausearch emits between events.
    :return: sorted list of Row objects (one per event chunk).
    """

    def parse_line(line, fields=None):
        """Parse one 'type=... msg=audit(...) : k=v ...' line.

        Returns (type, fields-dict, attribs-or-None), or None when the
        line does not match line_regex.  `line_regex`/`exit_regex` are
        closure variables defined below, before the first call.
        """
        #slog_m(WARNING, " parsing line{}".format(line))
        # type=OBJ_PID msg=audit(01/01/70 00:08:00.650:511) : opid= ...
        match = re.search(line_regex, line)
        if match is None:
            slog(INFO, "Ignoring unparseable line >%s<" % line)
            return None
        typ = match.group(1)
        # Anything after '# ' is an attribute suffix, not payload.
        rhs = match.group(3).split('# ')
        # Collapse exit=EACCES(Permission denied) to exit=EACCES so the
        # value contains no spaces before the k=v split below.
        payload = re.sub(exit_regex, r'exit=\1', rhs[0].strip())
        #slog_m(WARNING, " payload={}".format(payload))
        fields = dict(map(str.strip, sub.split('=', 1)) for sub in payload.split(' ') if '=' in sub)
        #slog_m(WARNING, " fields={}".format(fields))
        if 'nametype' in fields.keys() and fields['nametype'] == 'PARENT': # there's going to be a line with 'CREATE', too
            fields['parent-name'] = fields['name']
            del fields['name']
        attribs = None if len(rhs) <= 1 else Options(rhs[1])
        return (typ, fields, attribs)

    #slog(WARNING, "parsing ausearch")
    #output = multi_regex_edit(self.__multi_regex, output)
    # Split the joined output into one chunk per audit event.
    output = re.split(r'^\s*----*\s*', '\n'.join(output), flags=re.MULTILINE)
    line_regex = re.compile(r'type=([^)]+) msg=audit\(([^]]+)\) *: *(.*)')
    exit_regex = re.compile(r'exit=([A-Z]+)\([^)]+\)')
    records = []
    rows = set()
    for chunk in output:
        # Accumulators for all lines of one event.
        r_types = []
        r_fields = OrderedDict()
        r_attribs = Options()
        #slog_m(WARNING, "\n========= parsing chunk\n{}".format(chunk))
        for l in chunk.splitlines():
            parsed = parse_line(l.strip())
            if parsed is None:
                continue
            (typ, fields, attribs) = parsed
            r_fields.update(fields)
            r_types.append(typ)
            if attribs is not None:
                r_attribs.update(attribs)
        if len(r_fields):
            # Only a PARENT path record was seen: promote it to 'name'.
            if 'parent-name' in r_fields.keys() and not 'name' in r_fields.keys():
                r_fields['name'] = r_fields['parent-name']
            record = dict()
            r_fields['types'] = r_types
            if len(r_types) >= 7:
                slog(WARNING, "r_types={}".format(r_types))
            #assert(len(r_types) < 7)
            # Keep 'types' as the first key for stable serialization.
            r_fields.move_to_end('types', False)
            record['types'] = r_types
            record['fields'] = r_fields
            record['attribs'] = r_attribs if len(r_attribs) else None
            #slog(WARNING, "record={}".format(record))
            records.append(record)
            row = self.Row(self, line='too long', fields=r_fields, attribs=r_attribs, types=r_types)
            #slog(WARNING, "row = {}".format(row.to_str(only_values=False, quotes=True, fields=None)))
            #row.dump(WARNING)
            rows.add(row)
    #slog_m(WARNING, "records = {}".format(records))
    #return records
    return sorted(rows)
|
||||||
|
|
||||||
|
def __parse_json(self, spec):
    """Decode one serialized row payload.

    :param spec: JSON text; a bare 'k: v' list is wrapped in '[...]'.
    :return: decoded object with dicts as OrderedDict, or None when the
        text is too short or not valid JSON.
    """
    spec = spec.strip()
    if len(spec) < 3:
        return None
    if not spec[0] in ['{', '[']:
        spec = '[' + spec + ']'
    try:
        return json.loads(spec, object_pairs_hook=OrderedDict)
    except ValueError:
        # FIX: was a bare `except:`.  json.JSONDecodeError is a
        # ValueError subclass, so this still catches malformed input
        # without hiding unrelated bugs (e.g. NameError).
        pass
    return None
|
||||||
|
|
||||||
|
def __parse_serialized(self, output):
    """Re-parse rows previously emitted by __serialize_rows.

    :param output: iterable of serialized lines (JSON payload, optional
        '# ...' attribute suffix).
    :return: sorted list of Row objects.
    """

    def decode(raw):
        # Payload is everything before the first '# '; the piece right
        # after it (if any) holds the attributes.
        pieces = raw.strip().split('# ')
        decoded = self.__parse_json(pieces[0].strip())
        if decoded is None:
            return None
        extra = Options(pieces[1]) if len(pieces) > 1 else None
        return decoded['types'], decoded, extra

    records = []
    rows = set()
    for chunk in output:
        parsed = decode(chunk)
        if parsed is None:
            continue
        r_types, r_fields, r_attribs = parsed
        rows.add(self.Row(self, line=chunk, fields=r_fields, attribs=r_attribs, types=r_types))
        records.append({
            'types': r_types,
            'fields': r_fields,
            'attribs': r_attribs
        })
    #return records
    return sorted(rows)
|
||||||
|
|
||||||
|
def __serialize_records(self, records):
    """Render records as 'key="value", ...' lines.

    Only the keys proctitle/name/exit are emitted; records containing
    none of them are skipped.  A non-empty attribs dict is appended as
    a ' # ...' suffix.
    """
    wanted = ('proctitle', 'name', 'exit')
    out = []
    for record in records:
        fields = record['fields']
        pairs = ['{}="{}"'.format(key, fields[key]) for key in wanted if key in fields.keys()]
        if not pairs:
            continue
        text = ', '.join(pairs)
        attribs = record['attribs']
        if attribs is not None and len(attribs):
            text += ' # ' + str(attribs)
        out.append(text)
    return out
|
||||||
|
|
||||||
|
def __serialize_rows(self, rows, keys=None):
    """Serialize rows as JSON lines, one row per line.

    :param keys: restrict output to these field keys; None means each
        row chooses via row.persist_keys().
    :return: list of serialized lines; rows with no matching key are
        skipped.  Non-empty attribs become a ' # <json>' suffix.
    """
    out = []
    for row in rows:
        selected = set(row.persist_keys() if keys is None else keys)
        # Preserve the row's own key order while filtering.
        pairs = [(k, row[k]) for k in row.fields.keys() if k in selected]
        if not pairs:
            continue
        text = json.dumps(OrderedDict(pairs))
        if row.attribs is not None and len(row.attribs):
            text += ' # ' + json.dumps(row.attribs)
        out.append(text)
    return out
|
||||||
|
|
||||||
|
def _row_name(self, row):
    """Build a human-readable display name for an audit row.

    ANOM_ABEND and SYSCALL rows get a synthesized '[euid@exe] ...'
    name; everything else falls back to the parent implementation.
    """

    def to_xid(s):
        # Parse a numeric uid/gid; 0xffffffff is the kernel's "unset".
        try:
            id = int(s, 0)
            if id == 0xffffffff:
                return -1
            return id
        except (TypeError, ValueError):
            return s

    def to_octal(s):
        # NOTE(review): despite the name this returns a plain int (no
        # octal formatting); kept as-is to preserve output.
        try:
            id = int(s, 0)
            return id
        except (TypeError, ValueError):
            return s

    def exe(row):
        # Prefer the full proctitle; fall back to the executable path.
        cmd = row['proctitle'] if 'proctitle' in row.field_keys() else row['exe']
        euid = row['euid'] if 'euid' in row.field_keys() else 'unknown'
        return '[{}@{}]'.format(euid, os.path.basename(cmd))

    t = row['types']
    if 'ANOM_ABEND' in t:
        return 'ANOM_ABEND ' + exe(row)
    if 'SYSCALL' in t:
        sc = row['syscall']
        pt = exe(row)
        if 'ANOM_PROMISCUOUS' in t:
            return 'SYSCALL ' + exe(row) + ' ' + sc + '(promiscuous mode)' + ' -> ' + row['exit']
        if 'name' in row.field_keys():
            if sc in ['fchown32', 'fchown']:
                owner = to_xid(row['a1'])
                group = to_xid(row['a2'])
                return 'SYSCALL {} {}(fd, {}, {}) -> {}'.format(pt, sc, owner, group, row['exit'])
            if sc in ['chown32', 'chown']:
                owner = to_xid(row['a1'])
                group = to_xid(row['a2'])
                return 'SYSCALL {} {}({}, {}, {}) -> {}'.format(pt, sc, row['name'], owner, group, row['exit'])
            if sc in ['fchmod32', 'fchmod']:
                mode = to_octal(row['a1'])
                # FIX: original tested `if name in row.field_keys()`
                # with name == 'fd', i.e. it looked up the literal key
                # 'fd'; the intent was clearly to test for 'name'.
                name = 'fd'
                if 'name' in row.field_keys():
                    name = row['name']
                return 'SYSCALL {} {}({}, {}) -> {}'.format(pt, sc, name, mode, row['exit'])
            if sc in ['chmod32', 'chmod']:
                mode = to_octal(row['a1'])
                return 'SYSCALL {} {}({}, {}) -> {}'.format(pt, sc, row['name'], mode, row['exit'])
            # generic SYSCALL-with-name fallback (uses row['name'], so
            # it must stay inside the 'name' guard)
            return 'SYSCALL ' + pt + ' ' + sc + '(' + row['name'] + ')' + ' -> ' + row['exit']
    return super()._row_name(row)
|
||||||
|
|
||||||
|
def _filter(self, output):
    """Parse the command output and return it re-serialized.

    Raw ausearch output is recognized by its '----' event separators
    (output may be a list of lines, hence both membership tests);
    anything else is assumed to be our own serialized format.
    """
    if '----\n' in output or '----' in output:
        rows = self.__parse_ausearch(output)
        #slog(WARNING, "Done parsing ausearch output")
    else:
        rows = self.__parse_serialized(output)
    #slog(WARNING, "len(rows) = {}".format(len(rows)))
    #slog(WARNING, "rows={}".format(rows))
    # Debug scaffolding for Row ordering; note oldrow is never advanced
    # (the assignment below is commented out), so the comparison branch
    # is currently dead.  Kept as-is to preserve behavior.
    oldrow = None
    for row in set(rows):
        if oldrow is not None:
            slog(WARNING, "{}greater than last, {}smaller than last, {} equal to last".format(
                '' if row > oldrow else 'Not ',
                '' if oldrow > row else 'not ',
                '' if oldrow == row else 'not ',
            ))
            if not row > oldrow and not oldrow > row and not oldrow == row:
                row.dump(WARNING, "Broken row")
                oldrow.dump(WARNING, "Broken new")
                raise Exception("Broken equality")
        #oldrow = row
        # FIX: was assert(type(row['types'] == list)) which asserts
        # type(<bool>) -- a class object, always truthy, so the check
        # never fired.  This is the intended type check.
        assert isinstance(row['types'], list)
    return self.__serialize_rows(rows)  # , keys=self.row_info('name-fields'))
|
||||||
|
|
||||||
|
def parse(self, lines):
    """Parse audit output into rows.

    Dispatches on the presence of ausearch's '----' separators
    (checking both with and without the trailing newline, since
    `lines` may be a list of line strings).
    """
    looks_raw = '----\n' in lines or '----' in lines
    if looks_raw:
        return self.__parse_ausearch(lines)
    return self.__parse_serialized(lines)
|
||||||
|
|
||||||
|
def _row_info(self):
    """Describe how Audit rows are produced and compared.

    Returns the tuple consumed by ListCmd: the command line, a line
    regex (None here — parsing is custom), the full field list, and two
    reduced field sets (presumably 'cmp-fields' and 'name-fields' —
    TODO confirm against ListCmd).
    """
    # Example record, for reference:
    # type=SYSCALL msg=audit(01/01/70 00:06:03.120:29) : \
    # arch=armeb syscall=openat per=PER_LINUX success=no exit=EACCES(Permission denied)
    # a0=0xffffff9c a1=0xb6ed6864 a2=O_WRONLY a3=0x0 items=1 ppid=1 pid=1879 auid=unset
    # uid=ethernetip gid=ethernetip euid=ethernetip suid=ethernetip fsuid=ethernetip egid=ethernetip
    # sgid=ethernetip fsgid=ethernetip tty=(none) ses=unset comm=ethernetip exe=/usr/bin/ethernetip key=access
    all_fields = ['types', 'proctitle', 'exe', 'syscall', 'name', 'arch', 'per', 'success', 'exit', 'a0', 'a1', 'a2', 'a3', 'items', 'ppid', 'pid', 'auid', 'uid', 'gid', 'euid', 'suid', 'fsuid', 'egid', 'sgid', 'fsgid', 'tty', 'ses', 'comm', 'key']
    key_fields = ['types', 'proctitle', 'exe', 'syscall', 'name', 'euid', 'exit']
    return (
        r"ausearch --interpret",
        None,  # regex
        all_fields,
        key_fields,
        list(key_fields),  # distinct list object, same content as above
    )
|
||||||
Loading…
Add table
Add a link
Reference in a new issue