mirror of
ssh://git.janware.com/srv/git/janware/proj/jw-pkg
synced 2026-01-15 12:03:31 +01:00
If P1 build-needs P2, make it run-need P2, too, since with the current dependency resolution algorithm, this also adds everything needed to _run_ P2. Which is the only thing this commit is after. It actually does too much, and enables P1 to run, too, at least WRT P2. But that's the easiest way to resolve the problem for now. Signed-off-by: Jan Lindemann <jan@janware.com>
932 lines
37 KiB
Python
932 lines
37 KiB
Python
#!/usr/bin/python3 -u
|
|
# -*- coding: utf-8 -*-
|
|
#
|
|
# This source code file is a merge of various build tools and a horrible mess.
|
|
#
|
|
|
|
from __future__ import print_function
|
|
import os
|
|
import sys
|
|
import argparse
|
|
import pwd
|
|
import time
|
|
from os.path import isfile
|
|
from os.path import isdir
|
|
from os.path import expanduser
|
|
from os.path import basename
|
|
from os.path import realpath
|
|
import subprocess
|
|
import re
|
|
import platform
|
|
import datetime
|
|
|
|
# meaning of pkg.requires.xxx variables
|
|
# build: needs to be built and installed before this can be built
|
|
# devel: needs to be installed before this-devel can be installed, i.e. before _other_ packages can be built against this
|
|
# run: needs to be installed before this-run can be installed, i.e. before this and other packages can run with this
|
|
|
|
# --------------------------------------------------------------------- Python 2 / 3 compatibility stuff
|
|
# Python 2/3 compatibility: on Python 3 there is no 'basestring', so alias
# it to 'str'; on Python 2 the name exists and is left untouched.
try:
    basestring
except NameError:
    basestring = str
|
# --------------------------------------------------------------------- helpers
|
|
|
|
class ResultCache(object):
    """Memoize function results in a nested dict keyed by the function
    name followed by the stringified positional arguments."""

    def __init__(self):
        # nested mapping: func name -> str(arg1) -> ... -> cached result
        self.__cache = {}

    def run(self, func, args):
        """Return func(*args), computing it only on the first call for a
        given (func.__name__, *args) key chain."""
        node = self.__cache
        parts = [func.__name__] + args
        last = len(parts)
        for pos, part in enumerate(parts, 1):
            # None and other values are normalized to their string form
            part = 'None' if part is None else str(part)
            if part in node:
                if pos == last:
                    return node[part]
                node = node[part]
                continue
            if pos == last:
                result = func(*args)
                node[part] = result
                return result
            node[part] = {}
            node = node[part]
        raise Exception("cache algorithm failed for function", func.__name__, "in depth", last)
|
# ----------------------------------------------------------------- class Projects
|
|
|
|
class Projects(object):
|
|
|
|
def __init__(self):
|
|
|
|
self.global_args = []
|
|
self.opt_os = None
|
|
self.top_name = None
|
|
self.glob_os_cascade = None
|
|
|
|
self.dep_cache = {}
|
|
self.my_dir = os.path.dirname(os.path.realpath(__file__))
|
|
self.opt_debug = False
|
|
self.res_cache = ResultCache()
|
|
self.topdir = None
|
|
self.projs_root = expanduser("~") + '/local/src/jw.dev/proj'
|
|
|
|
def debug(self, *objs):
|
|
if self.opt_debug:
|
|
print("DEBUG: ", *objs, file = sys.stderr)
|
|
|
|
def err(self, *objs):
|
|
print("ERR: ", *objs, file = sys.stderr)
|
|
|
|
def find_proj_path_unused(name):
|
|
name = name.replace("dspider-", "")
|
|
search_path = [".", "dspc/src", "dspc/src/dspcd-plugins", "dspc/src/io" ]
|
|
for sub in search_path:
|
|
path = self.projs_root + "/" + sub + "/" + name
|
|
if os.path.exists(path):
|
|
return os.path.abspath(path)
|
|
raise Exception("module " + name + " not found below " + self.projs_root)
|
|
|
|
def find_proj_path_cached_unused(name):
|
|
return self.res_cache.run(find_proj_path_unused, [ name ])
|
|
|
|
def proj_dir(self, name):
|
|
if name == self.top_name:
|
|
return self.topdir
|
|
return self.projs_root + '/' + name
|
|
|
|
def re_section(self, name):
|
|
return re.compile('[' + name + ']'
|
|
'.*?'
|
|
'(?=[)',
|
|
re.DOTALL)
|
|
|
|
def remove_duplicates(self, seq):
|
|
seen = set()
|
|
seen_add = seen.add
|
|
return [x for x in seq if not (x in seen or seen_add(x))]
|
|
|
|
def get_os(self, args = ""):
|
|
for d in [ self.projs_root + '/jw-build/scripts', '/opt/jw-build/bin' ]:
|
|
script = d + '/get-os.sh'
|
|
if isfile(script):
|
|
cmd = '/bin/bash ' + script
|
|
if args:
|
|
cmd = cmd + ' ' + args
|
|
p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
|
|
(out, rr) = p.communicate()
|
|
if rr:
|
|
self.err("failed to run ", cmd)
|
|
continue
|
|
out = re.sub('\n', '', out.decode('utf-8'))
|
|
return out
|
|
return "linux"
|
|
|
|
# TODO: add support for customizing this in project.conf
|
|
def htdocs_dir(self, name):
|
|
pd = self.proj_dir(name)
|
|
for r in [ pd + "/tools/html/htdocs", pd + "/htdocs", "/srv/www/proj/" + name ]:
|
|
if isdir(r):
|
|
return r
|
|
return None
|
|
|
|
def os_cascade(self):
|
|
if self.glob_os_cascade is not None:
|
|
return self.glob_os_cascade.copy()
|
|
r = [ 'os', platform.system().lower() ]
|
|
os = self.opt_os if self.opt_os is not None else self.res_cache.run(self.get_os, [])
|
|
name = re.sub('-.*', '', os)
|
|
series = os
|
|
while True:
|
|
n = re.sub('\.[0-9]+$', '', series)
|
|
if n == series:
|
|
break
|
|
r.append(n)
|
|
series = n
|
|
if not name in r:
|
|
r.append(name)
|
|
if not os in r:
|
|
r.append(os)
|
|
# e.g. os, linux, suse, suse-tumbleweed
|
|
#return [ 'os', platform.system().lower(), name, os ]
|
|
self.glob_os_cascade = r
|
|
return r
|
|
|
|
def strip_module_from_spec(self, mod):
|
|
return re.sub(r'-dev$|-devel$|-run$', '', re.split('([=><]+)', mod)[0].strip())
|
|
|
|
def get_section(self, path, section):
|
|
r = ''
|
|
file = open(path)
|
|
pat = '[' + section + ']'
|
|
in_section = False
|
|
for line in file:
|
|
if (line.rstrip() == pat):
|
|
in_section = True
|
|
continue
|
|
if in_section:
|
|
if len(line) and line[0] == '[':
|
|
break
|
|
r = r + line
|
|
file.close()
|
|
return r.rstrip()
|
|
|
|
def read_value(self, path, section, key):
|
|
|
|
def scan_section(f, key):
|
|
if key is None:
|
|
r = ''
|
|
for line in f:
|
|
if len(line) and line[0] == '[':
|
|
break
|
|
r += line
|
|
return r if len(r) else None
|
|
for line in f:
|
|
if len(line) and line[0] == '[':
|
|
return None
|
|
rr = re.findall('^ *' + key + ' *= *(.*)', line)
|
|
if len(rr) > 0:
|
|
return rr[0]
|
|
return None
|
|
|
|
def scan_section_debug(f, key):
|
|
rr = scan_section(f, key)
|
|
#self.debug(" returning", rr)
|
|
return rr
|
|
|
|
try:
|
|
#self.debug("looking for {}::[{}].{}".format(path, section, key))
|
|
with open(path, 'r') as f:
|
|
if not len(section):
|
|
rr = scan_section(f, key)
|
|
pat = '[' + section + ']'
|
|
for line in f:
|
|
if line.rstrip() == pat:
|
|
return scan_section(f, key)
|
|
return None
|
|
except:
|
|
self.debug(path, "not found")
|
|
# TODO: handle this special case cleaner somewhere up the stack
|
|
if section == 'build' and key == 'libname':
|
|
return 'none'
|
|
return None
|
|
|
|
def get_value(self, name, section, key):
|
|
self.debug("getting value [%s].%s for project %s (%s)" %(section, key, name, self.top_name))
|
|
if self.top_name and name == self.top_name:
|
|
proj_root = self.topdir
|
|
else:
|
|
proj_root = self.projs_root + '/' + name
|
|
self.debug("proj_root = " + proj_root)
|
|
|
|
if section == 'version':
|
|
proj_version_dirs = [ proj_root ]
|
|
if proj_root != self.topdir:
|
|
proj_version_dirs.append('/usr/share/doc/packages/' + name)
|
|
for d in proj_version_dirs:
|
|
version_path = d + '/VERSION'
|
|
try:
|
|
with open(version_path) as fd:
|
|
r = fd.read().replace('\n', '').replace('-dev', '')
|
|
fd.close()
|
|
return r
|
|
except EnvironmentError:
|
|
self.debug("ignoring unreadable file " + version_path)
|
|
continue
|
|
raise Exception("No version file found for project \"" + name + "\"")
|
|
|
|
path = proj_root + '/make/project.conf'
|
|
#print('path = ', path, 'self.top_name = ', self.top_name, 'name = ', name)
|
|
return self.res_cache.run(self.read_value, [path, section, key])
|
|
|
|
def collect_values(self, names, section, key):
|
|
r = ""
|
|
for n in names:
|
|
val = self.get_value(n, section, key)
|
|
if val:
|
|
r = r + " " + val
|
|
return self.remove_duplicates([x.strip() for x in r.split(",")])
|
|
|
|
# scope 0: no children
|
|
# scope 1: children
|
|
# scope 2: recursive
|
|
|
|
    def add_modules_from_project_txt_cached(self, buf, visited, spec, section, key, add_self, scope,
                                            names_only):
        # Memoized wrapper around add_modules_from_project_txt().
        # NOTE(review): the cache key includes str(buf)/str(visited), i.e.
        # the *current state* of mutable arguments, and a cache hit returns
        # the stored None without re-applying the buf mutations -- presumably
        # hits only occur when the mutation would be a no-op; verify before
        # relying on this.
        return self.res_cache.run(self.add_modules_from_project_txt, [buf, visited, spec, section, key,
                                                                      add_self, scope, names_only])
|
    def add_modules_from_project_txt(self, buf, visited, spec, section, key, add_self, scope,
                                     names_only):
        """Append 'spec' and (depending on scope) its [section].key
        dependencies to the ordered list 'buf', depth first, so that
        prerequisites come before their dependents.

        Args:
            buf: output list, mutated in place; duplicates are skipped.
            visited: set of specs already expanded (cycle guard), mutated.
            spec: module spec, possibly with version/subpackage suffix.
            section, key: project.conf location of the dependency list.
            add_self: also append 'spec' itself (after its dependencies).
            scope: 0 = no children, 1 = direct children only, 2 = recursive.
            names_only: reduce specs to bare project names first.
        """
        name = self.strip_module_from_spec(spec)
        if names_only:
            spec = name
        if spec in buf:
            return
        if spec in visited:
            # already expanded earlier: only make sure it shows up in buf
            if add_self:
                buf.append(spec)
            return
        visited.add(spec)
        deps = self.get_value(name, section, key)
        self.debug("name = ", name, "section = ", section, "key = ", key, "deps = ", deps, "scope = ", scope, "visited = ", visited)
        if deps and scope > 0:
            # scope 1 expands only direct children (subscope 0 stops the
            # recursion one level down), scope 2 keeps recursing
            if scope == 1:
                subscope = 0
            else:
                subscope = 2
            deps = deps.split(',')
            for dep in deps:
                dep = dep.strip()
                if not(len(dep)):
                    continue
                self.add_modules_from_project_txt_cached(buf, visited, dep,
                        section, key, add_self=True, scope=subscope,
                        names_only=names_only)
        if add_self:
            buf.append(spec)
|
def get_modules_from_project_txt(self, names, sections, keys, add_self, scope,
|
|
names_only = True):
|
|
if isinstance(keys, basestring):
|
|
keys = [ keys ]
|
|
#r = set()
|
|
r = []
|
|
for section in sections:
|
|
for key in keys:
|
|
visited = set()
|
|
for name in names:
|
|
rr = []
|
|
self.add_modules_from_project_txt_cached(rr, visited, name, section, key, add_self, scope,
|
|
names_only)
|
|
# TODO: this looks like a performance hogger
|
|
for m in rr:
|
|
if not m in r:
|
|
r.append(m)
|
|
return r
|
|
|
|
    def pkg_relations(self, rel_type, args_):
        """Resolve the pkg.<rel_type>.* package relations (requires,
        conflicts, provides) of the given modules for one or more
        flavours and return them joined by the delimiter, ready for use
        in a package spec file.

        Args:
            rel_type: relation kind, selects the pkg.<rel_type>.* sections.
            args_: raw command line; see the argparse options below.
        """
        parser = argparse.ArgumentParser(description='pkg-' + rel_type)
        # TODO: implement Vendor evaluation

        parser.add_argument('-S', '--subsections', nargs='?', default=None, help='Subsections to consider, comma-separated')
        parser.add_argument('-d', '--delimiter', nargs='?', default=', ', help='Output words delimiter')
        parser.add_argument('flavour', help='Flavour')
        parser.add_argument('module', nargs='*', help='Modules')
        parser.add_argument('-p', '--no-subpackages', action='store_true',
                default=False, help='Cut -run and -devel from package names')
        parser.add_argument('--no-version', action='store_true',
                default=False, help='Don\'t report version information')
        parser.add_argument('--dont-strip-revision', action='store_true',
                default=False, help='Always treat VERSION macro as VERSION-REVISION')
        parser.add_argument('--recursive', action='store_true',
                default=False, help='Find dependencies recursively')
        parser.add_argument('--dont-expand-version-macros', action='store_true',
                default=False, help='Don\'t expand VERSION and REVISION macros')
        parser.add_argument('--ignore', nargs='?', default='', help='Packages that '
                'should be ignored together with their dependencies')
        args = parser.parse_args(args_)
        # accepts plain literal versions like "1.2-3"
        version_pattern=re.compile("[0-9-.]*")
        if args.subsections is None:
            # default: the OS cascade plus the internal 'jw' subsection
            subsecs = self.os_cascade()
            subsecs.append('jw')
        else:
            subsecs = args.subsections.split(',')
        self.debug('flavour = ', args.flavour, ', subsecs = ', ' '.join(subsecs))
        ignore = args.ignore.split(',')
        self.debug("ignore = ", ignore)

        r = []
        flavours = args.flavour.split(',')
        for flavour in flavours:
            for s in subsecs:
                section = 'pkg.' + rel_type + '.' + s
                visited = set()
                # work queue; --recursive appends jw dependencies to it
                modules = args.module.copy()
                while len(modules):
                    m = modules.pop(0)
                    if m in visited or m in ignore:
                        continue
                    visited.add(m)
                    value = self.get_value(m, section, flavour)
                    if not value:
                        continue
                    deps = value.split(',')
                    for spec in deps:
                        # split "name >= version" into [name, op, version]
                        dep = re.split('([=><]+)', spec)
                        if args.no_version:
                            dep = dep[:1]
                        dep = list(map(str.strip, dep))
                        # bare project name without -dev/-devel/-run suffix
                        dep_name = re.sub('-dev$|-devel$|-run$', '', dep[0])
                        if dep_name in ignore or dep[0] in ignore:
                            continue
                        if args.no_subpackages:
                            dep[0] = dep_name
                        for i, item in enumerate(dep):
                            dep[i] = item.strip()
                        if s == 'jw':
                            # only jw projects can be followed recursively
                            if args.recursive and not dep_name in visited and not dep_name in modules:
                                modules.append(dep_name)
                            if len(dep) == 3:
                                # expand the VERSION / VERSION-REVISION macros
                                if args.dont_expand_version_macros and dep_name in args.module:
                                    version = dep[2]
                                else:
                                    version = self.get_value(dep_name, 'version', '')
                                if dep[2] == 'VERSION':
                                    if args.dont_strip_revision:
                                        dep[2] = version
                                    else:
                                        # strip the -REVISION part
                                        dep[2] = version.split('-')[0]
                                elif dep[2] == 'VERSION-REVISION':
                                    dep[2] = version
                                elif version_pattern.match(dep[2]):
                                    # literal version: keep as is
                                    # dep[2] = dep[2]
                                    pass
                                else:
                                    raise Exception("Unknown version specifier in " + spec)
                        cleaned_dep = ' '.join(dep)
                        if not cleaned_dep in r:
                            self.debug("appending", cleaned_dep)
                            r.append(cleaned_dep)
        return args.delimiter.join(r)
|
def print_pkg_relations(self, rel_type, args_):
|
|
print(self.pkg_relations(rel_type, args_))
|
|
|
|
def get_libname(self, names):
|
|
vals = self.get_modules_from_project_txt(names, ['build'], 'libname',
|
|
scope = 1, add_self=False, names_only=True)
|
|
if not vals:
|
|
return ' '.join(names)
|
|
if 'none' in vals:
|
|
vals.remove('none')
|
|
return ' '.join(reversed(vals))
|
|
|
|
def is_excluded_from_build(self, module):
|
|
self.debug("checking if module " + module + " is excluded from build")
|
|
exclude = self.get_modules_from_project_txt([ module ], ['build'], 'exclude',
|
|
scope = 1, add_self=False, names_only=True)
|
|
cascade = self.os_cascade() + [ 'all' ]
|
|
for p1 in exclude:
|
|
for p2 in cascade:
|
|
if p1 == p2:
|
|
return p1
|
|
return None
|
|
|
|
# -L needs to contain more paths than libs linked with -l would require
|
|
def get_ldpathflags(self, names, exclude = []):
|
|
deps = self.get_modules_from_project_txt(names, ['pkg.requires.jw'], 'build',
|
|
scope = 2, add_self=True, names_only=True)
|
|
r = ''
|
|
for m in deps:
|
|
if m in exclude:
|
|
continue
|
|
libname = self.get_libname([m])
|
|
if len(libname):
|
|
r = r + ' -L' + self.proj_dir(m) + '/lib'
|
|
print(r[1:])
|
|
|
|
def get_ldflags(self, names, exclude = [], add_self_ = False):
|
|
#print(names)
|
|
deps = self.get_modules_from_project_txt(names, ['pkg.requires.jw'], 'build',
|
|
scope = 1, add_self=add_self_, names_only=True)
|
|
self.debug("deps = " + ' '.join(deps))
|
|
#print(deps)
|
|
r = ''
|
|
for m in reversed(deps):
|
|
if m in exclude:
|
|
continue
|
|
libname = self.get_libname([m])
|
|
if len(libname):
|
|
#r = r + ' -L' + self.proj_dir(m) + '/lib -l' + libname
|
|
r = r + ' -l' + libname
|
|
if len(r):
|
|
ldpathflags = self.get_ldpathflags(names, exclude)
|
|
if ldpathflags:
|
|
r = ldpathflags + ' ' + r
|
|
return r[1::]
|
|
return ''
|
|
|
|
def commands(self):
|
|
f = open(sys.argv[0])
|
|
cmds = []
|
|
for line in f:
|
|
self.debug("checking line ", line)
|
|
rr = re.findall('^ def *cmd_([a-z0-9_]+).*', line)
|
|
if len(rr):
|
|
cmds.append(rr[0].replace('_', '-'))
|
|
f.close()
|
|
return ' '.join(cmds)
|
|
|
|
# ----------------------------------------------------------------- commands
|
|
|
|
def cmd_commands(self, args_):
|
|
print(self.commands())
|
|
|
|
def cmd_build(self, args_):
|
|
|
|
def read_deps(cur, prereq_type):
|
|
# dep cache doesn't make a difference at all
|
|
if prereq_type in self.dep_cache:
|
|
if cur in self.dep_cache[prereq_type]:
|
|
return self.dep_cache[prereq_type][cur]
|
|
else:
|
|
self.dep_cache[prereq_type] = {}
|
|
|
|
r = self.get_modules_from_project_txt([ cur ], ['pkg.requires.jw'],
|
|
prereq_type, scope = 2, add_self=False, names_only=True)
|
|
self.debug('prerequisites = ' + ' '.join(r))
|
|
if cur in r:
|
|
r.remove(cur)
|
|
self.debug('inserting', prereq_type, "prerequisites of", cur, ":", ' '.join(r))
|
|
self.dep_cache[prereq_type][cur] = r
|
|
return r
|
|
|
|
def read_deps_cached(cur, prereq_type):
|
|
return self.res_cache.run(read_deps, [ cur, prereq_type ])
|
|
|
|
def add_dep_tree(cur, prereq_types, tree, all_deps):
|
|
self.debug("adding prerequisites " + ' '.join(prereq_types) + " of module " + cur)
|
|
if cur in all_deps:
|
|
self.debug('already handled module ' + cur)
|
|
return 0
|
|
deps = set()
|
|
all_deps.add(cur)
|
|
for t in prereq_types:
|
|
self.debug("checking prereqisites of type " + t)
|
|
deps.update(read_deps_cached(cur, t))
|
|
for d in deps:
|
|
add_dep_tree(d, prereq_types, tree, all_deps)
|
|
tree[cur] = deps
|
|
return len(deps)
|
|
|
|
def calculate_order(order, modules, prereq_types):
|
|
all_deps = set()
|
|
dep_tree = {}
|
|
for m in modules:
|
|
self.debug("--- adding dependency tree of module " + m)
|
|
add_dep_tree(m, prereq_types, dep_tree, all_deps)
|
|
while len(all_deps):
|
|
for d in all_deps:
|
|
if not len(dep_tree[d]):
|
|
break
|
|
else:
|
|
print(all_deps)
|
|
raise Exception("fatal: the dependencies between these modules are unresolvable")
|
|
order.append(d)
|
|
all_deps.remove(d)
|
|
for k in dep_tree.keys():
|
|
if d in dep_tree[k]:
|
|
dep_tree[k].remove(d)
|
|
return 1
|
|
|
|
def run_make(module, target, cur_project, num_projects):
|
|
#make_cmd = "make " + target + " 2>&1"
|
|
make_cmd = [ "make", target ]
|
|
path = self.proj_dir(module)
|
|
delim_len = 120
|
|
delim = '---- [%d/%d]: running %s in %s -' % (cur_project, num_projects, make_cmd, path)
|
|
delim = delim + '-' * (delim_len - len(delim))
|
|
|
|
print(',' + delim + ' >')
|
|
|
|
patt = self.is_excluded_from_build(module)
|
|
if patt is not None:
|
|
print('| Configured to skip build on platform >' + patt + '<')
|
|
print('`' + delim + ' <')
|
|
return
|
|
|
|
os.chdir(path)
|
|
p = subprocess.Popen(make_cmd, shell=False, stdout=subprocess.PIPE, stderr=None, close_fds=True)
|
|
for line in iter(p.stdout.readline, b''):
|
|
line = line.decode(sys.stdout.encoding)
|
|
sys.stdout.write('| ' + line) # avoid extra newlines from print()
|
|
sys.stdout.flush()
|
|
p.wait()
|
|
print('`' + delim + ' <')
|
|
if p.returncode:
|
|
print(' '.join(make_cmd) + ' failed')
|
|
raise Exception(time.strftime("%Y-%m-%d %H:%M") + ": failed to make target " + target + " in module " + module + " below base " + self.projs_root)
|
|
|
|
def run_make_on_modules(modules, order, target):
|
|
cur_project = 0
|
|
num_projects = len(order)
|
|
if target in ["clean", "distclean"]:
|
|
for m in reversed(order):
|
|
cur_project += 1
|
|
run_make(m, target, cur_project, num_projects)
|
|
if m in modules:
|
|
modules.remove(m)
|
|
if not len(modules):
|
|
print("all modules cleaned")
|
|
return
|
|
else:
|
|
for m in order:
|
|
cur_project += 1
|
|
run_make(m, target, cur_project, num_projects)
|
|
|
|
def run(args_):
|
|
|
|
# -- parse command line
|
|
parser = argparse.ArgumentParser(description='janware software project build tool')
|
|
parser.add_argument('--exclude', default='', help='Space seperated ist of modules to be excluded from build')
|
|
parser.add_argument('-n', '--dry-run', action='store_true',
|
|
default=False, help='Don\'t build anything, just print what would be done.')
|
|
parser.add_argument('-O', '--build-order', action='store_true',
|
|
default=False, help='Don\'t build anything, just print the build order.')
|
|
parser.add_argument('-I', '--ignore-deps', action='store_true',
|
|
default=False, help='Don\'t build dependencies, i.e. build only modules specified on the command line')
|
|
parser.add_argument('target', default='all', help='Build target')
|
|
parser.add_argument('modules', nargs='+', default='', help='Modules to be built')
|
|
|
|
args = parser.parse_args(args_)
|
|
|
|
self.debug("----------------------------------------- running ", ' '.join(args_))
|
|
|
|
modules = args.modules
|
|
exclude = args.exclude.split()
|
|
target = args.target
|
|
|
|
env_exclude = os.getenv('BUILD_EXCLUDE', '')
|
|
if len(env_exclude):
|
|
print("exluding modules from environment: " + env_exclude)
|
|
exclude += " " + env_exclude
|
|
|
|
# -- build
|
|
order = []
|
|
|
|
glob_prereq_types = [ "build" ]
|
|
if re.match("pkg-.*", target) is not None:
|
|
glob_prereq_types = [ "build", "run", "release", "devel" ]
|
|
|
|
if target != 'order' and not args.build_order:
|
|
print("using prerequisite types " + ' '.join(glob_prereq_types))
|
|
print("calculating order for modules ... ")
|
|
|
|
calculate_order(order, modules, glob_prereq_types)
|
|
if args.ignore_deps:
|
|
order = [m for m in order if m in args.modules]
|
|
order = [m for m in order if m not in exclude]
|
|
if target == 'order' or args.build_order:
|
|
print(' '.join(order))
|
|
exit(0)
|
|
|
|
cur_project = 0
|
|
print("Building target %s in %d projects:" % (target, len(order)))
|
|
for m in order:
|
|
cur_project += 1
|
|
print(" %3d %s" % (cur_project, m))
|
|
|
|
if args.dry_run:
|
|
exit(0)
|
|
|
|
run_make_on_modules(modules, order, target)
|
|
|
|
print(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
|
|
|
|
run(args_)
|
|
|
|
def cmd_test(self, args_):
|
|
parser = argparse.ArgumentParser(description='Test')
|
|
parser.add_argument('blah', default='', help='The blah argument')
|
|
args = parser.parse_args(args_)
|
|
print("blah = " + args.blah)
|
|
|
|
def cmd_os_cascade(self, args_):
|
|
print(' '.join(self.os_cascade()))
|
|
|
|
def cmd_ldlibpath(self, args_):
|
|
parser = argparse.ArgumentParser(description='ldlibpath')
|
|
parser.add_argument('module', nargs='*', help='Modules')
|
|
args = parser.parse_args(args_)
|
|
deps = self.get_modules_from_project_txt(args.module, ['pkg.requires.jw'], [ 'run', 'build', 'devel' ],
|
|
scope = 2, add_self=True, names_only=True)
|
|
r = ''
|
|
for m in deps:
|
|
r = r + ':' + self.proj_dir(m) + '/lib'
|
|
print(r[1:])
|
|
|
|
def cmd_pythonpath(self, args_):
|
|
parser = argparse.ArgumentParser(description='pythonpath')
|
|
parser.add_argument('module', nargs='*', help='Modules')
|
|
args = parser.parse_args(args_)
|
|
deps = self.get_modules_from_project_txt(args.module, ['pkg.requires.jw'], [ 'run', 'build' ],
|
|
scope = 2, add_self=True, names_only=True)
|
|
r = ''
|
|
for m in deps:
|
|
pdir = self.proj_dir(m)
|
|
for subdir in [ 'src/python', 'tools/python' ]:
|
|
cand = pdir + "/" + subdir
|
|
if isdir(cand):
|
|
r = r + ':' + cand
|
|
print(r[1:])
|
|
|
|
def cmd_exepath(self, args_):
|
|
parser = argparse.ArgumentParser(description='exepath')
|
|
parser.add_argument('module', nargs='*', help='Modules')
|
|
args = parser.parse_args(args_)
|
|
deps = self.get_modules_from_project_txt(args.module, ['pkg.requires.jw'], [ 'run', 'build', 'devel' ],
|
|
scope = 2, add_self=True, names_only=True)
|
|
self.debug('deps = ', deps)
|
|
r = ''
|
|
for m in deps:
|
|
r = r + ':' + self.proj_dir(m) + '/bin'
|
|
print(r[1:])
|
|
|
|
def cmd_libname(self, args_):
|
|
parser = argparse.ArgumentParser(description='libname')
|
|
parser.add_argument('module', nargs='*', help='Modules')
|
|
args = parser.parse_args(args_)
|
|
print(self.get_libname(args.module))
|
|
|
|
def cmd_ldflags(self, args_):
|
|
parser = argparse.ArgumentParser(description='ldflags')
|
|
parser.add_argument('module', nargs='*', help='Modules')
|
|
parser.add_argument('--exclude', action='append', help='Exclude Modules', default=[])
|
|
parser.add_argument('-s', '--add-self', action='store_true',
|
|
default=False, help='Include libflags of specified modules, too, not only their dependencies')
|
|
args = parser.parse_args(args_)
|
|
print(self.get_ldflags(args.module, args.exclude, args.add_self))
|
|
|
|
def cmd_cflags(self, args_):
|
|
parser = argparse.ArgumentParser(description='cflags')
|
|
parser.add_argument('module', nargs='*', help='Modules')
|
|
args = parser.parse_args(args_)
|
|
deps = self.get_modules_from_project_txt(args.module, ['pkg.requires.jw'], 'build',
|
|
scope = 2, add_self=True, names_only=True)
|
|
r = ''
|
|
for m in reversed(deps):
|
|
r = r + ' -I' + self.proj_dir(m) + '/include'
|
|
print(r[1:])
|
|
|
|
def cmd_path(self, args_):
|
|
parser = argparse.ArgumentParser(description='path')
|
|
parser.add_argument('module', nargs='*', help='Modules')
|
|
args = parser.parse_args(args_)
|
|
deps = self.get_modules_from_project_txt(args.module, ['pkg.requires.jw'], 'run',
|
|
scope = 2, add_self=True, names_only=True)
|
|
r = ''
|
|
for m in deps:
|
|
r = r + ':' + self.proj_dir(m) + '/bin'
|
|
print(r[1:])
|
|
|
|
# TODO: seems at least partly redundant to cmd_pkg_requires / print_pkg_relations
|
|
def cmd_prereq(self, args_):
|
|
parser = argparse.ArgumentParser(description='path')
|
|
parser.add_argument('flavour', help='Flavour')
|
|
parser.add_argument('module', nargs='*', help='Modules')
|
|
args = parser.parse_args(args_)
|
|
deps = self.get_modules_from_project_txt(args.module, ['pkg.requires.jw'],
|
|
args.flavour, scope = 2, add_self=False, names_only=True)
|
|
print(' '.join(deps))
|
|
|
|
# TODO: seems at least partly redundant to cmd_pkg_requires / print_pkg_relations
|
|
def cmd_required_os_pkg(self, args_):
|
|
parser = argparse.ArgumentParser(description='required-os-pkg')
|
|
parser.add_argument('module', nargs='*', help='Modules')
|
|
parser.add_argument('--flavours', help='Dependency flavours', default='build')
|
|
parser.add_argument('--skip-excluded', action='store_true', default=False,
|
|
help='Output empty prerequisite list if module is excluded')
|
|
args = parser.parse_args(args_)
|
|
modules = args.module
|
|
flavours = args.flavours.split()
|
|
if 'build' in flavours and not 'run' in flavours:
|
|
# TODO: This adds too much. Only the run dependencies of the build dependencies would be needed.
|
|
flavours.append('run')
|
|
self.debug("flavours = " + args.flavours)
|
|
deps = self.get_modules_from_project_txt(modules, ['pkg.requires.jw'], flavours,
|
|
scope = 2, add_self=True, names_only=True)
|
|
if args.skip_excluded:
|
|
for d in deps:
|
|
if self.is_excluded_from_build(d) is not None:
|
|
deps.remove(d)
|
|
subsecs = self.os_cascade()
|
|
self.debug("subsecs = ", subsecs)
|
|
requires = []
|
|
for s in subsecs:
|
|
for f in flavours:
|
|
vals = self.collect_values(deps, 'pkg.requires.' + s, f)
|
|
if vals:
|
|
requires = requires + vals
|
|
# TODO: add all not in build tree as -devel
|
|
r = ''
|
|
for m in requires:
|
|
r = r + ' ' + m
|
|
print(r[1:])
|
|
|
|
def cmd_pkg_requires(self, args_):
|
|
return self.print_pkg_relations("requires", args_)
|
|
|
|
def cmd_pkg_conflicts(self, args_):
|
|
return self.print_pkg_relations("conflicts", args_)
|
|
|
|
def cmd_pkg_provides(self, args_):
|
|
return self.print_pkg_relations("provides", args_)
|
|
|
|
def cmd_proj_dir(self, args_):
|
|
parser = argparse.ArgumentParser(description='proj-dir')
|
|
parser.add_argument('module', nargs='*', help='Modules')
|
|
args = parser.parse_args(args_)
|
|
r = []
|
|
for m in args.module:
|
|
r.append(self.proj_dir(m))
|
|
print(' '.join(r))
|
|
|
|
def cmd_htdocs_dir(self, args_):
|
|
parser = argparse.ArgumentParser(description='htdocs-dir')
|
|
parser.add_argument('module', nargs='*', help='Modules')
|
|
args = parser.parse_args(args_)
|
|
r = []
|
|
for m in args.module:
|
|
r.append(self.htdocs_dir(m))
|
|
print(' '.join(r))
|
|
|
|
def cmd_summary(self, args_):
|
|
parser = argparse.ArgumentParser(description='summary')
|
|
parser.add_argument('module', nargs='*', help='Modules')
|
|
args = parser.parse_args(args_)
|
|
r = []
|
|
for m in args.module:
|
|
summary = self.get_value(m, "summary", None)
|
|
if summary is not None:
|
|
r.append(summary)
|
|
print(' '.join(r))
|
|
|
|
def contains(self, small, big):
|
|
for i in xrange(len(big)-len(small)+1):
|
|
for j in xrange(len(small)):
|
|
if big[i+j] != small[j]:
|
|
break
|
|
else:
|
|
return i, i+len(small)
|
|
return False
|
|
|
|
def read_dep_graph(self, modules, section, graph):
|
|
for m in modules:
|
|
if m in graph:
|
|
continue
|
|
deps = self.get_modules_from_project_txt([ m ], ['pkg.requires.jw'], section,
|
|
scope = 1, add_self=False, names_only=True)
|
|
if not deps is None:
|
|
graph[m] = deps
|
|
for d in deps:
|
|
self.read_dep_graph([ d ], section, graph)
|
|
|
|
def flip_graph(self, graph):
|
|
r = {}
|
|
for m, deps in graph.items():
|
|
for d in deps:
|
|
if not d in r:
|
|
r[d] = set()
|
|
r[d].add(m)
|
|
return r
|
|
|
|
    def check_circular_deps(self, module, section, graph, unvisited, temp, path):
        """Depth-first cycle search starting at 'module'.

        Args:
            section: flavour name, only used for context (not read here).
            graph: adjacency mapping (module -> iterable of neighbours).
            unvisited: collection of nodes not yet fully processed;
                finished nodes are removed from it (must support remove()).
            temp: set of nodes on the current DFS stack, mutated.
            path: output list; on a hit the cycle path is prepended here
                while the recursion unwinds.

        Returns the module that closes a cycle, or None.
        """
        if module in temp:
            # back edge: module is already on the current DFS stack
            self.debug('found circular dependency at module', module)
            return module
        if not module in unvisited:
            # already fully processed in an earlier traversal
            return None
        temp.add(module)
        if module in graph:
            for m in graph[module]:
                last = self.check_circular_deps(m, section, graph, unvisited, temp, path)
                if last is not None:
                    # record the cycle path front to back while unwinding
                    path.insert(0, m)
                    return last
        unvisited.remove(module)
        temp.remove(module)
|
def cmd_check(self, args_):
|
|
parser = argparse.ArgumentParser(description='check')
|
|
parser.add_argument('module', nargs='*', help='Modules')
|
|
parser.add_argument('-f', '--flavour', nargs='?', default = 'build')
|
|
args = parser.parse_args(args_)
|
|
|
|
graph = {}
|
|
path = []
|
|
self.read_dep_graph(args.module, args.flavour, graph)
|
|
unvisited = graph.keys()
|
|
temp = set()
|
|
while len(unvisited) is not 0:
|
|
m = unvisited[0]
|
|
self.debug('checking circular dependency of', m)
|
|
last = self.check_circular_deps(m, args.flavour, self.flip_graph(graph), unvisited, temp, path)
|
|
if last is not None:
|
|
self.debug('found circular dependency below', m, ', last is', last)
|
|
print('found circular dependency in flavour', args.flavour, ':', ' -> '.join(path))
|
|
exit(1)
|
|
|
|
print('no circular dependency found for flavour', args.flavour, ' in modules:',
|
|
' '.join(args.module))
|
|
exit(0)
|
|
|
|
def cmd_getval(self, args_):
|
|
parser = argparse.ArgumentParser(description='Get value from project config')
|
|
parser.add_argument('--project', default = self.top_name, help = 'Project name')
|
|
parser.add_argument('section', default = '', help = 'Config section')
|
|
parser.add_argument('key', default = '', help = 'Config key')
|
|
args = parser.parse_args(args_)
|
|
print(self.get_value(args.project, args.section, args.key))
|
|
|
|
    # -------------------------------------------------------------------- here we go

    def run(self):
        """Entry point: split sys.argv into the global options and the
        command with its arguments, then dispatch to the cmd_* method
        selected on the command line."""

        # Collect the global options into global_args: everything up to
        # (and including) the first non-option word.  The options listed
        # below take a value, which is consumed as well via 'skip'.
        skip = 0
        for a in sys.argv[1::]:
            self.global_args.append(a)
            if a in [ '-p', '--prefix', '-t', '--topdir', '-O', '--os' ]:
                skip = 1
                continue
            if skip > 0:
                skip = skip -1
                continue
            if a[0] != '-':
                # first positional argument: the command name
                break

        # -- defaults
        self.projs_root = pwd.getpwuid(os.getuid()).pw_dir + "/local/src/jw.dev/proj"

        parser = argparse.ArgumentParser(description='Project metadata evaluation')
        parser.add_argument('-d', '--debug', action='store_true',
                default=False, help='Output debug information to stderr')
        parser.add_argument('-t', '--topdir', nargs=1, default = [], help='Project Path')
        parser.add_argument('-p', '--prefix', nargs=1, default = [ self.projs_root ], help='Projects Path Prefix')
        parser.add_argument('-O', '--os', nargs=1, default = [], help='Target operating system')
        parser.add_argument('cmd', default='', help='Command, one of: ' + self.commands())
        parser.add_argument('arg', nargs='*', help='Command arguments')
        args = parser.parse_args(self.global_args)

        self.opt_debug = args.debug
        if len(args.os):
            self.opt_os = args.os[0]

        self.debug("----------------------------------------- running ", ' '.join(sys.argv))

        self.projs_root = args.prefix[0]
        self.debug("projs_root = ", self.projs_root)
        if len(args.topdir):
            self.topdir = args.topdir[0]

        if self.topdir:
            # top-level project name: taken from its project.conf or, as a
            # fallback, derived from the directory name with a trailing
            # version number stripped
            self.top_name = self.res_cache.run(self.read_value, [self.topdir + '/make/project.conf', 'build', 'name'])
            if not self.top_name:
                self.top_name = re.sub('-[0-9.-]*$', '', basename(realpath(self.topdir)))

        # dispatch: 'foo-bar' maps to method cmd_foo_bar; the command's
        # arguments are everything in argv after the global options
        cmd_name = 'cmd_' + args.cmd.replace('-', '_')
        cmd = getattr(self, cmd_name)
        cmd(sys.argv[(len(self.global_args) + 1)::])
|
# script entry point: parse argv and dispatch to the selected command
if __name__ == "__main__":
    projects = Projects()
    projects.run()
|