mirror of
ssh://git.janware.com/srv/git/janware/proj/jw-pkg
synced 2026-01-15 03:53:32 +01:00
The append() shell function was unable to cope with special characters, notably with the parentheses in RPM provides such as libnnz12.so()(64bit); this commit fixes that and introduces some (disabled) debug code into projects.py. Signed-off-by: Jan Lindemann <jan@janware.com>
901 lines
32 KiB
Python
901 lines
32 KiB
Python
#!/usr/bin/python2 -u
|
|
|
|
from __future__ import print_function
|
|
import os
|
|
import sys
|
|
import argparse
|
|
import pwd
|
|
from sets import Set
|
|
from os.path import isfile
|
|
from os.path import isdir
|
|
from os.path import expanduser
|
|
from os.path import basename
|
|
from os.path import realpath
|
|
import subprocess
|
|
import re
|
|
import platform
|
|
import datetime
|
|
|
|
# meaning of pkg.requires.xxx variables
|
|
# build: needs to be built and installed before this can be built
|
|
# devel: needs to be installed before this-devel can be installed, i.e. before _other_ packages can be built against this
|
|
# run: needs to be installed before this-run can be installed, i.e. before this and other packages can run with this
|
|
|
|
# --------------------------------------------------------------------- helpers
|
|
|
|
class ResultCache(object):
    """Memoise function results keyed by function name and stringified args.

    Keys are normalised with str() (None becomes 'None'), so arguments whose
    string forms collide (e.g. 1 and '1') share one cache slot -- callers
    rely on this only for simple scalar arguments.
    """

    def __init__(self):
        # Flat mapping: (func.__name__, str(arg0), str(arg1), ...) -> result.
        self.__cache = {}

    def run(self, func, args):
        """Return func(*args), computing it at most once per key.

        BUG FIX: the old nested-dict walk crashed (TypeError, or raised
        "cache algorithm failed") when the same function was called with
        argument lists of different lengths sharing a prefix, because a
        stored result and a nested level dict occupied the same slot.
        A flat tuple key cannot collide that way.
        """
        key = tuple('None' if k is None else str(k)
                    for k in [func.__name__] + args)
        if key not in self.__cache:
            self.__cache[key] = func(*args)
        return self.__cache[key]
|
|
|
|
class Build(object):
    """Builds project modules in dependency order by running make in each
    module's source directory (run() is the CLI entry point)."""

    def __init__(self):
        # Stateless; shared caches live at module level (res_cache, dep_cache).
        pass
|
|
|
|
def find_proj_path(self, name):
    """Locate a module's source directory below the global projs_root.

    A 'dspider-' package-name prefix is dropped before searching.
    Raises Exception when the module is found in none of the known subtrees.
    """
    short = name.replace("dspider-", "")
    for subtree in (".", "dspc/src", "dspc/src/dspcd-plugins", "dspc/src/io"):
        candidate = projs_root + "/" + subtree + "/" + short
        if os.path.exists(candidate):
            return os.path.abspath(candidate)
    raise Exception("module " + short + " not found below " + projs_root)
|
|
|
|
def find_proj_path_cached(self, name):
    """Memoised wrapper around find_proj_path() via the global res_cache."""
    return res_cache.run(self.find_proj_path, [ name ])
|
|
|
|
def read_deps(self, cur, prereq_type):
    """Return the `prereq_type` prerequisites of module `cur`.

    Results are memoised in the module-level dep_cache; `cur` itself is
    removed from its own prerequisite list before caching.
    """
    # dep cache doesn't make a difference at all
    if prereq_type in dep_cache:
        if cur in dep_cache[prereq_type]:
            return dep_cache[prereq_type][cur]
    else:
        dep_cache[prereq_type] = {}

    if True:
        # current implementation: evaluate project.conf in-process
        r = get_modules_from_project_txt([ cur ], 'pkg.requires.jw',
            prereq_type, scope = 2, add_self=False, names_only=True)
        debug('prerequisites = ' + ' '.join(r))
    else: # legacy from build.py -- dead code kept deliberately for reference
        projects_py="/usr/bin/python2 " + my_dir + "/projects.py --prefix " + projs_root + " " + os.getenv('PROJECTS_PY_EXTRA_ARGS', "")
        cmd = projects_py + " prereq " + prereq_type + " " + cur
        debug('running', cmd)
        p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
        p.wait()
        if p.returncode:
            raise Exception("failed to get " + prereq_type + " prerequisites for " + cur + ": " + cmd)
        r = Set()
        pattern = re.compile(r'.*') # might be useful at a later point, currently pointless
        for line in iter(p.stdout.readline,''):
            debug(cmd + ' returned: ', line)
            if not pattern.match(line):
                continue
            for d in line.split():
                r.add(d)
    # a module must never list itself as its own prerequisite
    if cur in r:
        r.remove(cur)
    debug('inserting', prereq_type, "prerequisites of", cur, ":", ' '.join(r))
    dep_cache[prereq_type][cur] = r
    return r
|
|
|
|
def read_deps_cached(self, cur, prereq_type):
    """Memoised wrapper around read_deps() via the global res_cache."""
    return res_cache.run(self.read_deps, [ cur, prereq_type ])
|
|
|
|
def add_dep_tree(self, cur, prereq_types, tree, all_deps):
    """Recursively record the prerequisites of `cur` in `tree`.

    tree     -- maps module -> Set of direct prerequisites
    all_deps -- every module seen so far (also guards against re-visiting)
    Returns the number of direct prerequisites of `cur` (0 if already seen).
    """
    debug("adding prerequisites " + ' '.join(prereq_types) + " of module " + cur)
    if cur in all_deps:
        debug('already handled module ' + cur)
        return 0

    all_deps.add(cur)
    direct = Set()
    for ptype in prereq_types:
        debug("checking prereqisites of type " + ptype)
        direct.update(self.read_deps_cached(cur, ptype))
    for dep in direct:
        self.add_dep_tree(dep, prereq_types, tree, all_deps)
    tree[cur] = direct
    return len(direct)
|
|
|
|
def calculate_order(self, order, modules, prereq_types):
    """Topologically sort `modules` plus all their prerequisites into `order`.

    Kahn-style: repeatedly pick a module with no outstanding prerequisites
    and remove it from everyone else's prerequisite sets.  Raises Exception
    when no such module remains, i.e. the graph contains a cycle.
    Returns 1 on success; `order` is filled in place.
    """
    all_deps = Set()
    dep_tree = {}
    for m in modules:
        debug("--- adding dependency tree of module " + m)
        self.add_dep_tree(m, prereq_types, dep_tree, all_deps)
    while len(all_deps):
        # find any module whose prerequisite set is empty
        for d in all_deps:
            if not len(dep_tree[d]):
                break
        else:
            # loop completed without break: every remaining module still has
            # prerequisites -> circular dependency
            print(all_deps)
            raise Exception("fatal: the dependencies between these modules are unresolvable")
        order.append(d)
        all_deps.remove(d)
        # d is resolved: drop it from all prerequisite sets
        for k in dep_tree.keys():
            if d in dep_tree[k]:
                dep_tree[k].remove(d)
    return 1
|
|
|
|
def run_make(self, module, target, cur_project, num_projects):
    """Run `make <target>` in a module's project directory, echoing its
    output with a '| ' prefix inside a banner frame.

    Skips modules excluded for the current platform.  Raises Exception
    when make exits non-zero.  Note: changes the process working directory.
    """
    #make_cmd = "make " + target + " 2>&1"
    make_cmd = [ "make", target ]
    path = self.find_proj_path_cached(module)

    # banner line framing the make output, padded to a fixed width
    delim_len=120
    delim='---- [%d/%d]: running %s in %s -' % (cur_project, num_projects, make_cmd, path)
    delim = delim + '-' * (delim_len - len(delim))

    print(',' + delim + ' >')

    patt = is_excluded_from_build(module)
    if patt is not None:
        print('| Configured to skip build on platform >' + patt + '<')
        print('`' + delim + ' <')
        return

    os.chdir(path)
    p = subprocess.Popen(make_cmd, shell=False, stdout=subprocess.PIPE, stderr=None, close_fds=True)
    for line in iter(p.stdout.readline, ''):
        sys.stdout.write('| ' + line) # avoid extra newlines from print()
        sys.stdout.flush()
    p.wait()
    print('`' + delim + ' <')
    if p.returncode:
        # BUG FIX: make_cmd is a list; the old `make_cmd + ' failed'` raised
        # TypeError, and time.strftime was used although `time` was never
        # imported (NameError).  Use join and the imported datetime instead.
        print(' '.join(make_cmd) + ' failed')
        raise Exception(datetime.datetime.now().strftime("%Y-%m-%d %H:%M") + ": failed to make target " + target + " in module " + module + " below base " + projs_root)
|
|
|
|
def run_make_on_modules(self, modules, order, target):
    """Run `make target` over every module in `order`.

    Clean targets run in reverse dependency order and stop early once all
    originally requested modules have been cleaned.
    """
    total = len(order)
    count = 0
    if target in ["clean", "distclean"]:
        for mod in reversed(order):
            count += 1
            self.run_make(mod, target, count, total)
            if mod in modules:
                modules.remove(mod)
            if not len(modules):
                print("all modules cleaned")
                return
    else:
        for mod in order:
            count += 1
            self.run_make(mod, target, count, total)
|
|
|
|
def run(self, args_):
    """Entry point of the 'build' command.

    Parses the build-specific command line, computes a dependency-ordered
    module list and runs make over it (or just prints the order).
    """
    global do_debug

    # NOTE: the old code assigned unused locals here (visited, glob_order and
    # a local projs_root that shadowed -- but never replaced -- the module
    # global used by find_proj_path); they have been removed.

    # -- parse command line
    parser = argparse.ArgumentParser(description='janware software project build tool')
    # (typo fix in user-visible help text: "Space seperated ist" -> below)
    parser.add_argument('--exclude', default='', help='Space separated list of modules to be excluded from build')
    parser.add_argument('--debug', '-d', action='store_true',
        default=False, help='Output debug information to stderr')
    parser.add_argument('--dry-run', '-n', action='store_true',
        default=False, help='Don\'t build anything, just print what would be done.')
    parser.add_argument('--build-order', '-O', action='store_true',
        default=False, help='Don\'t build anything, just print the build order.')
    parser.add_argument('--ignore-deps', '-I', action='store_true',
        default=False, help='Don\'t build dependencies, i.e. build only modules specified on the command line')
    parser.add_argument('target', default='all', help='Build target')
    parser.add_argument('modules', nargs='+', default='', help='Modules to be built')

    args=parser.parse_args(args_)
    if args.debug:
        do_debug = True

    debug("----------------------------------------- running ", ' '.join(args_))

    modules=args.modules
    exclude=args.exclude.split()
    target=args.target

    env_exclude=os.getenv('BUILD_EXCLUDE', '')
    if len(env_exclude):
        print("excluding modules from environment: " + env_exclude)
        # BUG FIX: `exclude` is a list -- the old `exclude += " " + env_exclude`
        # extended it with the string's individual CHARACTERS, so the
        # environment exclusion never worked.  Split into module names.
        exclude += env_exclude.split()

    # -- build
    order = []

    # packaging targets need the full relation set, plain builds only 'build'
    glob_prereq_types = [ "build" ]
    if re.match("pkg-.*", target) is not None:
        glob_prereq_types = [ "build", "run", "release", "devel" ]

    if target != 'order' and not args.build_order:
        print("using prerequisite types " + ' '.join(glob_prereq_types))
        print("calculating order for modules ... ")

    self.calculate_order(order, modules, glob_prereq_types)
    if args.ignore_deps:
        order = [m for m in order if m in args.modules]
    order = [m for m in order if m not in exclude]
    if target == 'order' or args.build_order:
        print(' '.join(order))
        exit(0)

    cur_project = 0
    print("Building target %s in %d projects:" % (target, len(order)))
    for m in order:
        cur_project += 1
        print(" %3d %s" % (cur_project, m))

    if args.dry_run:
        exit(0)

    self.run_make_on_modules(modules, order, target)

    # timestamp marking the end of the build
    print(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
|
|
|
|
def debug(*objs):
    """Print objs to stderr with a 'DEBUG: ' prefix when do_debug is on."""
    if not do_debug:
        return
    print("DEBUG: ", *objs, file=sys.stderr)
|
|
|
|
def err(*objs):
    """Print objs to stderr with an 'ERR: ' prefix (always, unlike debug())."""
    print("ERR: ", *objs, file=sys.stderr)
|
|
|
|
def proj_dir(name):
    """Return the checkout directory of a project.

    The top-level project (top_name) may live outside projs_root, at topdir.
    """
    return topdir if name == top_name else projs_root + '/' + name
|
|
|
|
def re_section(name):
    """Return a compiled regex matching the INI section '[name]' including its
    body, up to (but not including) the next '[' section header.

    BUG FIX: the old pattern used unescaped brackets, which made
    '[' + name + ']' a character class and '(?=[)' an invalid expression
    that raised re.error on compilation.  Brackets are escaped now and the
    section name is passed through re.escape().
    NOTE: the lookahead still requires a following '[' -- the last section
    of a file is not matched, as before (by intent of the original pattern).
    """
    return re.compile(r'\[' + re.escape(name) + r'\]'
                      r'.*?'
                      r'(?=\[)',
                      re.DOTALL)
|
|
|
|
def remove_duplicates(seq):
    """Return seq's items in original order, keeping only first occurrences."""
    seen = set()
    result = []
    for item in seq:
        if item not in seen:
            seen.add(item)
            result.append(item)
    return result
|
|
|
|
def get_os(args = ""):
    """Run the first available get-os.sh helper and return its one-line OS id.

    `args` is appended verbatim to the shell command line.  Falls back to
    "linux" when no script exists or every script fails.
    """
    for d in [ projs_root + '/jw-build/scripts', '/opt/jw-build/bin' ]:
        script = d + '/get-os.sh'
        if not isfile(script):
            continue
        cmd = '/bin/bash ' + script
        if args:
            cmd = cmd + ' ' + args
        # NOTE(review): shell=True with string concatenation -- safe only as
        # long as `args` never carries untrusted input; confirm callers.
        p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
        (out, _) = p.communicate()
        # BUG FIX: the old code tested the second communicate() value, which
        # is always None because stderr is not piped -- failures were never
        # detected.  Test the exit status instead.
        if p.returncode:
            err("failed to run ", cmd)
            continue
        return re.sub('\n', '', out)
    return "linux"
|
|
|
|
# TODO: add support for customizing this in project.conf
def htdocs_dir(name):
    """Return the first existing htdocs directory for project `name`, or None."""
    base = proj_dir(name)
    for candidate in (base + "/tools/html/htdocs",
                      base + "/htdocs",
                      "/srv/www/proj/" + name):
        if isdir(candidate):
            return candidate
    return None
|
|
|
|
def os_cascade():
    """Return the OS specialisation cascade, most generic first.

    e.g. ['os', 'linux', 'suse', 'suse-tumbleweed'].  Computed once and
    cached in the module global glob_os_cascade.
    """
    global glob_os_cascade
    if glob_os_cascade is not None:
        return glob_os_cascade

    cascade = [ 'os', platform.system().lower() ]
    # renamed from 'os' -- the old local shadowed the imported os module
    os_id = res_cache.run(get_os, [])
    vendor = re.sub('-.*', '', os_id)

    # strip trailing '.N' version components one by one: suse-15.4 -> suse-15
    series = os_id
    while True:
        stripped = re.sub('\.[0-9]+$', '', series)
        if stripped == series:
            break
        cascade.append(stripped)
        series = stripped

    if vendor not in cascade:
        cascade.append(vendor)
    if os_id not in cascade:
        cascade.append(os_id)

    glob_os_cascade = cascade
    return cascade
|
|
|
|
def strip_module_from_spec(mod):
    """Reduce a dependency spec like 'foo-devel >= 1.2' to the project name.

    Cuts any version comparison and a trailing -devel/-run subpackage suffix.
    NOTE(review): '-dev' is not stripped here, unlike some regexes elsewhere
    in this file -- confirm that asymmetry is intended.
    """
    base = re.split('([=><]+)', mod)[0].strip()
    return re.sub(r'-devel$|-run$', '', base)
|
|
|
|
def get_section(path, section):
    """Return the raw body text of [section] in file `path`, rstripped.

    The section header itself is not included; reading stops at the next
    line starting with '['.  Returns '' when the section is missing.
    Raises the usual I/O errors when the file cannot be opened.
    """
    r = ''
    pat = '[' + section + ']'
    in_section = False
    # 'with' guarantees the handle is closed even when iteration raises;
    # the old open()/close() pair leaked it on errors.
    with open(path) as file:
        for line in file:
            if (line.rstrip() == pat):
                in_section = True
                continue
            if in_section:
                if len(line) and line[0] == '[':
                    break
                r = r + line
    return r.rstrip()
|
|
|
|
def read_value(path, section, key):
    """Return the first value of `key` in `section` of config file `path`.

    section == '' -> search the whole file for `key`.
    key is None   -> return the first line of the section instead.
    Returns None when the file or key is missing.  A missing file
    special-cases build.libname to 'none' so library-less projects work.
    """
    debug("opening ", path)
    try:
        file = open(path)
    except EnvironmentError:
        # BUG FIX (robustness): only swallow I/O errors; the old bare
        # 'except:' also hid genuine programming errors such as a bad path
        # type.  Matches the EnvironmentError handling used in get_value().
        debug(path, "not found")
        # TODO: handle this special case cleaner somewhere up the stack
        if section == 'build' and key == 'libname':
            return 'none'
        return None
    try:
        r = []
        if not len(section):
            # no section given: scan every line of the file
            for line in file:
                r = re.findall('^ *' + key + ' *= *(.*)', line)
                if (len(r) > 0):
                    break
        else:
            in_section = False
            pat = '[' + section + ']'
            for line in file:
                if (line.rstrip() == pat):
                    in_section = True
                    continue
                if in_section:
                    if len(line) and line[0] == '[':
                        break
                    if key is None:
                        r.append(line)
                    else:
                        r = re.findall('^ *' + key + ' *= *(.*)', line)
                        if (len(r) > 0):
                            break
    finally:
        # BUG FIX: close even when parsing raises (the handle leaked before)
        file.close()

    if len(r):
        return r[0]
    return None
|
|
|
|
def get_value(name, section, key):
    """Look up [section].key in a project's make/project.conf.

    The pseudo-section 'version' is special-cased: it reads the project's
    VERSION file (or the installed copy under /usr/share/doc/packages),
    with newline and '-dev' marker stripped, and raises Exception when no
    VERSION file is readable.
    """
    debug("getting value [%s].%s for project %s (%s)" %(section, key, name, top_name))
    if top_name and name == top_name:
        proj_root = topdir
    else:
        proj_root = projs_root + '/' + name
    debug("proj_root = " + proj_root)

    if section == 'version':
        proj_version_dirs = [ proj_root ]
        if proj_root != topdir:
            proj_version_dirs.append('/usr/share/doc/packages/' + name)
        for d in proj_version_dirs:
            version_path = d + '/VERSION'
            try:
                # FIX: dropped the redundant fd.close() -- the with-statement
                # already closes the file on exit
                with open(version_path) as fd:
                    return fd.read().replace('\n', '').replace('-dev', '')
            except EnvironmentError:
                debug("ignoring unreadable file " + version_path)
                continue
        raise Exception("No version file found for project \"" + name + "\"")

    path = proj_root + '/make/project.conf'
    return res_cache.run(read_value, [path, section, key])
|
|
|
|
def collect_values(names, section, key):
    """Concatenate [section].key values of all projects in `names` and return
    them comma-split, stripped and deduplicated (first occurrence wins)."""
    values = [get_value(n, section, key) for n in names]
    joined = "".join(" " + v for v in values if v)
    return remove_duplicates([item.strip() for item in joined.split(",")])
|
|
|
|
# scope 0: no children
|
|
# scope 1: children
|
|
# scope 2: recursive
|
|
|
|
def add_modules_from_project_txt_cached(buf, visited, spec, section, key, add_self, scope,
        names_only):
    """Memoised wrapper around add_modules_from_project_txt().

    NOTE(review): the cache key includes str(buf) and str(visited), which
    change as results accumulate, and the wrapped function works by MUTATING
    `buf` while returning None -- on a cache hit `buf` would not be updated.
    Hits should therefore be rare; verify before relying on this cache.
    """
    return res_cache.run(add_modules_from_project_txt, [buf, visited, spec, section, key,
        add_self, scope, names_only])
|
|
|
|
def add_modules_from_project_txt(buf, visited, spec, section, key, add_self, scope,
        names_only):
    """Append `spec`'s [section].key dependencies (depth-first) to `buf`.

    buf        -- output list: dependencies first, then spec if add_self
    visited    -- set guarding against cycles and duplicate traversal
    scope      -- 0: spec only, 1: direct children only, 2: fully recursive
    names_only -- reduce specs like 'foo-devel >= 1' to the project name
    """
    name = strip_module_from_spec(spec)
    if names_only:
        spec = name
    if spec in buf:
        return
    if spec in visited:
        # already traversed; just record the spec itself when requested
        if add_self:
            buf.append(spec)
        return
    visited.add(spec)
    deps = get_value(name, section, key)
    debug("name = ", name, "section = ", section, "key = ", key, "deps = ", deps, "scope = ", scope, "visited = ", visited)
    if deps and scope > 0:
        # children of a scope-1 lookup are leaves; scope 2 recurses fully
        if scope == 1:
            subscope = 0
        else:
            subscope = 2
        deps = deps.split(',')
        for dep in deps:
            dep = dep.strip()
            if not(len(dep)):
                continue
            add_modules_from_project_txt_cached(buf, visited, dep,
                section, key, add_self=True, scope=subscope,
                names_only=names_only)
    if add_self:
        buf.append(spec)
|
|
|
|
def get_modules_from_project_txt(names, section, keys, add_self, scope,
        names_only = True):
    """Collect module names/specs from [section].<key> of the given projects.

    keys may be a single string or a list of keys; the result keeps
    first-seen order and is deduplicated across all keys and names.
    """
    if isinstance(keys, basestring):
        keys = [ keys ]
    r = []
    # PERF FIX (the old TODO): membership is tracked in a set, replacing the
    # O(n^2) 'not m in r' list scan; output order is unchanged.
    seen = Set()
    for key in keys:
        visited = Set()
        for name in names:
            rr = []
            add_modules_from_project_txt_cached(rr, visited, name, section, key, add_self, scope,
                names_only)
            for m in rr:
                if m not in seen:
                    seen.add(m)
                    r.append(m)
    return r
|
|
|
|
def pkg_relations(rel_type, args_):
    """Shared implementation of the pkg-requires/-conflicts/-provides commands.

    Reads [pkg.<rel_type>.<subsection>].<flavour> entries for each module,
    expands VERSION / VERSION-REVISION macros against the dependency's
    VERSION file, and prints the delimiter-joined result.
    """
    parser = argparse.ArgumentParser(description='pkg-' + rel_type)
    # TODO: implement Vendor evaluation

    parser.add_argument('--subsections', '-S', nargs='?', default=None, help='Subsections to consider, comma-separated')
    parser.add_argument('--delimiter', '-d', nargs='?', default=', ', help='Output words delimiter')
    parser.add_argument('flavour', help='Flavour')
    parser.add_argument('module', nargs='*', help='Modules')
    parser.add_argument('--no-subpackages', '-p', action='store_true',
        default=False, help='Cut -run and -devel from package names')
    parser.add_argument('--no-version', action='store_true',
        default=False, help='Don\'t report version information')
    parser.add_argument('--dont-strip-revision', action='store_true',
        default=False, help='Always treat VERSION macro as VERSION-REVISION')
    parser.add_argument('--dont-expand-version-macros', action='store_true',
        default=False, help='Don\'t expand VERSION and REVISION macros')
    args = parser.parse_args(args_)
    version_pattern=re.compile("[0-9-.]*")
    if args.subsections is None:
        subsecs = os_cascade()
        # NOTE(review): appends to the list cached in glob_os_cascade,
        # mutating it for later callers -- confirm this is intended
        subsecs.append('jw')
    else:
        subsecs = args.subsections.split(',')
    debug('flavour = ', args.flavour, ', subsecs = ', ' '.join(subsecs))
    r = []
    flavours = args.flavour.split(',')
    for flavour in flavours:
        for s in subsecs:
            for m in args.module:
                value = get_value(m, 'pkg.' + rel_type + '.' + s, flavour)
                if not value:
                    continue
                deps = value.split(',')
                for spec in deps:
                    # split into [name, operator, version] if an operator exists
                    dep = re.split('([=><]+)', spec)
                    if args.no_version:
                        dep = dep[:1]
                    dep = map(str.strip, dep)
                    if args.no_subpackages:
                        dep[0] = re.sub('-dev$|-devel$|-run$', '', dep[0])
                    for i, item in enumerate(dep):
                        dep[i] = item.strip()
                    if s == 'jw' and len(dep) == 3:
                        # expand version macros from the dependency's VERSION
                        dep_project = re.sub(r'-devel$|-dev$|-run$', '', dep[0])
                        if args.dont_expand_version_macros and dep_project in args.module:
                            version = dep[2]
                        else:
                            version = get_value(dep_project, 'version', '')
                        if dep[2] == 'VERSION':
                            if args.dont_strip_revision:
                                dep[2] = version
                            else:
                                dep[2] = version.split('-')[0]
                        elif dep[2] == 'VERSION-REVISION':
                            dep[2] = version
                        elif version_pattern.match(dep[2]):
                            # literal version string: keep as-is.
                            # NOTE(review): "[0-9-.]*" matches the empty
                            # string, so this branch always takes -- the
                            # Exception below is unreachable; confirm intent.
                            pass
                        else:
                            raise Exception("Unknown version specifier in " + spec)
                    r.append(' '.join(dep))
    print(args.delimiter.join(r))
|
|
|
|
def get_libname(names):
    """Return space-separated library names for the given projects.

    Falls back to the project names themselves when no libname is
    configured; the placeholder 'none' is dropped from the result.
    """
    libnames = get_modules_from_project_txt(names, 'build', 'libname',
        scope = 1, add_self=False, names_only=True)
    if not libnames:
        return ' '.join(names)
    if 'none' in libnames:
        libnames.remove('none')
    return ' '.join(reversed(libnames))
|
|
|
|
def is_excluded_from_build(module):
    """Return the matching exclude pattern when `module` is excluded from the
    build on the current platform, else None."""
    debug("checking if module " + module + " is excluded from build")
    exclude = get_modules_from_project_txt([ module ], 'build', 'exclude',
        scope = 1, add_self=False, names_only=True)
    platforms = os_cascade() + [ 'all' ]
    for pattern in exclude:
        if pattern in platforms:
            return pattern
    return None
|
|
|
|
# -L needs to contain more paths than libs linked with -l would require
def get_ldpathflags(names, exclude = []):
    """Return ' -L<dir> ...' linker search-path flags for the (recursive)
    build dependencies of `names`, or '' when there are none.

    The result keeps its leading space as separator; callers strip it.
    BUG FIX: this function used to print() the flags and return None, so
    its only caller, get_ldflags(), could never actually prepend them.
    """
    deps = get_modules_from_project_txt(names, 'pkg.requires.jw', 'build',
        scope = 2, add_self=True, names_only=True)
    r = ''
    for m in deps:
        if m in exclude:
            continue
        libname = get_libname([m])
        if len(libname):
            r = r + ' -L' + proj_dir(m) + '/lib'
    return r
|
|
|
|
def get_ldflags(names, exclude = [], add_self_ = False):
    """Return '-L... -l...' linker flags for the direct build dependencies of
    `names` ('' when there are none).

    add_self_ includes the named modules' own libs, not only their deps.
    """
    deps = get_modules_from_project_txt(names, 'pkg.requires.jw', 'build',
        scope = 1, add_self=add_self_, names_only=True)
    debug("deps = " + ' '.join(deps))
    r = ''
    for m in reversed(deps):
        if m in exclude:
            continue
        libname = get_libname([m])
        if len(libname):
            r = r + ' -l' + libname
    if len(r):
        ldpathflags = get_ldpathflags(names, exclude)
        if ldpathflags:
            # BUG FIX: the old code inserted an extra ' ' and then stripped
            # the FIRST CHARACTER OF ldpathflags via r[1:].  Both strings
            # already carry a leading-space separator.
            return (ldpathflags + r)[1:]
        return r[1:]
    return ''
|
|
|
|
def commands():
    """Scan this script's own source for cmd_* functions and return their
    CLI names ('cmd_foo_bar' -> 'foo-bar'), space-joined."""
    cmds = []
    # FIX: 'with' closes the handle even if an iteration/debug error occurs;
    # the old open()/close() pair leaked it on exceptions.
    with open(sys.argv[0]) as f:
        for line in f:
            debug("checking line ", line)
            rr = re.findall('^def *cmd_([a-z0-9_]+).*', line)
            if len(rr):
                cmds.append(rr[0].replace('_', '-'))
    return ' '.join(cmds)
|
|
|
|
# --------------------------------------------------------------------- commands
|
|
|
|
def cmd_commands(args_):
    """CLI: list all available sub-commands of this script."""
    print(commands())
|
|
|
|
def cmd_build(args_):
    """CLI: build modules in dependency order (see Build.run)."""
    build = Build()
    build.run(args_)
|
|
|
|
def cmd_test(args_):
    """CLI: demo command used to exercise the argument-parsing plumbing."""
    parser = argparse.ArgumentParser(description='Test')
    parser.add_argument('blah', default='', help='The blah argument')
    parsed = parser.parse_args(args_)
    print("blah = " + parsed.blah)
|
|
|
|
# TODO: seems at least partly redundant to cmd_pkg_requires / pkg_relations
def cmd_requires_pkg(args_):
    """CLI: print the external package requirements of the given modules and
    all their jw dependencies, space-separated."""
    parser = argparse.ArgumentParser(description='requires-pkg')
    parser.add_argument('module', nargs='*', help='Modules')
    parser.add_argument('--flavours', help='Dependency flavours', default='build')
    parser.add_argument('--skip-excluded', action='store_true', default=False,
        help='Output empty prerequisite list if module is excluded')
    args = parser.parse_args(args_)
    modules = args.module
    flavours = args.flavours.split()
    debug("flavours = " + args.flavours)
    deps = get_modules_from_project_txt(modules, 'pkg.requires.jw', flavours,
        scope = 2, add_self=True, names_only=True)
    if args.skip_excluded:
        # BUG FIX: the old loop removed entries from `deps` while iterating
        # it, which skips the element following each removal.  Rebuild the
        # list instead.
        deps = [d for d in deps if is_excluded_from_build(d) is None]
    subsecs = os_cascade()
    debug("subsecs = ", subsecs)
    requires = []
    for s in subsecs:
        for f in flavours:
            vals = collect_values(deps, 'pkg.requires.' + s, f)
            if vals:
                requires = requires + vals
    # TODO: add all not in build tree as -devel
    r = ''
    for m in requires:
        r = r + ' ' + m
    print(r[1:])
|
|
|
|
def cmd_os_cascade(args_):
    """CLI: print the OS specialisation cascade, space-separated."""
    print(' '.join(os_cascade()))
|
|
|
|
def cmd_ldlibpath(args_):
    """CLI: print a ':'-joined LD_LIBRARY_PATH covering the modules and all
    their run/build/devel dependencies."""
    parser = argparse.ArgumentParser(description='ldlibpath')
    parser.add_argument('module', nargs='*', help='Modules')
    opts = parser.parse_args(args_)
    deps = get_modules_from_project_txt(opts.module, 'pkg.requires.jw', [ 'run', 'build', 'devel' ],
        scope = 2, add_self=True, names_only=True)
    print(':'.join(proj_dir(m) + '/lib' for m in deps))
|
|
|
|
def cmd_pythonpath(args_):
    """CLI: print a ':'-joined PYTHONPATH of the existing src/python and
    tools/python directories of the modules and their run/build deps."""
    parser = argparse.ArgumentParser(description='pythonpath')
    parser.add_argument('module', nargs='*', help='Modules')
    opts = parser.parse_args(args_)
    deps = get_modules_from_project_txt(opts.module, 'pkg.requires.jw', [ 'run', 'build' ],
        scope = 2, add_self=True, names_only=True)
    entries = []
    for m in deps:
        base = proj_dir(m)
        for subdir in ( 'src/python', 'tools/python' ):
            candidate = base + "/" + subdir
            if isdir(candidate):
                entries.append(candidate)
    print(':'.join(entries))
|
|
|
|
def cmd_exepath(args_):
    """CLI: print a ':'-joined PATH of the bin directories of the modules and
    their run/build/devel dependencies."""
    parser = argparse.ArgumentParser(description='exepath')
    parser.add_argument('module', nargs='*', help='Modules')
    opts = parser.parse_args(args_)
    deps = get_modules_from_project_txt(opts.module, 'pkg.requires.jw', [ 'run', 'build', 'devel' ],
        scope = 2, add_self=True, names_only=True)
    debug('deps = ', deps)
    print(':'.join(proj_dir(m) + '/bin' for m in deps))
|
|
|
|
def cmd_libname(args_):
    """CLI: print the library names of the given modules."""
    parser = argparse.ArgumentParser(description='libname')
    parser.add_argument('module', nargs='*', help='Modules')
    args=parser.parse_args(args_)
    print(get_libname(args.module))
|
|
|
|
def cmd_ldflags(args_):
    """CLI: print linker flags for the given modules' build dependencies."""
    parser = argparse.ArgumentParser(description='ldflags')
    parser.add_argument('module', nargs='*', help='Modules')
    parser.add_argument('--exclude', action='append', help='Exclude Modules', default=[])
    parser.add_argument('--add-self', '-s', action='store_true',
        default=False, help='Include libflags of specified modules, too, not only their dependencies')
    args=parser.parse_args(args_)
    print(get_ldflags(args.module, args.exclude, args.add_self))
|
|
|
|
def cmd_cflags(args_):
    """CLI: print -I include flags for the modules and all their build deps
    (reversed, most-depended-on last)."""
    parser = argparse.ArgumentParser(description='cflags')
    parser.add_argument('module', nargs='*', help='Modules')
    opts = parser.parse_args(args_)
    deps = get_modules_from_project_txt(opts.module, 'pkg.requires.jw', 'build',
        scope = 2, add_self=True, names_only=True)
    print(' '.join('-I' + proj_dir(m) + '/include' for m in reversed(deps)))
|
|
|
|
def cmd_path(args_):
    """CLI: print a ':'-joined PATH of the bin directories of the modules and
    their run dependencies."""
    parser = argparse.ArgumentParser(description='path')
    parser.add_argument('module', nargs='*', help='Modules')
    opts = parser.parse_args(args_)
    deps = get_modules_from_project_txt(opts.module, 'pkg.requires.jw', 'run',
        scope = 2, add_self=True, names_only=True)
    print(':'.join(proj_dir(m) + '/bin' for m in deps))
|
|
|
|
# TODO: seems at least partly redundant to cmd_pkg_requires / pkg_relations
def cmd_prereq(args_):
    """CLI: print the recursive jw prerequisites of modules for one flavour."""
    # NOTE(review): description='path' looks like a copy-paste from cmd_path;
    # probably should read 'prereq'.
    parser = argparse.ArgumentParser(description='path')
    parser.add_argument('flavour', help='Flavour')
    parser.add_argument('module', nargs='*', help='Modules')
    args = parser.parse_args(args_)
    deps = get_modules_from_project_txt(args.module, 'pkg.requires.jw',
        args.flavour, scope = 2, add_self=False, names_only=True)
    print(' '.join(deps))
|
|
|
|
def cmd_pkg_requires(args_):
    """CLI: print the RPM 'Requires' relations (see pkg_relations)."""
    return pkg_relations("requires", args_)
|
|
|
|
def cmd_pkg_conflicts(args_):
    """CLI: print the RPM 'Conflicts' relations (see pkg_relations)."""
    return pkg_relations("conflicts", args_)
|
|
|
|
def cmd_pkg_provides(args_):
    """CLI: print the RPM 'Provides' relations (see pkg_relations)."""
    return pkg_relations("provides", args_)
|
|
|
|
def cmd_proj_dir(args_):
    """CLI: print the source directory of each module, space-separated."""
    parser = argparse.ArgumentParser(description='proj-dir')
    parser.add_argument('module', nargs='*', help='Modules')
    opts = parser.parse_args(args_)
    print(' '.join(proj_dir(m) for m in opts.module))
|
|
|
|
def cmd_htdocs_dir(args_):
    """CLI: print the htdocs directory of each module, space-separated."""
    parser = argparse.ArgumentParser(description='htdocs-dir')
    parser.add_argument('module', nargs='*', help='Modules')
    opts = parser.parse_args(args_)
    dirs = [htdocs_dir(m) for m in opts.module]
    # NOTE(review): htdocs_dir() may return None, which makes this join raise
    # TypeError -- behaviour preserved from the original; confirm upstream.
    print(' '.join(dirs))
|
|
|
|
def cmd_summary(args_):
    """CLI: print each module's [summary] section first line, space-separated.

    Modules without a summary are silently skipped.
    """
    parser = argparse.ArgumentParser(description='summary')
    parser.add_argument('module', nargs='*', help='Modules')
    opts = parser.parse_args(args_)
    summaries = [s for s in (get_value(m, "summary", None) for m in opts.module)
                 if s is not None]
    print(' '.join(summaries))
|
|
|
|
def contains(small, big):
    """Return (start, end) of the first occurrence of sequence `small` inside
    `big`, or False when it does not occur.

    An empty `small` matches at index 0.  Naive O(len(big)*len(small)) scan.
    COMPAT FIX: uses range() instead of the Python-2-only xrange(); behaviour
    is identical (and the memory difference is irrelevant at these sizes).
    """
    for i in range(len(big) - len(small) + 1):
        for j in range(len(small)):
            if big[i + j] != small[j]:
                break
        else:
            # inner loop ran to completion: full match at offset i
            return i, i + len(small)
    return False
|
|
|
|
def read_dep_graph(modules, section, graph):
    """Recursively fill `graph` with module -> direct-dependency lists for
    the given flavour (`section`); already-known modules are skipped."""
    for module in modules:
        if module in graph:
            continue
        deps = get_modules_from_project_txt([ module ], 'pkg.requires.jw', section,
            scope = 1, add_self=False, names_only=True)
        if deps is None:
            continue
        graph[module] = deps
        for dep in deps:
            read_dep_graph([ dep ], section, graph)
|
|
|
|
def flip_graph(graph):
    """Invert a dependency graph: the result maps each dependency to the Set
    of modules that depend on it."""
    r = {}
    # COMPAT FIX: .items() instead of the Python-2-only .iteritems();
    # iteration semantics here are identical.
    for m, deps in graph.items():
        for d in deps:
            if not d in r:
                r[d] = Set()
            r[d].add(m)
    return r
|
|
|
|
def check_circular_deps(module, section, graph, unvisited, temp, path):
    """Depth-first cycle detection (white/grey/black colouring).

    unvisited -- white nodes not yet fully explored
    temp      -- grey nodes on the current DFS stack
    path      -- filled (front-inserted) with the cycle members on detection
    Returns the module that closes a cycle, or None when none is reachable.
    """
    if module in temp:
        # grey node reached again: cycle closed here
        debug('found circular dependency at module', module)
        return module
    if not module in unvisited:
        # black: this subtree was already fully explored without a cycle
        return None
    temp.add(module)
    if module in graph:
        for m in graph[module]:
            last = check_circular_deps(m, section, graph, unvisited, temp, path)
            if last is not None:
                path.insert(0, m)
                return last
    unvisited.remove(module)
    temp.remove(module)
|
|
|
def cmd_check(args_):
    """CLI: detect circular dependencies among the given modules for one
    flavour; exits 1 printing the cycle, or 0 when the graph is acyclic."""
    parser = argparse.ArgumentParser(description='check')
    parser.add_argument('module', nargs='*', help='Modules')
    parser.add_argument('--flavour', '-f', nargs='?', default = 'build')
    args=parser.parse_args(args_)

    graph = {}
    path = []
    read_dep_graph(args.module, args.flavour, graph)
    # list() keeps this working on Python 3's view objects too
    unvisited = list(graph.keys())
    temp = Set()
    # PERF FIX: flip_graph(graph) is loop-invariant -- the old code rebuilt
    # it on every iteration.
    flipped = flip_graph(graph)
    # BUG FIX: 'len(unvisited) is not 0' compared int IDENTITY, which is
    # implementation-defined; use plain truthiness.
    while unvisited:
        m = unvisited[0]
        debug('checking circular dependency of', m)
        last = check_circular_deps(m, args.flavour, flipped, unvisited, temp, path)
        if last is not None:
            debug('found circular dependency below', m, ', last is', last)
            print('found circular dependency in flavour', args.flavour, ':', ' -> '.join(path))
            exit(1)

    print('no circular dependency found for flavour', args.flavour, ' in modules:',
        ' '.join(args.module))
    exit(0)
|
|
|
|
def cmd_getval(args_):
    """CLI: print one [section].key value from a project's config
    (defaults to the top-level project)."""
    parser = argparse.ArgumentParser(description='Get value from project config')
    parser.add_argument('--project', default = top_name, help = 'Project name')
    parser.add_argument('section', default = '', help = 'Config section')
    parser.add_argument('key', default = '', help = 'Config key')
    opts = parser.parse_args(args_)
    print(get_value(opts.project, opts.section, opts.key))
|
|
|
|
# -------------------------------------------------------------------- here we go
|
|
|
|
global_args = []
res_cache = ResultCache()   # process-wide memoiser for expensive lookups
dep_cache = {}              # read_deps() cache: prereq_type -> {module: deps}
my_dir = os.path.dirname(os.path.realpath(__file__))
do_debug = False            # toggled by --debug / -d
topdir = None               # --topdir: checkout path of the 'top' project
top_name = None             # project name of topdir (from its project.conf)
glob_os_cascade = None      # lazy cache for os_cascade()

# Collect the leading global options plus the command word into global_args;
# everything after the command word is left for the command's own parser.
skip = 0
for a in sys.argv[1::]:
    global_args.append(a)
    if a in [ '--prefix', '-p', '--topdir', '-t' ]:
        # these options take a value: also swallow the next argv word
        skip = 1
        continue
    if skip > 0:
        skip = skip -1
        continue
    if a[0] != '-':
        # first non-option word is the command name -- stop here
        break

parser = argparse.ArgumentParser(description='Project metadata evaluation')
parser.add_argument('cmd', default='', help='Command')
parser.add_argument('--debug', '-d', action='store_true',
    default=False, help='Output debug information to stderr')
parser.add_argument('--topdir', '-t', nargs='?', help='Project Path')
parser.add_argument('--prefix', '-p', nargs='?', default = expanduser("~") +
    '/local/src/jw.dev/proj', help='Projects Path Prefix')
parser.add_argument('arg', nargs='*', help='Command arguments')
args = parser.parse_args(global_args)

do_debug = args.debug

debug("----------------------------------------- running ", ' '.join(sys.argv))

projs_root = args.prefix
if args.topdir:
    topdir = args.topdir
    top_name = res_cache.run(read_value, [topdir + '/make/project.conf', 'build', 'name'])
    if not top_name:
        # fall back to the directory name with any trailing version stripped
        top_name = re.sub('-[0-9.-]*$', '', basename(realpath(topdir)))

# dispatch: 'foo-bar' -> cmd_foo_bar(); remaining argv words go to the command
cmd = getattr(sys.modules[__name__], 'cmd_' + args.cmd.replace('-', '_'))
cmd(sys.argv[(len(global_args) + 1)::])
|