diff --git a/scripts/projects.py b/scripts/projects.py index 7350836f..e4678875 100644 --- a/scripts/projects.py +++ b/scripts/projects.py @@ -44,7 +44,7 @@ class ResultCache(object): else: k = str(k) depth += 1 - #debug('depth = ', depth, 'key = ', k, 'd = ', str(d)) + #self.projects.debug('depth = ', depth, 'key = ', k, 'd = ', str(d)) if k in d: if l == depth: return d[k] @@ -58,39 +58,42 @@ class ResultCache(object): #d = d[k] raise Exception("cache algorithm failed for function", func.__name__, "in depth", depth) +# ----------------------------------------------------------------- class Build + class Build(object): - def __init__(self): + def __init__(self, projects): + self.projects = projects pass def find_proj_path(self, name): - name=name.replace("dspider-", "") - search_path=[".", "dspc/src", "dspc/src/dspcd-plugins", "dspc/src/io" ] + name = name.replace("dspider-", "") + search_path = [".", "dspc/src", "dspc/src/dspcd-plugins", "dspc/src/io" ] for sub in search_path: - path=projs_root + "/" + sub + "/" + name + path = self.projects.projs_root + "/" + sub + "/" + name if os.path.exists(path): return os.path.abspath(path) - raise Exception("module " + name + " not found below " + projs_root) + raise Exception("module " + name + " not found below " + self.projects.projs_root) def find_proj_path_cached(self, name): - return res_cache.run(self.find_proj_path, [ name ]) + return self.projects.res_cache.run(self.find_proj_path, [ name ]) def read_deps(self, cur, prereq_type): # dep cache doesn't make a difference at all - if prereq_type in dep_cache: - if cur in dep_cache[prereq_type]: - return dep_cache[prereq_type][cur] + if prereq_type in self.projects.dep_cache: + if cur in self.projects.dep_cache[prereq_type]: + return self.projects.dep_cache[prereq_type][cur] else: - dep_cache[prereq_type] = {} + self.projects.dep_cache[prereq_type] = {} if True: - r = get_modules_from_project_txt([ cur ], ['pkg.requires.jw'], + r = self.projects.get_modules_from_project_txt([ cur ], ['pkg.requires.jw'], prereq_type, scope = 2, add_self=False, names_only=True) - debug('prerequisites = ' + ' '.join(r)) + self.projects.debug('prerequisites = ' + ' '.join(r)) else: # legacy from build.py - projects_py = sys.executable + " " + my_dir + "/projects.py --prefix " + projs_root + " " + os.getenv('PROJECTS_PY_EXTRA_ARGS', "") + projects_py = sys.executable + " " + self.projects.my_dir + "/projects.py --prefix " + self.projects.projs_root + " " + os.getenv('PROJECTS_PY_EXTRA_ARGS', "") cmd = projects_py + " prereq " + prereq_type + " " + cur - debug('running', cmd) + self.projects.debug('running', cmd) p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE) p.wait() if p.returncode: @@ -99,30 +102,30 @@ class Build(object): pattern = re.compile(r'.*') # might be useful at a later point, currently pointless for line in iter(p.stdout.readline, b''): line = line.decode(sys.stdout.encoding) - debug(cmd + ' returned: ', line) + self.projects.debug(cmd + ' returned: ', line) if not pattern.match(line): continue for d in line.split(): r.add(d) if cur in r: r.remove(cur) - debug('inserting', prereq_type, "prerequisites of", cur, ":", ' '.join(r)) - dep_cache[prereq_type][cur] = r + self.projects.debug('inserting', prereq_type, "prerequisites of", cur, ":", ' '.join(r)) + self.projects.dep_cache[prereq_type][cur] = r return r def read_deps_cached(self, cur, prereq_type): - return res_cache.run(self.read_deps, [ cur, prereq_type ]) + return self.projects.res_cache.run(self.read_deps, [ cur, prereq_type ]) def 
add_dep_tree(self, cur, prereq_types, tree, all_deps): - debug("adding prerequisites " + ' '.join(prereq_types) + " of module " + cur) + self.projects.debug("adding prerequisites " + ' '.join(prereq_types) + " of module " + cur) if cur in all_deps: - debug('already handled module ' + cur) + self.projects.debug('already handled module ' + cur) return 0 deps = set() all_deps.add(cur) for t in prereq_types: - debug("checking prereqisites of type " + t) + self.projects.debug("checking prereqisites of type " + t) deps.update(self.read_deps_cached(cur, t)) for d in deps: self.add_dep_tree(d, prereq_types, tree, all_deps) @@ -133,7 +136,7 @@ class Build(object): all_deps = set() dep_tree = {} for m in modules: - debug("--- adding dependency tree of module " + m) + self.projects.debug("--- adding dependency tree of module " + m) self.add_dep_tree(m, prereq_types, dep_tree, all_deps) while len(all_deps): for d in all_deps: @@ -159,7 +162,7 @@ class Build(object): print(',' + delim + ' >') - patt = is_excluded_from_build(module) + patt = self.projects.is_excluded_from_build(module) if patt is not None: print('| Configured to skip build on platform >' + patt + '<') print('`' + delim + ' <') @@ -175,7 +178,7 @@ class Build(object): print('`' + delim + ' <') if p.returncode: print(make_cmd + ' failed') - raise Exception(time.strftime("%Y-%m-%d %H:%M") + ": failed to make target " + target + " in module " + module + " below base " + projs_root) + raise Exception(time.strftime("%Y-%m-%d %H:%M") + ": failed to make target " + target + " in module " + module + " below base " + self.projects.projs_root) def run_make_on_modules(self, modules, order, target): cur_project = 0 @@ -195,10 +198,9 @@ class Build(object): self.run_make(m, target, cur_project, num_projects) def run(self, args_): - global opt_debug visited = {} glob_order = [] - projs_root=pwd.getpwuid(os.getuid()).pw_dir + "/local/src/jw.dev/proj" + self.projects.projs_root = pwd.getpwuid(os.getuid()).pw_dir + "/local/src/jw.dev/proj" # -- parse command line parser = argparse.ArgumentParser(description='janware software project build tool') @@ -216,9 +218,9 @@ class Build(object): args=parser.parse_args(args_) if args.debug: - opt_debug = True + self.projects.opt_debug = True - debug("----------------------------------------- running ", ' '.join(args_)) + self.projects.debug("----------------------------------------- running ", ' '.join(args_)) modules=args.modules exclude=args.exclude.split() @@ -261,115 +263,101 @@ class Build(object): print(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")) -def debug(*objs): - if opt_debug: - print("DEBUG: ", *objs, file=sys.stderr) +# ----------------------------------------------------------------- class Projects -def err(*objs): - print("ERR: ", *objs, file=sys.stderr) +class Projects(object): -def proj_dir(name): - if name == top_name: - return topdir - return projs_root + '/' + name + def __init__(self): -def re_section(name): - return re.compile('[' + name + ']' - '.*?' 
- '(?=[)', - re.DOTALL) + self.global_args = [] + self.opt_os = None + self.top_name = None + self.glob_os_cascade = None -def remove_duplicates(seq): - seen = set() - seen_add = seen.add - return [x for x in seq if not (x in seen or seen_add(x))] + self.dep_cache = {} + self.my_dir = os.path.dirname(os.path.realpath(__file__)) + self.opt_debug = False + self.res_cache = ResultCache() + self.topdir = None + self.projs_root = expanduser("~") + '/local/src/jw.dev/proj' -def get_os(args = ""): - for d in [ projs_root + '/jw-build/scripts', '/opt/jw-build/bin' ]: - script = d + '/get-os.sh' - if isfile(script): - cmd = '/bin/bash ' + script - if args: - cmd = cmd + ' ' + args - p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE) - (out, rr) = p.communicate() - if rr: - err("failed to run ", cmd) - continue - out = re.sub('\n', '', out.decode('utf-8')) - return out - return "linux" + def debug(self, *objs): + if self.opt_debug: + print("DEBUG: ", *objs, file=sys.stderr) -# TODO: add support for customizing this in project.conf -def htdocs_dir(name): - pd = proj_dir(name) - for r in [ pd + "/tools/html/htdocs", pd + "/htdocs", "/srv/www/proj/" + name - ]: - if isdir(r): - return r - return None + def err(self, *objs): + print("ERR: ", *objs, file=sys.stderr) -def os_cascade(): - global glob_os_cascade - if glob_os_cascade is not None: - return glob_os_cascade - r = [ 'os', platform.system().lower() ] - os = res_cache.run(get_os, []) - name = re.sub('-.*', '', os) - series = os - while True: - n = re.sub('\.[0-9]+$', '', series) - if n == series: - break - r.append(n) - series = n - if not name in r: - r.append(name) - if not os in r: - r.append(os) - # e.g. os, linux, suse, suse-tumbleweed - #return [ 'os', platform.system().lower(), name, os ] - glob_os_cascade = r - return r + def proj_dir(self, name): + if name == self.top_name: + return self.topdir + return self.projs_root + '/' + name -def strip_module_from_spec(mod): - return re.sub(r'-devel$|-run$', '', re.split('([=><]+)', mod)[0].strip()) + def re_section(self, name): + return re.compile('[' + name + ']' + '.*?' 
+ '(?=[)', + re.DOTALL) -def get_section(path, section): - r = '' - file = open(path) - pat = '[' + section + ']' - in_section = False - for line in file: - if (line.rstrip() == pat): - in_section = True - continue - if in_section: - if len(line) and line[0] == '[': - break - r = r + line - file.close() - return r.rstrip() + def remove_duplicates(self, seq): + seen = set() + seen_add = seen.add + return [x for x in seq if not (x in seen or seen_add(x))] -def read_value(path, section, key): - debug("opening ", path) - try: - file = open(path) - except: - debug(path, "not found") - # TODO: handle this special case cleaner somewhere up the stack - if section == 'build' and key == 'libname': - return 'none' + def get_os(self, args = ""): + for d in [ self.projs_root + '/jw-build/scripts', '/opt/jw-build/bin' ]: + script = d + '/get-os.sh' + if isfile(script): + cmd = '/bin/bash ' + script + if args: + cmd = cmd + ' ' + args + p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE) + (out, rr) = p.communicate() + if rr: + self.err("failed to run ", cmd) + continue + out = re.sub('\n', '', out.decode('utf-8')) + return out + return "linux" + + # TODO: add support for customizing this in project.conf + def htdocs_dir(self, name): + pd = self.proj_dir(name) + for r in [ pd + "/tools/html/htdocs", pd + "/htdocs", "/srv/www/proj/" + name ]: + if isdir(r): + return r return None - r = [] - if not len(section): - for line in file: - r = re.findall('^ *' + key + ' *= *(.*)', line) - if (len(r) > 0): + + def os_cascade(self): + if self.glob_os_cascade is not None: + return self.glob_os_cascade + r = [ 'os', platform.system().lower() ] + os = self.res_cache.run(self.get_os, []) + name = re.sub('-.*', '', os) + series = os + while True: + n = re.sub('\.[0-9]+$', '', series) + if n == series: break - else: - in_section = False + r.append(n) + series = n + if not name in r: + r.append(name) + if not os in r: + r.append(os) + # e.g. 
os, linux, suse, suse-tumbleweed + #return [ 'os', platform.system().lower(), name, os ] + self.glob_os_cascade = r + return r + + def strip_module_from_spec(self, mod): + return re.sub(r'-devel$|-run$', '', re.split('([=><]+)', mod)[0].strip()) + + def get_section(self, path, section): + r = '' + file = open(path) pat = '[' + section + ']' + in_section = False for line in file: if (line.rstrip() == pat): in_section = True @@ -377,556 +365,580 @@ def read_value(path, section, key): if in_section: if len(line) and line[0] == '[': break - if key is None: - r.append(line) - else: - r = re.findall('^ *' + key + ' *= *(.*)', line) - #debug("key " + key + ": parsed line >" + line + "<, result is " + ' '.join(r)) - if (len(r) > 0): + r = r + line + file.close() + return r.rstrip() + + def read_value(self, path, section, key): + self.debug("opening ", path) + try: + file = open(path) + except: + self.debug(path, "not found") + # TODO: handle this special case cleaner somewhere up the stack + if section == 'build' and key == 'libname': + return 'none' + return None + r = [] + if not len(section): + for line in file: + r = re.findall('^ *' + key + ' *= *(.*)', line) + if (len(r) > 0): + break + else: + in_section = False + pat = '[' + section + ']' + for line in file: + if (line.rstrip() == pat): + in_section = True + continue + if in_section: + if len(line) and line[0] == '[': break - file.close() + if key is None: + r.append(line) + else: + r = re.findall('^ *' + key + ' *= *(.*)', line) + #self.debug("key " + key + ": parsed line >" + line + "<, result is " + ' '.join(r)) + if (len(r) > 0): + break + file.close() - if len(r): - return r[0] - return None + if len(r): + return r[0] + return None -def get_value(name, section, key): - debug("getting value [%s].%s for project %s (%s)" %(section, key, name, top_name)) - if top_name and name == top_name: - proj_root = topdir - else: - proj_root = projs_root + '/' + name - debug("proj_root = " + proj_root) + def get_value(self, name, section, key): + self.debug("getting value [%s].%s for project %s (%s)" %(section, key, name, self.top_name)) + if self.top_name and name == self.top_name: + proj_root = self.topdir + else: + proj_root = self.projs_root + '/' + name + self.debug("proj_root = " + proj_root) - if section == 'version': - proj_version_dirs = [ proj_root ] - if proj_root != topdir: - proj_version_dirs.append('/usr/share/doc/packages/' + name) - for d in proj_version_dirs: - version_path = d + '/VERSION' - try: - with open(version_path) as fd: - r = fd.read().replace('\n', '').replace('-dev', '') - fd.close() - return r - except EnvironmentError: - debug("ignoring unreadable file " + version_path) - continue - raise Exception("No version file found for project \"" + name + "\"") + if section == 'version': + proj_version_dirs = [ proj_root ] + if proj_root != self.topdir: + proj_version_dirs.append('/usr/share/doc/packages/' + name) + for d in proj_version_dirs: + version_path = d + '/VERSION' + try: + with open(version_path) as fd: + r = fd.read().replace('\n', '').replace('-dev', '') + fd.close() + return r + except EnvironmentError: + self.debug("ignoring unreadable file " + version_path) + continue + raise Exception("No version file found for project \"" + name + "\"") - path = proj_root + '/make/project.conf' - #print('path = ', path, 'top_name = ', top_name, 'name = ', name) - return res_cache.run(read_value, [path, section, key]) + path = proj_root + '/make/project.conf' + #print('path = ', path, 'self.top_name = ', self.top_name, 'name = ', 
name) + return self.res_cache.run(self.read_value, [path, section, key]) -def collect_values(names, section, key): - r = "" - for n in names: - val = get_value(n, section, key) - if val: - r = r + " " + val - return remove_duplicates([x.strip() for x in r.split(",")]) + def collect_values(self, names, section, key): + r = "" + for n in names: + val = self.get_value(n, section, key) + if val: + r = r + " " + val + return self.remove_duplicates([x.strip() for x in r.split(",")]) -# scope 0: no children -# scope 1: children -# scope 2: recursive + # scope 0: no children + # scope 1: children + # scope 2: recursive -def add_modules_from_project_txt_cached(buf, visited, spec, section, key, add_self, scope, - names_only): - return res_cache.run(add_modules_from_project_txt, [buf, visited, spec, section, key, - add_self, scope, names_only]) + def add_modules_from_project_txt_cached(self, buf, visited, spec, section, key, add_self, scope, + names_only): + return self.res_cache.run(self.add_modules_from_project_txt, [buf, visited, spec, section, key, + add_self, scope, names_only]) -def add_modules_from_project_txt(buf, visited, spec, section, key, add_self, scope, - names_only): - name = strip_module_from_spec(spec) - if names_only: - spec = name - if spec in buf: - return - if spec in visited: + def add_modules_from_project_txt(self, buf, visited, spec, section, key, add_self, scope, + names_only): + name = self.strip_module_from_spec(spec) + if names_only: + spec = name + if spec in buf: + return + if spec in visited: + if add_self: + buf.append(spec) + return + visited.add(spec) + deps = self.get_value(name, section, key) + self.debug("name = ", name, "section = ", section, "key = ", key, "deps = ", deps, "scope = ", scope, "visited = ", visited) + if deps and scope > 0: + if scope == 1: + subscope = 0 + else: + subscope = 2 + deps = deps.split(',') + for dep in deps: + dep = dep.strip() + if not(len(dep)): + continue + self.add_modules_from_project_txt_cached(buf, visited, dep, + section, key, add_self=True, scope=subscope, + names_only=names_only) if add_self: buf.append(spec) - return - visited.add(spec) - deps = get_value(name, section, key) - debug("name = ", name, "section = ", section, "key = ", key, "deps = ", deps, "scope = ", scope, "visited = ", visited) - if deps and scope > 0: - if scope == 1: - subscope = 0 + + def get_modules_from_project_txt(self, names, sections, keys, add_self, scope, + names_only = True): + if isinstance(keys, basestring): + keys = [ keys ] + #r = set() + r = [] + for section in sections: + for key in keys: + visited = set() + for name in names: + rr = [] + self.add_modules_from_project_txt_cached(rr, visited, name, section, key, add_self, scope, + names_only) + # TODO: this looks like a performance hogger + for m in rr: + if not m in r: + r.append(m) + return r + + def pkg_relations(self, rel_type, args_): + parser = argparse.ArgumentParser(description='pkg-' + rel_type) + # TODO: implement Vendor evaluation + + parser.add_argument('--subsections', '-S', nargs='?', default=None, help='Subsections to consider, comma-separated') + parser.add_argument('--delimiter', '-d', nargs='?', default=', ', help='Output words delimiter') + parser.add_argument('flavour', help='Flavour') + parser.add_argument('module', nargs='*', help='Modules') + parser.add_argument('--no-subpackages', '-p', action='store_true', + default=False, help='Cut -run and -devel from package names') + parser.add_argument('--no-version', action='store_true', + default=False, help='Don\'t report 
version information') + parser.add_argument('--dont-strip-revision', action='store_true', + default=False, help='Always treat VERSION macro as VERSION-REVISION') + parser.add_argument('--recursive', action='store_true', + default=False, help='Find dependencies recursively') + parser.add_argument('--dont-expand-version-macros', action='store_true', + default=False, help='Don\'t expand VERSION and REVISION macros') + args = parser.parse_args(args_) + version_pattern=re.compile("[0-9-.]*") + if args.subsections is None: + subsecs = self.os_cascade() + subsecs.append('jw') else: - subscope = 2 - deps = deps.split(',') - for dep in deps: - dep = dep.strip() - if not(len(dep)): - continue - add_modules_from_project_txt_cached(buf, visited, dep, - section, key, add_self=True, scope=subscope, - names_only=names_only) - if add_self: - buf.append(spec) + subsecs = args.subsections.split(',') + self.debug('flavour = ', args.flavour, ', subsecs = ', ' '.join(subsecs)) -def get_modules_from_project_txt(names, sections, keys, add_self, scope, - names_only = True): - if isinstance(keys, basestring): - keys = [ keys ] - #r = set() - r = [] - for section in sections: - for key in keys: - visited = set() - for name in names: - rr = [] - add_modules_from_project_txt_cached(rr, visited, name, section, key, add_self, scope, - names_only) - # TODO: this looks like a performance hogger - for m in rr: - if not m in r: - r.append(m) - return r - -def pkg_relations(rel_type, args_): - parser = argparse.ArgumentParser(description='pkg-' + rel_type) - # TODO: implement Vendor evaluation - - parser.add_argument('--subsections', '-S', nargs='?', default=None, help='Subsections to consider, comma-separated') - parser.add_argument('--delimiter', '-d', nargs='?', default=', ', help='Output words delimiter') - parser.add_argument('flavour', help='Flavour') - parser.add_argument('module', nargs='*', help='Modules') - parser.add_argument('--no-subpackages', '-p', action='store_true', - default=False, help='Cut -run and -devel from package names') - parser.add_argument('--no-version', action='store_true', - default=False, help='Don\'t report version information') - parser.add_argument('--dont-strip-revision', action='store_true', - default=False, help='Always treat VERSION macro as VERSION-REVISION') - parser.add_argument('--recursive', action='store_true', - default=False, help='Find dependencies recursively') - parser.add_argument('--dont-expand-version-macros', action='store_true', - default=False, help='Don\'t expand VERSION and REVISION macros') - args = parser.parse_args(args_) - version_pattern=re.compile("[0-9-.]*") - if args.subsections is None: - subsecs = os_cascade() - subsecs.append('jw') - else: - subsecs = args.subsections.split(',') - debug('flavour = ', args.flavour, ', subsecs = ', ' '.join(subsecs)) - - r = [] - flavours = args.flavour.split(',') - for flavour in flavours: - for s in subsecs: - section = 'pkg.' + rel_type + '.' 
+ s - visited = set() - modules = args.module.copy() - while len(modules): - m = modules.pop(0) - if m in visited: - continue - visited.add(m) - value = get_value(m, section, flavour) - if not value: - continue - deps = value.split(',') - for spec in deps: - dep = re.split('([=><]+)', spec) - if args.no_version: - dep = dep[:1] - dep = list(map(str.strip, dep)) - dep_name = re.sub('-dev$|-devel$|-run$', '', dep[0]) - if args.no_subpackages: - dep[0] = dep_name - for i, item in enumerate(dep): - dep[i] = item.strip() - if s == 'jw': - if args.recursive and not dep_name in visited and not dep_name in modules: - modules.append(dep_name) - if len(dep) == 3: - if args.dont_expand_version_macros and dep_name in args.module: - version = dep[2] - else: - version = get_value(dep_name, 'version', '') - if dep[2] == 'VERSION': - if args.dont_strip_revision: - dep[2] = version + r = [] + flavours = args.flavour.split(',') + for flavour in flavours: + for s in subsecs: + section = 'pkg.' + rel_type + '.' + s + visited = set() + modules = args.module.copy() + while len(modules): + m = modules.pop(0) + if m in visited: + continue + visited.add(m) + value = self.get_value(m, section, flavour) + if not value: + continue + deps = value.split(',') + for spec in deps: + dep = re.split('([=><]+)', spec) + if args.no_version: + dep = dep[:1] + dep = list(map(str.strip, dep)) + dep_name = re.sub('-dev$|-devel$|-run$', '', dep[0]) + if args.no_subpackages: + dep[0] = dep_name + for i, item in enumerate(dep): + dep[i] = item.strip() + if s == 'jw': + if args.recursive and not dep_name in visited and not dep_name in modules: + modules.append(dep_name) + if len(dep) == 3: + if args.dont_expand_version_macros and dep_name in args.module: + version = dep[2] else: - dep[2] = version.split('-')[0] - elif dep[2] == 'VERSION-REVISION': - dep[2] = version - elif version_pattern.match(dep[2]): - # dep[2] = dep[2] - pass - else: - raise Exception("Unknown version specifier in " + spec) - cleaned_dep = ' '.join(dep) - if not cleaned_dep in r: - r.append(cleaned_dep) - print(args.delimiter.join(r)) + version = self.get_value(dep_name, 'version', '') + if dep[2] == 'VERSION': + if args.dont_strip_revision: + dep[2] = version + else: + dep[2] = version.split('-')[0] + elif dep[2] == 'VERSION-REVISION': + dep[2] = version + elif version_pattern.match(dep[2]): + # dep[2] = dep[2] + pass + else: + raise Exception("Unknown version specifier in " + spec) + cleaned_dep = ' '.join(dep) + if not cleaned_dep in r: + r.append(cleaned_dep) + print(args.delimiter.join(r)) -def get_libname(names): - vals = get_modules_from_project_txt(names, ['build'], 'libname', - scope = 1, add_self=False, names_only=True) - if not vals: - return ' '.join(names) - if 'none' in vals: - vals.remove('none') - return ' '.join(reversed(vals)) - -def is_excluded_from_build(module): - debug("checking if module " + module + " is excluded from build") - exclude = get_modules_from_project_txt([ module ], ['build'], 'exclude', - scope = 1, add_self=False, names_only=True) - cascade = os_cascade() + [ 'all' ] - for p1 in exclude: - for p2 in cascade: - if p1 == p2: - return p1 - return None - -# -L needs to contain more paths than libs linked with -l would require -def get_ldpathflags(names, exclude = []): - deps = get_modules_from_project_txt(names, ['pkg.requires.jw'], 'build', - scope = 2, add_self=True, names_only=True) - r = '' - for m in deps: - if m in exclude: - continue - libname = get_libname([m]) - if len(libname): - r = r + ' -L' + proj_dir(m) + '/lib' - 
print(r[1:]) - -def get_ldflags(names, exclude = [], add_self_ = False): - #print(names) - deps = get_modules_from_project_txt(names, ['pkg.requires.jw'], 'build', - scope = 1, add_self=add_self_, names_only=True) - debug("deps = " + ' '.join(deps)) - #print(deps) - r = '' - for m in reversed(deps): - if m in exclude: - continue - libname = get_libname([m]) - if len(libname): - #r = r + ' -L' + proj_dir(m) + '/lib -l' + libname - r = r + ' -l' + libname - if len(r): - ldpathflags = get_ldpathflags(names, exclude) - if ldpathflags: - r = ldpathflags + ' ' + r - return r[1::] - return '' - -def commands(): - f = open(sys.argv[0]) - cmds = [] - for line in f: - debug("checking line ", line) - rr = re.findall('^def *cmd_([a-z0-9_]+).*', line) - if len(rr): - cmds.append(rr[0].replace('_', '-')) - f.close() - return ' '.join(cmds) - -# --------------------------------------------------------------------- commands - -def cmd_commands(args_): - print(commands()) - -def cmd_build(args_): - build = Build() - build.run(args_) - -def cmd_test(args_): - parser = argparse.ArgumentParser(description='Test') - parser.add_argument('blah', default='', help='The blah argument') - args=parser.parse_args(args_) - print("blah = " + args.blah) - -def cmd_os_cascade(args_): - print(' '.join(os_cascade())) - -def cmd_ldlibpath(args_): - parser = argparse.ArgumentParser(description='ldlibpath') - parser.add_argument('module', nargs='*', help='Modules') - args=parser.parse_args(args_) - deps = get_modules_from_project_txt(args.module, ['pkg.requires.jw'], [ 'run', 'build', 'devel' ], - scope = 2, add_self=True, names_only=True) - r = '' - for m in deps: - r = r + ':' + proj_dir(m) + '/lib' - print(r[1:]) - -def cmd_pythonpath(args_): - parser = argparse.ArgumentParser(description='pythonpath') - parser.add_argument('module', nargs='*', help='Modules') - args=parser.parse_args(args_) - deps = get_modules_from_project_txt(args.module, ['pkg.requires.jw'], [ 'run', 'build' ], - scope = 2, add_self=True, names_only=True) - r = '' - for m in deps: - pdir = proj_dir(m) - for subdir in [ 'src/python', 'tools/python' ]: - cand = pdir + "/" + subdir - if isdir(cand): - r = r + ':' + cand - print(r[1:]) - -def cmd_exepath(args_): - parser = argparse.ArgumentParser(description='exepath') - parser.add_argument('module', nargs='*', help='Modules') - args=parser.parse_args(args_) - deps = get_modules_from_project_txt(args.module, ['pkg.requires.jw'], [ 'run', 'build', 'devel' ], - scope = 2, add_self=True, names_only=True) - debug('deps = ', deps) - r = '' - for m in deps: - r = r + ':' + proj_dir(m) + '/bin' - print(r[1:]) - -def cmd_libname(args_): - parser = argparse.ArgumentParser(description='libname') - parser.add_argument('module', nargs='*', help='Modules') - args=parser.parse_args(args_) - print(get_libname(args.module)) - -def cmd_ldflags(args_): - parser = argparse.ArgumentParser(description='ldflags') - parser.add_argument('module', nargs='*', help='Modules') - parser.add_argument('--exclude', action='append', help='Exclude Modules', default=[]) - parser.add_argument('--add-self', '-s', action='store_true', - default=False, help='Include libflags of specified modules, too, not only their dependencies') - args=parser.parse_args(args_) - print(get_ldflags(args.module, args.exclude, args.add_self)) - -def cmd_cflags(args_): - parser = argparse.ArgumentParser(description='cflags') - parser.add_argument('module', nargs='*', help='Modules') - args=parser.parse_args(args_) - deps = get_modules_from_project_txt(args.module, 
['pkg.requires.jw'], 'build', - scope = 2, add_self=True, names_only=True) - r = '' - for m in reversed(deps): - r = r + ' -I' + proj_dir(m) + '/include' - print(r[1:]) - -def cmd_path(args_): - parser = argparse.ArgumentParser(description='path') - parser.add_argument('module', nargs='*', help='Modules') - args=parser.parse_args(args_) - deps = get_modules_from_project_txt(args.module, ['pkg.requires.jw'], 'run', - scope = 2, add_self=True, names_only=True) - r = '' - for m in deps: - r = r + ':' + proj_dir(m) + '/bin' - print(r[1:]) - -# TODO: seems at least partly redundant to cmd_pkg_requires / pkg_relations -def cmd_prereq(args_): - parser = argparse.ArgumentParser(description='path') - parser.add_argument('flavour', help='Flavour') - parser.add_argument('module', nargs='*', help='Modules') - args = parser.parse_args(args_) - deps = get_modules_from_project_txt(args.module, ['pkg.requires.jw'], - args.flavour, scope = 2, add_self=False, names_only=True) - print(' '.join(deps)) - -# TODO: seems at least partly redundant to cmd_pkg_requires / pkg_relations -def cmd_required_os_pkg(args_): - parser = argparse.ArgumentParser(description='required-os-pkg') - parser.add_argument('module', nargs='*', help='Modules') - parser.add_argument('--flavours', help='Dependency flavours', default='build') - parser.add_argument('--skip-excluded', action='store_true', default=False, - help='Output empty prerequisite list if module is excluded') - args = parser.parse_args(args_) - modules = args.module - flavours = args.flavours.split() - debug("flavours = " + args.flavours) - deps = get_modules_from_project_txt(modules, ['pkg.requires.jw'], flavours, - scope = 2, add_self=True, names_only=True) - if args.skip_excluded: - for d in deps: - if is_excluded_from_build(d) is not None: - deps.remove(d) - subsecs = os_cascade() - debug("subsecs = ", subsecs) - requires = [] - for s in subsecs: - for f in flavours: - vals = collect_values(deps, 'pkg.requires.' 
+ s, f) - if vals: - requires = requires + vals - # TODO: add all not in build tree as -devel - r = '' - for m in requires: - r = r + ' ' + m - print(r[1:]) - -def cmd_pkg_requires(args_): - return pkg_relations("requires", args_) - -def cmd_pkg_conflicts(args_): - return pkg_relations("conflicts", args_) - -def cmd_pkg_provides(args_): - return pkg_relations("provides", args_) - -def cmd_proj_dir(args_): - parser = argparse.ArgumentParser(description='proj-dir') - parser.add_argument('module', nargs='*', help='Modules') - args=parser.parse_args(args_) - r = [] - for m in args.module: - r.append(proj_dir(m)) - print(' '.join(r)) - -def cmd_htdocs_dir(args_): - parser = argparse.ArgumentParser(description='htdocs-dir') - parser.add_argument('module', nargs='*', help='Modules') - args=parser.parse_args(args_) - r = [] - for m in args.module: - r.append(htdocs_dir(m)) - print(' '.join(r)) - -def cmd_summary(args_): - parser = argparse.ArgumentParser(description='summary') - parser.add_argument('module', nargs='*', help='Modules') - args=parser.parse_args(args_) - r = [] - for m in args.module: - summary = get_value(m, "summary", None) - if summary is not None: - r.append(summary) - print(' '.join(r)) - -def contains(small, big): - for i in xrange(len(big)-len(small)+1): - for j in xrange(len(small)): - if big[i+j] != small[j]: - break - else: - return i, i+len(small) - return False - -def read_dep_graph(modules, section, graph): - for m in modules: - if m in graph: - continue - deps = get_modules_from_project_txt([ m ], ['pkg.requires.jw'], section, + def get_libname(self, names): + vals = self.get_modules_from_project_txt(names, ['build'], 'libname', scope = 1, add_self=False, names_only=True) - if not deps is None: - graph[m] = deps - for d in deps: - read_dep_graph([ d ], section, graph) + if not vals: + return ' '.join(names) + if 'none' in vals: + vals.remove('none') + return ' '.join(reversed(vals)) -def flip_graph(graph): - r = {} - for m, deps in graph.items(): - for d in deps: - if not d in r: - r[d] = set() - r[d].add(m) - return r - -def check_circular_deps(module, section, graph, unvisited, temp, path): - if module in temp: - debug('found circular dependency at module', module) - return module - if not module in unvisited: + def is_excluded_from_build(self, module): + self.debug("checking if module " + module + " is excluded from build") + exclude = self.get_modules_from_project_txt([ module ], ['build'], 'exclude', + scope = 1, add_self=False, names_only=True) + cascade = self.os_cascade() + [ 'all' ] + for p1 in exclude: + for p2 in cascade: + if p1 == p2: + return p1 return None - temp.add(module) - if module in graph: - for m in graph[module]: - last = check_circular_deps(m, section, graph, unvisited, temp, path) + + # -L needs to contain more paths than libs linked with -l would require + def get_ldpathflags(self, names, exclude = []): + deps = self.get_modules_from_project_txt(names, ['pkg.requires.jw'], 'build', + scope = 2, add_self=True, names_only=True) + r = '' + for m in deps: + if m in exclude: + continue + libname = self.get_libname([m]) + if len(libname): + r = r + ' -L' + self.proj_dir(m) + '/lib' + print(r[1:]) + + def get_ldflags(self, names, exclude = [], add_self_ = False): + #print(names) + deps = self.get_modules_from_project_txt(names, ['pkg.requires.jw'], 'build', + scope = 1, add_self=add_self_, names_only=True) + self.debug("deps = " + ' '.join(deps)) + #print(deps) + r = '' + for m in reversed(deps): + if m in exclude: + continue + libname = 
self.get_libname([m]) + if len(libname): + #r = r + ' -L' + self.proj_dir(m) + '/lib -l' + libname + r = r + ' -l' + libname + if len(r): + ldpathflags = self.get_ldpathflags(names, exclude) + if ldpathflags: + r = ldpathflags + ' ' + r + return r[1::] + return '' + + def commands(self): + f = open(sys.argv[0]) + cmds = [] + for line in f: + self.debug("checking line ", line) + rr = re.findall('^def *cmd_([a-z0-9_]+).*', line) + if len(rr): + cmds.append(rr[0].replace('_', '-')) + f.close() + return ' '.join(cmds) + + # ----------------------------------------------------------------- commands + + def cmd_commands(self, args_): + print(self.commands()) + + def cmd_build(self, args_): + build = Build(self) + build.run(args_) + + def cmd_test(self, args_): + parser = argparse.ArgumentParser(description='Test') + parser.add_argument('blah', default='', help='The blah argument') + args=parser.parse_args(args_) + print("blah = " + args.blah) + + def cmd_os_cascade(self, args_): + print(' '.join(self.os_cascade())) + + def cmd_ldlibpath(self, args_): + parser = argparse.ArgumentParser(description='ldlibpath') + parser.add_argument('module', nargs='*', help='Modules') + args=parser.parse_args(args_) + deps = self.get_modules_from_project_txt(args.module, ['pkg.requires.jw'], [ 'run', 'build', 'devel' ], + scope = 2, add_self=True, names_only=True) + r = '' + for m in deps: + r = r + ':' + self.proj_dir(m) + '/lib' + print(r[1:]) + + def cmd_pythonpath(self, args_): + parser = argparse.ArgumentParser(description='pythonpath') + parser.add_argument('module', nargs='*', help='Modules') + args=parser.parse_args(args_) + deps = self.get_modules_from_project_txt(args.module, ['pkg.requires.jw'], [ 'run', 'build' ], + scope = 2, add_self=True, names_only=True) + r = '' + for m in deps: + pdir = self.proj_dir(m) + for subdir in [ 'src/python', 'tools/python' ]: + cand = pdir + "/" + subdir + if isdir(cand): + r = r + ':' + cand + print(r[1:]) + + def cmd_exepath(self, args_): + parser = argparse.ArgumentParser(description='exepath') + parser.add_argument('module', nargs='*', help='Modules') + args=parser.parse_args(args_) + deps = self.get_modules_from_project_txt(args.module, ['pkg.requires.jw'], [ 'run', 'build', 'devel' ], + scope = 2, add_self=True, names_only=True) + self.debug('deps = ', deps) + r = '' + for m in deps: + r = r + ':' + self.proj_dir(m) + '/bin' + print(r[1:]) + + def cmd_libname(self, args_): + parser = argparse.ArgumentParser(description='libname') + parser.add_argument('module', nargs='*', help='Modules') + args=parser.parse_args(args_) + print(self.get_libname(args.module)) + + def cmd_ldflags(self, args_): + parser = argparse.ArgumentParser(description='ldflags') + parser.add_argument('module', nargs='*', help='Modules') + parser.add_argument('--exclude', action='append', help='Exclude Modules', default=[]) + parser.add_argument('--add-self', '-s', action='store_true', + default=False, help='Include libflags of specified modules, too, not only their dependencies') + args=parser.parse_args(args_) + print(self.get_ldflags(args.module, args.exclude, args.add_self)) + + def cmd_cflags(self, args_): + parser = argparse.ArgumentParser(description='cflags') + parser.add_argument('module', nargs='*', help='Modules') + args=parser.parse_args(args_) + deps = self.get_modules_from_project_txt(args.module, ['pkg.requires.jw'], 'build', + scope = 2, add_self=True, names_only=True) + r = '' + for m in reversed(deps): + r = r + ' -I' + self.proj_dir(m) + '/include' + print(r[1:]) + + def 
cmd_path(self, args_): + parser = argparse.ArgumentParser(description='path') + parser.add_argument('module', nargs='*', help='Modules') + args=parser.parse_args(args_) + deps = self.get_modules_from_project_txt(args.module, ['pkg.requires.jw'], 'run', + scope = 2, add_self=True, names_only=True) + r = '' + for m in deps: + r = r + ':' + self.proj_dir(m) + '/bin' + print(r[1:]) + + # TODO: seems at least partly redundant to cmd_pkg_requires / pkg_relations + def cmd_prereq(self, args_): + parser = argparse.ArgumentParser(description='path') + parser.add_argument('flavour', help='Flavour') + parser.add_argument('module', nargs='*', help='Modules') + args = parser.parse_args(args_) + deps = self.get_modules_from_project_txt(args.module, ['pkg.requires.jw'], + args.flavour, scope = 2, add_self=False, names_only=True) + print(' '.join(deps)) + + # TODO: seems at least partly redundant to cmd_pkg_requires / pkg_relations + def cmd_required_os_pkg(self, args_): + parser = argparse.ArgumentParser(description='required-os-pkg') + parser.add_argument('module', nargs='*', help='Modules') + parser.add_argument('--flavours', help='Dependency flavours', default='build') + parser.add_argument('--skip-excluded', action='store_true', default=False, + help='Output empty prerequisite list if module is excluded') + args = parser.parse_args(args_) + modules = args.module + flavours = args.flavours.split() + self.debug("flavours = " + args.flavours) + deps = self.get_modules_from_project_txt(modules, ['pkg.requires.jw'], flavours, + scope = 2, add_self=True, names_only=True) + if args.skip_excluded: + for d in deps: + if self.is_excluded_from_build(d) is not None: + deps.remove(d) + subsecs = self.os_cascade() + self.debug("subsecs = ", subsecs) + requires = [] + for s in subsecs: + for f in flavours: + vals = self.collect_values(deps, 'pkg.requires.' 
+ s, f) + if vals: + requires = requires + vals + # TODO: add all not in build tree as -devel + r = '' + for m in requires: + r = r + ' ' + m + print(r[1:]) + + def cmd_pkg_requires(self, args_): + return self.pkg_relations("requires", args_) + + def cmd_pkg_conflicts(self, args_): + return self.pkg_relations("conflicts", args_) + + def cmd_pkg_provides(self, args_): + return self.pkg_relations("provides", args_) + + def cmd_proj_dir(self, args_): + parser = argparse.ArgumentParser(description='proj-dir') + parser.add_argument('module', nargs='*', help='Modules') + args=parser.parse_args(args_) + r = [] + for m in args.module: + r.append(self.proj_dir(m)) + print(' '.join(r)) + + def cmd_htdocs_dir(self, args_): + parser = argparse.ArgumentParser(description='htdocs-dir') + parser.add_argument('module', nargs='*', help='Modules') + args=parser.parse_args(args_) + r = [] + for m in args.module: + r.append(self.htdocs_dir(m)) + print(' '.join(r)) + + def cmd_summary(self, args_): + parser = argparse.ArgumentParser(description='summary') + parser.add_argument('module', nargs='*', help='Modules') + args=parser.parse_args(args_) + r = [] + for m in args.module: + summary = self.get_value(m, "summary", None) + if summary is not None: + r.append(summary) + print(' '.join(r)) + + def contains(self, small, big): + for i in xrange(len(big)-len(small)+1): + for j in xrange(len(small)): + if big[i+j] != small[j]: + break + else: + return i, i+len(small) + return False + + def read_dep_graph(self, modules, section, graph): + for m in modules: + if m in graph: + continue + deps = self.get_modules_from_project_txt([ m ], ['pkg.requires.jw'], section, + scope = 1, add_self=False, names_only=True) + if not deps is None: + graph[m] = deps + for d in deps: + self.read_dep_graph([ d ], section, graph) + + def flip_graph(self, graph): + r = {} + for m, deps in graph.items(): + for d in deps: + if not d in r: + r[d] = set() + r[d].add(m) + return r + + def check_circular_deps(self, module, section, graph, unvisited, temp, path): + if module in temp: + self.debug('found circular dependency at module', module) + return module + if not module in unvisited: + return None + temp.add(module) + if module in graph: + for m in graph[module]: + last = self.check_circular_deps(m, section, graph, unvisited, temp, path) + if last is not None: + path.insert(0, m) + return last + unvisited.remove(module) + temp.remove(module) + + def cmd_check(self, args_): + parser = argparse.ArgumentParser(description='check') + parser.add_argument('module', nargs='*', help='Modules') + parser.add_argument('--flavour', '-f', nargs='?', default = 'build') + args=parser.parse_args(args_) + + graph = {} + path = [] + self.read_dep_graph(args.module, args.flavour, graph) + unvisited = graph.keys() + temp = set() + while len(unvisited) is not 0: + m = unvisited[0] + self.debug('checking circular dependency of', m) + last = self.check_circular_deps(m, args.flavour, self.flip_graph(graph), unvisited, temp, path) if last is not None: - path.insert(0, m) - return last - unvisited.remove(module) - temp.remove(module) + self.debug('found circular dependency below', m, ', last is', last) + print('found circular dependency in flavour', args.flavour, ':', ' -> '.join(path)) + exit(1) -def cmd_check(args_): - parser = argparse.ArgumentParser(description='check') - parser.add_argument('module', nargs='*', help='Modules') - parser.add_argument('--flavour', '-f', nargs='?', default = 'build') - args=parser.parse_args(args_) + print('no circular dependency 
found for flavour', args.flavour, ' in modules:', + ' '.join(args.module)) + exit(0) - graph = {} - path = [] - read_dep_graph(args.module, args.flavour, graph) - unvisited = graph.keys() - temp = set() - while len(unvisited) is not 0: - m = unvisited[0] - debug('checking circular dependency of', m) - last = check_circular_deps(m, args.flavour, flip_graph(graph), unvisited, temp, path) - if last is not None: - debug('found circular dependency below', m, ', last is', last) - print('found circular dependency in flavour', args.flavour, ':', ' -> '.join(path)) - exit(1) + def cmd_getval(self, args_): + parser = argparse.ArgumentParser(description='Get value from project config') + parser.add_argument('--project', default = self.top_name, help = 'Project name') + parser.add_argument('section', default = '', help = 'Config section') + parser.add_argument('key', default = '', help = 'Config key') + args=parser.parse_args(args_) + print(self.get_value(args.project, args.section, args.key)) - print('no circular dependency found for flavour', args.flavour, ' in modules:', - ' '.join(args.module)) - exit(0) + # -------------------------------------------------------------------- here we go + def run(self): + if __name__ == "__main__": + skip = 0 + for a in sys.argv[1::]: + self.global_args.append(a) + if a in [ '--prefix', '-p', '--topdir', '-t' ]: + skip = 1 + continue + if skip > 0: + skip = skip -1 + continue + if a[0] != '-': + break -def cmd_getval(args_): - parser = argparse.ArgumentParser(description='Get value from project config') - parser.add_argument('--project', default = top_name, help = 'Project name') - parser.add_argument('section', default = '', help = 'Config section') - parser.add_argument('key', default = '', help = 'Config key') - args=parser.parse_args(args_) - print(get_value(args.project, args.section, args.key)) + parser = argparse.ArgumentParser(description='Project metadata evaluation') + parser.add_argument('cmd', default='', help='Command') + parser.add_argument('--debug', '-d', action='store_true', + default=False, help='Output debug information to stderr') + parser.add_argument('--topdir', '-t', nargs=1, default = [], help='Project Path') + parser.add_argument('--prefix', '-p', nargs=1, default = [ self.projs_root ], help='Projects Path Prefix') + parser.add_argument('--os', '-O', nargs=1, default = [], help='Target operating system') + parser.add_argument('arg', nargs='*', help='Command arguments') + args = parser.parse_args(self.global_args) -# -------------------------------------------------------------------- here we go + self.opt_debug = args.debug + if len(args.os): + self.opt_os = args.os[0] -global_args = [] -res_cache = ResultCache() -dep_cache = {} -my_dir = os.path.dirname(os.path.realpath(__file__)) -opt_debug = False -opt_os = None -topdir = None -top_name = None -glob_os_cascade = None -projs_root = expanduser("~") + '/local/src/jw.dev/proj' + self.debug("----------------------------------------- running ", ' '.join(sys.argv)) + + self.projs_root = args.prefix[0] + if len(args.topdir): + self.topdir = args.topdir[0] + + if self.topdir: + self.top_name = self.res_cache.run(self.read_value, [self.topdir + '/make/project.conf', 'build', 'name']) + if not self.top_name: + self.top_name = re.sub('-[0-9.-]*$', '', basename(realpath(self.topdir))) + + if __name__ == "__main__": + cmd_name = 'cmd_' + args.cmd.replace('-', '_') + cmd = getattr(self, cmd_name) + cmd(sys.argv[(len(self.global_args) + 1)::]) if __name__ == "__main__": - skip = 0 - for a in 
sys.argv[1::]: - global_args.append(a) - if a in [ '--prefix', '-p', '--topdir', '-t' ]: - skip = 1 - continue - if skip > 0: - skip = skip -1 - continue - if a[0] != '-': - break - - parser = argparse.ArgumentParser(description='Project metadata evaluation') - parser.add_argument('cmd', default='', help='Command') - parser.add_argument('--debug', '-d', action='store_true', - default=False, help='Output debug information to stderr') - parser.add_argument('--topdir', '-t', nargs=1, default = [], help='Project Path') - parser.add_argument('--prefix', '-p', nargs=1, default = [ projs_root ], help='Projects Path Prefix') - parser.add_argument('--os', '-O', nargs=1, default = [], help='Target operating system') - parser.add_argument('arg', nargs='*', help='Command arguments') - args = parser.parse_args(global_args) - - opt_debug = args.debug - if len(args.os): - opt_os = args.os[0] - - debug("----------------------------------------- running ", ' '.join(sys.argv)) - - projs_root = args.prefix[0] - if len(args.topdir): - topdir = args.topdir[0] - -if topdir: - top_name = res_cache.run(read_value, [topdir + '/make/project.conf', 'build', 'name']) - if not top_name: - top_name = re.sub('-[0-9.-]*$', '', basename(realpath(topdir))) - -if __name__ == "__main__": - cmd = getattr(sys.modules[__name__], 'cmd_' + args.cmd.replace('-', '_')) - cmd(sys.argv[(len(global_args) + 1)::]) + projects = Projects() + projects.run()
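
Note (not part of the patch): with the former module-level helpers and globals moved onto the Projects class, the script can also be driven programmatically instead of only through the cmd_* dispatch at the bottom. A minimal sketch, assuming scripts/ is importable and using a hypothetical project name "mymodule":

    # Sketch only: the sys.path entry and the project name "mymodule" are
    # assumptions for illustration, not part of the patch.
    import sys
    sys.path.insert(0, "scripts")

    from projects import Projects

    p = Projects()
    p.opt_debug = True                    # replaces the old module-level opt_debug
    p.projs_root = "/srv/build/proj"      # replaces the old module-level projs_root

    print(p.os_cascade())                 # e.g. ['os', 'linux', 'suse', ...]
    print(p.get_value("mymodule", "build", "libname"))
    print(p.proj_dir("mymodule"))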
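
The self.projects.res_cache.run(func, args) calls introduced throughout the patch rely on the pre-existing ResultCache, which memoizes results keyed on the function and its arguments in a nested dict (only a fragment of it is visible in the first hunk). A simplified stand-in illustrating the idea, not the class from the file:

    class SimpleResultCache(object):
        # Simplified stand-in: memoize func(*args), keyed on the function name
        # and the string form of each argument (flat key instead of nested dicts).
        def __init__(self):
            self.cache = {}

        def run(self, func, args):
            key = (func.__name__,) + tuple(str(a) for a in args)
            if key not in self.cache:
                self.cache[key] = func(*args)
            return self.cache[key]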