2019-06-02 12:28:13 +00:00
|
|
|
#!/usr/bin/python3 -u
|
2019-06-12 11:26:15 +00:00
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
|
#
|
|
|
|
|
# This source code file is a merge of various build tools and a horrible mess.
|
|
|
|
|
#
|
2016-01-25 12:25:51 +00:00
|
|
|
|
2016-01-29 16:29:43 +00:00
|
|
|
from __future__ import print_function
|
2018-03-07 10:17:14 +00:00
|
|
|
import os
|
2016-01-25 12:25:51 +00:00
|
|
|
import sys
|
|
|
|
|
import argparse
|
2018-03-07 10:51:23 +00:00
|
|
|
import pwd
|
2019-11-11 21:32:36 +00:00
|
|
|
import time
|
2024-04-30 08:00:39 +00:00
|
|
|
import pathlib
|
2016-12-31 14:10:18 +00:00
|
|
|
from os.path import isfile
|
2016-02-11 12:51:36 +00:00
|
|
|
from os.path import isdir
|
2016-01-25 12:25:51 +00:00
|
|
|
from os.path import expanduser
|
2016-01-29 14:02:29 +00:00
|
|
|
from os.path import basename
|
|
|
|
|
from os.path import realpath
|
2016-12-31 14:10:18 +00:00
|
|
|
import subprocess
|
2016-01-25 12:25:51 +00:00
|
|
|
import re
|
2016-12-31 14:10:18 +00:00
|
|
|
import platform
|
2018-03-07 10:51:50 +00:00
|
|
|
import datetime
|
2016-01-25 12:25:51 +00:00
|
|
|
|
2017-06-26 09:33:53 +00:00
|
|
|
# meaning of pkg.requires.xxx variables
|
2016-09-16 13:16:50 +00:00
|
|
|
# build: needs to be built and installed before this can be built
|
|
|
|
|
# devel: needs to be installed before this-devel can be installed, i.e. before _other_ packages can be built against this
|
|
|
|
|
# run: needs to be installed before this-run can be installed, i.e. before this and other packages can run with this
|
|
|
|
|
|
2019-03-10 15:34:08 +00:00
|
|
|
# --------------------------------------------------------------------- Python 2 / 3 compatibility stuff
|
|
|
|
|
# Python 2/3 compatibility: Python 3 has no `basestring`, so alias it to
# `str` there; the isinstance() checks below work on both versions.
try:
    basestring
except NameError:
    basestring = str
|
|
|
|
|
|
2016-01-25 12:25:51 +00:00
|
|
|
# --------------------------------------------------------------------- helpers
|
2016-01-29 16:29:43 +00:00
|
|
|
|
2017-08-31 18:57:13 +00:00
|
|
|
class ResultCache(object):
    """Generic per-call memoization helper.

    Results are keyed on the function's ``__name__`` plus the str() of each
    argument, so distinct functions that share a name, or arguments whose
    string forms collide (e.g. ``1`` vs ``'1'``), share a cache slot — this
    matches the original nested-dict implementation, which walked one dict
    level per stringified key.  The flat tuple key used here is equivalent
    but simpler and removes the unreachable "cache algorithm failed" path.
    """

    def __init__(self):
        # Maps (func.__name__, str(arg1), str(arg2), ...) -> cached result.
        self.__cache = {}

    def run(self, func, args):
        """Return ``func(*args)``, computing it only on the first call per key.

        ``None`` arguments are keyed as the string 'None', exactly as before.
        """
        key = tuple('None' if part is None else str(part)
                    for part in [func.__name__] + args)
        if key not in self.__cache:
            self.__cache[key] = func(*args)
        return self.__cache[key]
|
|
|
|
|
|
2019-06-11 19:03:43 +00:00
|
|
|
# ----------------------------------------------------------------- class Projects
|
2017-08-09 13:06:18 +00:00
|
|
|
|
2019-06-11 19:03:43 +00:00
|
|
|
class Projects(object):
|
|
|
|
|
|
|
|
|
|
    def __init__(self):
        """Set up default state; actual configuration happens later via options."""

        # NOTE(review): purpose of global_args inferred from its name —
        # it is not used within this chunk of the file; confirm.
        self.global_args = []
        self.opt_os = None              # OS id override; None means autodetect via get_os()
        self.top_name = None            # name of the top-level (current) project, if any
        self.glob_os_cascade = None     # memoized result of os_cascade()

        self.dep_cache = {}             # per-prereq-type dependency cache used by cmd_build
        self.my_dir = os.path.dirname(os.path.realpath(__file__))
        self.opt_debug = False          # enables debug() output to stderr
        self.res_cache = ResultCache()  # generic memoization for expensive lookups
        self.topdir = None              # directory of the top-level project
        # Root directory below which sibling projects are expected to live.
        self.projs_root = expanduser("~") + '/local/src/jw.dev/proj'
|
|
|
|
|
|
|
|
|
|
def debug(self, *objs):
|
|
|
|
|
if self.opt_debug:
|
2019-06-12 13:04:07 +00:00
|
|
|
print("DEBUG: ", *objs, file = sys.stderr)
|
2019-06-11 19:03:43 +00:00
|
|
|
|
|
|
|
|
def err(self, *objs):
|
2019-06-12 13:04:07 +00:00
|
|
|
print("ERR: ", *objs, file = sys.stderr)
|
2019-06-11 19:03:43 +00:00
|
|
|
|
2019-06-12 13:11:19 +00:00
|
|
|
def find_proj_path_unused(name):
|
|
|
|
|
name = name.replace("dspider-", "")
|
|
|
|
|
search_path = [".", "dspc/src", "dspc/src/dspcd-plugins", "dspc/src/io" ]
|
|
|
|
|
for sub in search_path:
|
|
|
|
|
path = self.projs_root + "/" + sub + "/" + name
|
|
|
|
|
if os.path.exists(path):
|
|
|
|
|
return os.path.abspath(path)
|
|
|
|
|
raise Exception("module " + name + " not found below " + self.projs_root)
|
|
|
|
|
|
|
|
|
|
def find_proj_path_cached_unused(name):
|
|
|
|
|
return self.res_cache.run(find_proj_path_unused, [ name ])
|
|
|
|
|
|
2019-06-11 19:03:43 +00:00
|
|
|
def proj_dir(self, name):
|
|
|
|
|
if name == self.top_name:
|
|
|
|
|
return self.topdir
|
|
|
|
|
return self.projs_root + '/' + name
|
|
|
|
|
|
|
|
|
|
def re_section(self, name):
|
|
|
|
|
return re.compile('[' + name + ']'
|
|
|
|
|
'.*?'
|
|
|
|
|
'(?=[)',
|
|
|
|
|
re.DOTALL)
|
|
|
|
|
|
|
|
|
|
def remove_duplicates(self, seq):
|
|
|
|
|
seen = set()
|
|
|
|
|
seen_add = seen.add
|
|
|
|
|
return [x for x in seq if not (x in seen or seen_add(x))]
|
|
|
|
|
|
|
|
|
|
def get_os(self, args = ""):
|
|
|
|
|
for d in [ self.projs_root + '/jw-build/scripts', '/opt/jw-build/bin' ]:
|
|
|
|
|
script = d + '/get-os.sh'
|
|
|
|
|
if isfile(script):
|
|
|
|
|
cmd = '/bin/bash ' + script
|
|
|
|
|
if args:
|
|
|
|
|
cmd = cmd + ' ' + args
|
|
|
|
|
p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
|
|
|
|
|
(out, rr) = p.communicate()
|
|
|
|
|
if rr:
|
|
|
|
|
self.err("failed to run ", cmd)
|
|
|
|
|
continue
|
|
|
|
|
out = re.sub('\n', '', out.decode('utf-8'))
|
|
|
|
|
return out
|
|
|
|
|
return "linux"
|
|
|
|
|
|
|
|
|
|
# TODO: add support for customizing this in project.conf
|
|
|
|
|
def htdocs_dir(self, name):
|
|
|
|
|
pd = self.proj_dir(name)
|
|
|
|
|
for r in [ pd + "/tools/html/htdocs", pd + "/htdocs", "/srv/www/proj/" + name ]:
|
|
|
|
|
if isdir(r):
|
|
|
|
|
return r
|
2016-01-29 16:29:43 +00:00
|
|
|
return None
|
2019-06-11 19:03:43 +00:00
|
|
|
|
|
|
|
|
def os_cascade(self):
|
|
|
|
|
if self.glob_os_cascade is not None:
|
2019-06-15 15:20:01 +00:00
|
|
|
return self.glob_os_cascade.copy()
|
2019-06-11 19:03:43 +00:00
|
|
|
r = [ 'os', platform.system().lower() ]
|
2019-06-12 13:04:07 +00:00
|
|
|
os = self.opt_os if self.opt_os is not None else self.res_cache.run(self.get_os, [])
|
2019-06-11 19:03:43 +00:00
|
|
|
name = re.sub('-.*', '', os)
|
|
|
|
|
series = os
|
|
|
|
|
while True:
|
|
|
|
|
n = re.sub('\.[0-9]+$', '', series)
|
|
|
|
|
if n == series:
|
2016-01-29 16:29:43 +00:00
|
|
|
break
|
2019-06-11 19:03:43 +00:00
|
|
|
r.append(n)
|
|
|
|
|
series = n
|
|
|
|
|
if not name in r:
|
|
|
|
|
r.append(name)
|
|
|
|
|
if not os in r:
|
|
|
|
|
r.append(os)
|
|
|
|
|
# e.g. os, linux, suse, suse-tumbleweed
|
|
|
|
|
#return [ 'os', platform.system().lower(), name, os ]
|
|
|
|
|
self.glob_os_cascade = r
|
|
|
|
|
return r
|
|
|
|
|
|
|
|
|
|
def strip_module_from_spec(self, mod):
|
2019-12-08 10:39:55 +00:00
|
|
|
return re.sub(r'-dev$|-devel$|-run$', '', re.split('([=><]+)', mod)[0].strip())
|
2019-06-11 19:03:43 +00:00
|
|
|
|
|
|
|
|
def get_section(self, path, section):
|
|
|
|
|
r = ''
|
|
|
|
|
file = open(path)
|
2016-01-29 16:29:43 +00:00
|
|
|
pat = '[' + section + ']'
|
2019-06-11 19:03:43 +00:00
|
|
|
in_section = False
|
2016-01-29 16:29:43 +00:00
|
|
|
for line in file:
|
|
|
|
|
if (line.rstrip() == pat):
|
|
|
|
|
in_section = True
|
|
|
|
|
continue
|
|
|
|
|
if in_section:
|
|
|
|
|
if len(line) and line[0] == '[':
|
|
|
|
|
break
|
2019-06-11 19:03:43 +00:00
|
|
|
r = r + line
|
|
|
|
|
file.close()
|
|
|
|
|
return r.rstrip()
|
|
|
|
|
|
|
|
|
|
def read_value(self, path, section, key):
|
2019-06-18 07:37:15 +00:00
|
|
|
|
|
|
|
|
def scan_section(f, key):
|
|
|
|
|
if key is None:
|
|
|
|
|
r = ''
|
|
|
|
|
for line in f:
|
|
|
|
|
if len(line) and line[0] == '[':
|
|
|
|
|
break
|
|
|
|
|
r += line
|
|
|
|
|
return r if len(r) else None
|
2020-03-19 17:36:58 +00:00
|
|
|
lines = []
|
|
|
|
|
cont_line = ''
|
2019-06-18 07:37:15 +00:00
|
|
|
for line in f:
|
|
|
|
|
if len(line) and line[0] == '[':
|
2020-03-19 17:36:58 +00:00
|
|
|
break
|
|
|
|
|
cont_line += line.rstrip()
|
|
|
|
|
if len(cont_line) and cont_line[-1] == '\\':
|
|
|
|
|
cont_line = cont_line[0:-1]
|
|
|
|
|
continue
|
|
|
|
|
lines.append(cont_line)
|
|
|
|
|
cont_line = ''
|
|
|
|
|
for line in lines:
|
|
|
|
|
#self.debug(" looking for >%s< in line=>%s<" % (key, line))
|
2019-06-18 07:37:15 +00:00
|
|
|
rr = re.findall('^ *' + key + ' *= *(.*)', line)
|
|
|
|
|
if len(rr) > 0:
|
|
|
|
|
return rr[0]
|
|
|
|
|
return None
|
|
|
|
|
|
|
|
|
|
def scan_section_debug(f, key):
|
|
|
|
|
rr = scan_section(f, key)
|
|
|
|
|
#self.debug(" returning", rr)
|
|
|
|
|
return rr
|
|
|
|
|
|
2019-06-11 19:03:43 +00:00
|
|
|
try:
|
2019-06-18 07:37:15 +00:00
|
|
|
#self.debug("looking for {}::[{}].{}".format(path, section, key))
|
|
|
|
|
with open(path, 'r') as f:
|
|
|
|
|
if not len(section):
|
|
|
|
|
rr = scan_section(f, key)
|
|
|
|
|
pat = '[' + section + ']'
|
|
|
|
|
for line in f:
|
|
|
|
|
if line.rstrip() == pat:
|
|
|
|
|
return scan_section(f, key)
|
|
|
|
|
return None
|
2019-06-11 19:03:43 +00:00
|
|
|
except:
|
|
|
|
|
self.debug(path, "not found")
|
|
|
|
|
# TODO: handle this special case cleaner somewhere up the stack
|
|
|
|
|
if section == 'build' and key == 'libname':
|
|
|
|
|
return 'none'
|
|
|
|
|
return None
|
|
|
|
|
|
|
|
|
|
    def get_value(self, name, section, key):
        """Look up [section].key in project *name*'s make/project.conf.

        The top-level project (self.top_name) resolves to self.topdir, all
        others to self.projs_root/<name>.  section == 'version' is handled
        specially: the value comes from a VERSION file (for non-top projects
        the installed copy below /usr/share/doc/packages/<name> is tried as
        a fallback), with newlines and a '-dev' suffix stripped.  Ordinary
        lookups are memoized through self.res_cache.

        Raises Exception when section is 'version' and no VERSION file is
        readable anywhere.
        """
        self.debug("getting value [%s].%s for project %s (%s)" %(section, key, name, self.top_name))
        if self.top_name and name == self.top_name:
            proj_root = self.topdir
        else:
            proj_root = self.projs_root + '/' + name
        self.debug("proj_root = " + proj_root)

        if section == 'version':
            proj_version_dirs = [ proj_root ]
            if proj_root != self.topdir:
                # Fall back to the version file shipped with the installed package.
                proj_version_dirs.append('/usr/share/doc/packages/' + name)
            for d in proj_version_dirs:
                version_path = d + '/VERSION'
                try:
                    with open(version_path) as fd:
                        # Strip newlines and a '-dev' marker from the version string.
                        r = fd.read().replace('\n', '').replace('-dev', '')
                        fd.close()
                        return r
                except EnvironmentError:
                    self.debug("ignoring unreadable file " + version_path)
                    continue
            raise Exception("No version file found for project \"" + name + "\"")

        path = proj_root + '/make/project.conf'
        #print('path = ', path, 'self.top_name = ', self.top_name, 'name = ', name)
        return self.res_cache.run(self.read_value, [path, section, key])
|
|
|
|
|
|
|
|
|
|
def collect_values(self, names, section, key):
|
|
|
|
|
r = ""
|
|
|
|
|
for n in names:
|
|
|
|
|
val = self.get_value(n, section, key)
|
|
|
|
|
if val:
|
|
|
|
|
r = r + " " + val
|
|
|
|
|
return self.remove_duplicates([x.strip() for x in r.split(",")])
|
|
|
|
|
|
|
|
|
|
# scope 0: no children
|
|
|
|
|
# scope 1: children
|
|
|
|
|
# scope 2: recursive
|
|
|
|
|
|
|
|
|
|
    def add_modules_from_project_txt_cached(self, buf, visited, spec, section, key, add_self, scope,
                                            names_only):
        """Memoized wrapper around add_modules_from_project_txt().

        NOTE(review): ResultCache keys on str() of every argument — including
        the mutable buf/visited lists — and on a cache hit replays only the
        (None) return value, not the side effects on buf.  Presumably this
        deduplication is intentional; confirm before changing.
        """
        return self.res_cache.run(self.add_modules_from_project_txt, [buf, visited, spec, section, key,
                                  add_self, scope, names_only])
|
|
|
|
|
|
|
|
|
|
    def add_modules_from_project_txt(self, buf, visited, spec, section, key, add_self, scope,
                                     names_only):
        """Append the dependencies of *spec* from [section].key to *buf* in place.

        visited guards against cycles across calls.  scope controls recursion:
        0 = don't descend, 1 = direct children only, 2 = fully recursive.
        With names_only, the operator/version part of a spec is dropped first.
        Modules are appended post-order: dependencies before the module itself
        (when add_self is set).
        """
        name = self.strip_module_from_spec(spec)
        if names_only:
            spec = name
        if spec in buf:
            # Already collected into this result list.
            return
        if spec in visited:
            if add_self:
                buf.append(spec)
            return
        visited.add(spec)
        deps = self.get_value(name, section, key)
        self.debug("name = ", name, "section = ", section, "key = ", key, "deps = ", deps, "scope = ", scope, "visited = ", visited)
        if deps and scope > 0:
            # scope 1 visits children but does not descend into them.
            if scope == 1:
                subscope = 0
            else:
                subscope = 2
            deps = deps.split(',')
            for dep in deps:
                dep = dep.strip()
                if not(len(dep)):
                    continue
                self.add_modules_from_project_txt_cached(buf, visited, dep,
                        section, key, add_self=True, scope=subscope,
                        names_only=names_only)
        if add_self:
            buf.append(spec)
|
|
|
|
|
|
|
|
|
|
    def get_modules_from_project_txt(self, names, sections, keys, add_self, scope,
                                     names_only = True):
        """Collect the dependency modules of *names* over all sections x keys.

        keys may be a single string or a list of keys.  Returns a list with
        duplicates removed, preserving discovery order across the passes.
        See add_modules_from_project_txt() for add_self / scope / names_only.
        """
        if isinstance(keys, basestring):
            keys = [ keys ]
        #r = set()
        r = []
        for section in sections:
            for key in keys:
                # One visited set per (section, key) pass, shared over all names.
                visited = set()
                for name in names:
                    rr = []
                    self.add_modules_from_project_txt_cached(rr, visited, name, section, key, add_self, scope,
                                                             names_only)
                    # TODO: this looks like a performance hogger
                    for m in rr:
                        if not m in r:
                            r.append(m)
        return r
|
|
|
|
|
|
|
|
|
|
    def pkg_relations(self, rel_type, args_):
        """Resolve the pkg.<rel_type>.<subsection> relations of the given modules.

        args_ is an argv-style list parsed here (flavour, modules and various
        flags).  Subsections default to the OS cascade plus 'jw'; VERSION and
        VERSION-REVISION macros in jw-internal specs are expanded from each
        dependency's version unless --dont-expand-version-macros applies.
        Returns the resolved dependency specs joined with the delimiter.

        Raises Exception on a version specifier that is neither a macro nor
        a plain version number.
        """
        parser = argparse.ArgumentParser(description='pkg-' + rel_type)
        # TODO: implement Vendor evaluation

        parser.add_argument('-S', '--subsections', nargs='?', default=None, help='Subsections to consider, comma-separated')
        parser.add_argument('-d', '--delimiter', nargs='?', default=', ', help='Output words delimiter')
        parser.add_argument('flavour', help='Flavour')
        parser.add_argument('module', nargs='*', help='Modules')
        parser.add_argument('-p', '--no-subpackages', action='store_true',
                default=False, help='Cut -run and -devel from package names')
        parser.add_argument('--no-version', action='store_true',
                default=False, help='Don\'t report version information')
        parser.add_argument('--dont-strip-revision', action='store_true',
                default=False, help='Always treat VERSION macro as VERSION-REVISION')
        parser.add_argument('--recursive', action='store_true',
                default=False, help='Find dependencies recursively')
        parser.add_argument('--dont-expand-version-macros', action='store_true',
                default=False, help='Don\'t expand VERSION and REVISION macros')
        parser.add_argument('--ignore', nargs='?', default='', help='Packages that '
                'should be ignored together with their dependencies')
        args = parser.parse_args(args_)
        version_pattern=re.compile("[0-9-.]*")
        if args.subsections is None:
            subsecs = self.os_cascade()
            subsecs.append('jw')
        else:
            subsecs = args.subsections.split(',')
        self.debug('flavour = ', args.flavour, ', subsecs = ', ' '.join(subsecs))
        ignore = args.ignore.split(',')
        self.debug("ignore = ", ignore)

        r = []
        flavours = args.flavour.split(',')
        for flavour in flavours:
            for s in subsecs:
                section = 'pkg.' + rel_type + '.' + s
                visited = set()
                # Work queue; --recursive appends discovered jw deps to it.
                modules = args.module.copy()
                while len(modules):
                    m = modules.pop(0)
                    if m in visited or m in ignore:
                        continue
                    visited.add(m)
                    value = self.get_value(m, section, flavour)
                    if not value:
                        continue
                    deps = value.split(',')
                    for spec in deps:
                        # Split 'name >= version' into [name, operator, version].
                        dep = re.split('([=><]+)', spec)
                        if args.no_version:
                            dep = dep[:1]
                        dep = list(map(str.strip, dep))
                        dep_name = re.sub('-dev$|-devel$|-run$', '', dep[0])
                        if dep_name in ignore or dep[0] in ignore:
                            continue
                        if args.no_subpackages:
                            dep[0] = dep_name
                        for i, item in enumerate(dep):
                            dep[i] = item.strip()
                        if s == 'jw':
                            # Only jw-internal deps recurse and expand macros.
                            if args.recursive and not dep_name in visited and not dep_name in modules:
                                modules.append(dep_name)
                            if len(dep) == 3:
                                if args.dont_expand_version_macros and dep_name in args.module:
                                    version = dep[2]
                                else:
                                    version = self.get_value(dep_name, 'version', '')
                                if dep[2] == 'VERSION':
                                    if args.dont_strip_revision:
                                        dep[2] = version
                                    else:
                                        # VERSION alone means the part before '-REVISION'.
                                        dep[2] = version.split('-')[0]
                                elif dep[2] == 'VERSION-REVISION':
                                    dep[2] = version
                                elif version_pattern.match(dep[2]):
                                    # Literal version number: keep as given.
                                    # dep[2] = dep[2]
                                    pass
                                else:
                                    raise Exception("Unknown version specifier in " + spec)
                        cleaned_dep = ' '.join(dep)
                        if not cleaned_dep in r:
                            self.debug("appending", cleaned_dep)
                            r.append(cleaned_dep)
        return args.delimiter.join(r)
|
|
|
|
|
|
|
|
|
|
def print_pkg_relations(self, rel_type, args_):
|
|
|
|
|
print(self.pkg_relations(rel_type, args_))
|
2019-06-11 19:03:43 +00:00
|
|
|
|
|
|
|
|
def get_libname(self, names):
|
|
|
|
|
vals = self.get_modules_from_project_txt(names, ['build'], 'libname',
|
|
|
|
|
scope = 1, add_self=False, names_only=True)
|
|
|
|
|
if not vals:
|
|
|
|
|
return ' '.join(names)
|
|
|
|
|
if 'none' in vals:
|
|
|
|
|
vals.remove('none')
|
|
|
|
|
return ' '.join(reversed(vals))
|
|
|
|
|
|
|
|
|
|
def is_excluded_from_build(self, module):
|
|
|
|
|
self.debug("checking if module " + module + " is excluded from build")
|
|
|
|
|
exclude = self.get_modules_from_project_txt([ module ], ['build'], 'exclude',
|
2016-03-06 15:16:25 +00:00
|
|
|
scope = 1, add_self=False, names_only=True)
|
2019-06-11 19:03:43 +00:00
|
|
|
cascade = self.os_cascade() + [ 'all' ]
|
|
|
|
|
for p1 in exclude:
|
|
|
|
|
for p2 in cascade:
|
|
|
|
|
if p1 == p2:
|
|
|
|
|
return p1
|
|
|
|
|
return None
|
|
|
|
|
|
|
|
|
|
# -L needs to contain more paths than libs linked with -l would require
|
|
|
|
|
def get_ldpathflags(self, names, exclude = []):
|
|
|
|
|
deps = self.get_modules_from_project_txt(names, ['pkg.requires.jw'], 'build',
|
|
|
|
|
scope = 2, add_self=True, names_only=True)
|
|
|
|
|
r = ''
|
|
|
|
|
for m in deps:
|
|
|
|
|
if m in exclude:
|
|
|
|
|
continue
|
|
|
|
|
libname = self.get_libname([m])
|
|
|
|
|
if len(libname):
|
|
|
|
|
r = r + ' -L' + self.proj_dir(m) + '/lib'
|
|
|
|
|
print(r[1:])
|
|
|
|
|
|
|
|
|
|
    def get_ldflags(self, names, exclude = [], add_self_ = False):
        """Return '-l<lib>' linker flags (with -L paths prepended) for the
        direct build dependencies of *names*.

        exclude lists modules to skip; add_self_ also includes the named
        modules themselves, not only their dependencies.  Returns '' when
        nothing links.
        """
        #print(names)
        deps = self.get_modules_from_project_txt(names, ['pkg.requires.jw'], 'build',
                scope = 1, add_self=add_self_, names_only=True)
        self.debug("deps = " + ' '.join(deps))
        #print(deps)
        r = ''
        # Reversed: put dependents before their dependencies in link order.
        for m in reversed(deps):
            if m in exclude:
                continue
            libname = self.get_libname([m])
            if len(libname):
                #r = r + ' -L' + self.proj_dir(m) + '/lib -l' + libname
                r = r + ' -l' + libname
        if len(r):
            # NOTE(review): get_ldpathflags() historically printed its result
            # and returned None, making this branch a no-op — confirm which
            # behavior is intended.
            ldpathflags = self.get_ldpathflags(names, exclude)
            if ldpathflags:
                r = ldpathflags + ' ' + r
            # Drop the single leading separator accumulated above.
            return r[1::]
        return ''
|
|
|
|
|
|
|
|
|
|
def commands(self):
|
|
|
|
|
f = open(sys.argv[0])
|
|
|
|
|
cmds = []
|
|
|
|
|
for line in f:
|
|
|
|
|
self.debug("checking line ", line)
|
2019-06-12 13:04:07 +00:00
|
|
|
rr = re.findall('^ def *cmd_([a-z0-9_]+).*', line)
|
2019-06-11 19:03:43 +00:00
|
|
|
if len(rr):
|
|
|
|
|
cmds.append(rr[0].replace('_', '-'))
|
|
|
|
|
f.close()
|
|
|
|
|
return ' '.join(cmds)
|
|
|
|
|
|
|
|
|
|
# ----------------------------------------------------------------- commands
|
|
|
|
|
|
|
|
|
|
def cmd_commands(self, args_):
|
|
|
|
|
print(self.commands())
|
|
|
|
|
|
2024-04-30 08:00:39 +00:00
|
|
|
    def cmd_modules(self, args_):
        """Command: list all projects below projs_root having make/project.conf.

        With -F key=value[,key=value...] only modules whose project.conf
        matches at least one filter are printed.  A filter key may be
        'section.key', or a bare key which is looked up in the section-less
        head of the file.
        """
        proj_root = self.projs_root
        self.debug("proj_root = " + proj_root)
        path = pathlib.Path(self.projs_root)

        parser = argparse.ArgumentParser(description='Query existing janware packages')

        parser.add_argument('-F', '--filter', nargs='?', default=None, help='Key-value pairs, seperated by commas, to be searched for in project.conf')
        args = parser.parse_args(args_)
        # Every directory containing make/project.conf counts as a module.
        modules = [p.parents[1].name for p in path.glob('*/make/project.conf')]
        self.debug("modules = ", modules)
        out = []
        filters = None if args.filter is None else [re.split("=", f) for f in re.split(",", args.filter)]
        for m in modules:
            if filters:
                for f in filters:
                    # NOTE(review): 'path' is reused here, shadowing the
                    # pathlib.Path above — works, but confusing.
                    path = f[0].rsplit('.')
                    if len(path) > 1:
                        sec = path[0]
                        key = path[1]
                    else:
                        # Bare key: search the section-less head of project.conf.
                        sec = None
                        key = path[0]
                    val = self.get_value(m, sec, key)
                    self.debug('Checking in {} if {}="{}", is "{}"'.format(m, f[0], f[1], val))
                    if val and val == f[1]:
                        out.append(m)
                        break
            else:
                out.append(m)
        print(' '.join(out))
|
|
|
|
|
|
2019-06-11 19:03:43 +00:00
|
|
|
    def cmd_build(self, args_):
        """Command: build the given modules (plus prerequisites) in dependency order.

        Parses build options from args_, topologically sorts the modules by
        their pkg.requires.jw prerequisites and runs 'make <target>' in each
        project directory, streaming the output with a '| ' prefix.
        """

        def read_deps(cur, prereq_type):
            """Transitive prerequisites of *cur* for one prerequisite type."""
            # dep cache doesn't make a difference at all
            if prereq_type in self.dep_cache:
                if cur in self.dep_cache[prereq_type]:
                    return self.dep_cache[prereq_type][cur]
            else:
                self.dep_cache[prereq_type] = {}

            r = self.get_modules_from_project_txt([ cur ], ['pkg.requires.jw'],
                    prereq_type, scope = 2, add_self=False, names_only=True)
            self.debug('prerequisites = ' + ' '.join(r))
            if cur in r:
                # A module never counts as its own prerequisite.
                r.remove(cur)
            self.debug('inserting', prereq_type, "prerequisites of", cur, ":", ' '.join(r))
            self.dep_cache[prereq_type][cur] = r
            return r

        def read_deps_cached(cur, prereq_type):
            return self.res_cache.run(read_deps, [ cur, prereq_type ])

        def add_dep_tree(cur, prereq_types, tree, all_deps):
            """Recursively record cur's prerequisites into tree and all_deps."""
            self.debug("adding prerequisites " + ' '.join(prereq_types) + " of module " + cur)
            if cur in all_deps:
                self.debug('already handled module ' + cur)
                return 0
            deps = set()
            all_deps.add(cur)
            for t in prereq_types:
                self.debug("checking prereqisites of type " + t)
                deps.update(read_deps_cached(cur, t))
            for d in deps:
                add_dep_tree(d, prereq_types, tree, all_deps)
            tree[cur] = deps
            return len(deps)

        def calculate_order(order, modules, prereq_types):
            """Topological sort: repeatedly pick a leaf (module with no deps left)."""
            all_deps = set()
            dep_tree = {}
            for m in modules:
                self.debug("--- adding dependency tree of module " + m)
                add_dep_tree(m, prereq_types, dep_tree, all_deps)
            while len(all_deps):
                # Find any leaf
                for d in all_deps:
                    if not len(dep_tree[d]): # Dependency d doesn't have dependencies itself
                        break # found
                else: # no Leaf found
                    print(all_deps)
                    raise Exception("fatal: the dependencies between these modules are unresolvable")
                order.append(d) # do it
                # bookkeep it
                all_deps.remove(d)
                for k in dep_tree.keys():
                    if d in dep_tree[k]:
                        dep_tree[k].remove(d)
            return 1

        def run_make(module, target, cur_project, num_projects):
            """Run 'make <target>' in the module's directory, prefixing its output."""
            #make_cmd = "make " + target + " 2>&1"
            make_cmd = [ "make", target ]
            path = self.proj_dir(module)
            delim_len = 120
            delim = '---- [%d/%d]: running %s in %s -' % (cur_project, num_projects, make_cmd, path)
            delim = delim + '-' * (delim_len - len(delim))

            print(',' + delim + ' >')

            patt = self.is_excluded_from_build(module)
            if patt is not None:
                print('| Configured to skip build on platform >' + patt + '<')
                print('`' + delim + ' <')
                return

            os.chdir(path)
            p = subprocess.Popen(make_cmd, shell=False, stdout=subprocess.PIPE, stderr=None, close_fds=True)
            for line in iter(p.stdout.readline, b''):
                line = line.decode(sys.stdout.encoding)
                sys.stdout.write('| ' + line) # avoid extra newlines from print()
                sys.stdout.flush()
            p.wait()
            print('`' + delim + ' <')
            if p.returncode:
                print(' '.join(make_cmd) + ' failed')
                raise Exception(time.strftime("%Y-%m-%d %H:%M") + ": failed to make target " + target + " in module " + module + " below base " + self.projs_root)

        def run_make_on_modules(modules, order, target):
            """Build all modules in *order*; clean targets run reversed and may stop early."""
            cur_project = 0
            num_projects = len(order)
            if target in ["clean", "distclean"]:
                for m in reversed(order):
                    cur_project += 1
                    run_make(m, target, cur_project, num_projects)
                    if m in modules:
                        modules.remove(m)
                    if not len(modules):
                        print("all modules cleaned")
                        return
            else:
                for m in order:
                    cur_project += 1
                    run_make(m, target, cur_project, num_projects)

        def run(args_):
            # -- parse command line
            parser = argparse.ArgumentParser(description='janware software project build tool')
            parser.add_argument('--exclude', default='', help='Space seperated ist of modules to be excluded from build')
            parser.add_argument('-n', '--dry-run', action='store_true',
                    default=False, help='Don\'t build anything, just print what would be done.')
            parser.add_argument('-O', '--build-order', action='store_true',
                    default=False, help='Don\'t build anything, just print the build order.')
            parser.add_argument('-I', '--ignore-deps', action='store_true',
                    default=False, help='Don\'t build dependencies, i.e. build only modules specified on the command line')
            parser.add_argument('target', default='all', help='Build target')
            parser.add_argument('modules', nargs='+', default='', help='Modules to be built')

            args = parser.parse_args(args_)

            self.debug("----------------------------------------- running ", ' '.join(args_))

            modules = args.modules
            exclude = args.exclude.split()
            target = args.target

            # Additional excludes may come from the environment.
            env_exclude = os.getenv('BUILD_EXCLUDE', '')
            if len(env_exclude):
                print("exluding modules from environment: " + env_exclude)
                # NOTE(review): exclude is a list here but a string is added
                # with '+=', which extends the list character by character —
                # looks like a bug; confirm intended behavior.
                exclude += " " + env_exclude

            # -- build
            order = []

            # Packaging targets need the full prerequisite set, not only 'build'.
            glob_prereq_types = [ "build" ]
            if re.match("pkg-.*", target) is not None:
                glob_prereq_types = [ "build", "run", "release", "devel" ]

            if target != 'order' and not args.build_order:
                print("using prerequisite types " + ' '.join(glob_prereq_types))
                print("calculating order for modules ... ")

            calculate_order(order, modules, glob_prereq_types)
            if args.ignore_deps:
                order = [m for m in order if m in args.modules]
            order = [m for m in order if m not in exclude]
            if target == 'order' or args.build_order:
                print(' '.join(order))
                exit(0)

            cur_project = 0
            print("Building target %s in %d projects:" % (target, len(order)))
            for m in order:
                cur_project += 1
                print(" %3d %s" % (cur_project, m))

            if args.dry_run:
                exit(0)

            run_make_on_modules(modules, order, target)

            print(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"))

        run(args_)
|
2019-06-11 19:03:43 +00:00
|
|
|
|
|
|
|
|
def cmd_test(self, args_):
|
|
|
|
|
parser = argparse.ArgumentParser(description='Test')
|
|
|
|
|
parser.add_argument('blah', default='', help='The blah argument')
|
2019-06-12 13:04:07 +00:00
|
|
|
args = parser.parse_args(args_)
|
2019-06-11 19:03:43 +00:00
|
|
|
print("blah = " + args.blah)
|
|
|
|
|
|
|
|
|
|
def cmd_os_cascade(self, args_):
|
|
|
|
|
print(' '.join(self.os_cascade()))
|
|
|
|
|
|
|
|
|
|
def cmd_ldlibpath(self, args_):
|
|
|
|
|
parser = argparse.ArgumentParser(description='ldlibpath')
|
|
|
|
|
parser.add_argument('module', nargs='*', help='Modules')
|
2019-06-12 13:04:07 +00:00
|
|
|
args = parser.parse_args(args_)
|
2019-06-11 19:03:43 +00:00
|
|
|
deps = self.get_modules_from_project_txt(args.module, ['pkg.requires.jw'], [ 'run', 'build', 'devel' ],
|
|
|
|
|
scope = 2, add_self=True, names_only=True)
|
|
|
|
|
r = ''
|
|
|
|
|
for m in deps:
|
|
|
|
|
r = r + ':' + self.proj_dir(m) + '/lib'
|
|
|
|
|
print(r[1:])
|
|
|
|
|
|
|
|
|
|
def cmd_pythonpath(self, args_):
|
|
|
|
|
parser = argparse.ArgumentParser(description='pythonpath')
|
|
|
|
|
parser.add_argument('module', nargs='*', help='Modules')
|
2019-06-12 13:04:07 +00:00
|
|
|
args = parser.parse_args(args_)
|
2019-06-11 19:03:43 +00:00
|
|
|
deps = self.get_modules_from_project_txt(args.module, ['pkg.requires.jw'], [ 'run', 'build' ],
|
|
|
|
|
scope = 2, add_self=True, names_only=True)
|
|
|
|
|
r = ''
|
|
|
|
|
for m in deps:
|
|
|
|
|
pdir = self.proj_dir(m)
|
|
|
|
|
for subdir in [ 'src/python', 'tools/python' ]:
|
|
|
|
|
cand = pdir + "/" + subdir
|
|
|
|
|
if isdir(cand):
|
|
|
|
|
r = r + ':' + cand
|
|
|
|
|
print(r[1:])
|
|
|
|
|
|
|
|
|
|
def cmd_exepath(self, args_):
|
|
|
|
|
parser = argparse.ArgumentParser(description='exepath')
|
|
|
|
|
parser.add_argument('module', nargs='*', help='Modules')
|
2019-06-12 13:04:07 +00:00
|
|
|
args = parser.parse_args(args_)
|
2019-06-11 19:03:43 +00:00
|
|
|
deps = self.get_modules_from_project_txt(args.module, ['pkg.requires.jw'], [ 'run', 'build', 'devel' ],
|
|
|
|
|
scope = 2, add_self=True, names_only=True)
|
|
|
|
|
self.debug('deps = ', deps)
|
|
|
|
|
r = ''
|
|
|
|
|
for m in deps:
|
|
|
|
|
r = r + ':' + self.proj_dir(m) + '/bin'
|
|
|
|
|
print(r[1:])
|
|
|
|
|
|
|
|
|
|
def cmd_libname(self, args_):
|
|
|
|
|
parser = argparse.ArgumentParser(description='libname')
|
|
|
|
|
parser.add_argument('module', nargs='*', help='Modules')
|
2019-06-12 13:04:07 +00:00
|
|
|
args = parser.parse_args(args_)
|
2019-06-11 19:03:43 +00:00
|
|
|
print(self.get_libname(args.module))
|
|
|
|
|
|
|
|
|
|
def cmd_ldflags(self, args_):
|
|
|
|
|
parser = argparse.ArgumentParser(description='ldflags')
|
|
|
|
|
parser.add_argument('module', nargs='*', help='Modules')
|
|
|
|
|
parser.add_argument('--exclude', action='append', help='Exclude Modules', default=[])
|
2019-06-12 13:04:07 +00:00
|
|
|
parser.add_argument('-s', '--add-self', action='store_true',
|
2019-06-11 19:03:43 +00:00
|
|
|
default=False, help='Include libflags of specified modules, too, not only their dependencies')
|
2019-06-12 13:04:07 +00:00
|
|
|
args = parser.parse_args(args_)
|
2019-06-11 19:03:43 +00:00
|
|
|
print(self.get_ldflags(args.module, args.exclude, args.add_self))
|
|
|
|
|
|
|
|
|
|
def cmd_cflags(self, args_):
    """Print compiler include flags (-I.../include) for the build
    dependencies of the given modules, in reverse dependency order."""
    arg_parser = argparse.ArgumentParser(description='cflags')
    arg_parser.add_argument('module', nargs='*', help='Modules')
    opts = arg_parser.parse_args(args_)
    modules = self.get_modules_from_project_txt(opts.module, ['pkg.requires.jw'], 'build',
                                                scope = 2, add_self=True, names_only=True)
    # reversed so that the most basic dependencies come last on the line
    flags = [ '-I' + self.proj_dir(m) + '/include' for m in reversed(modules) ]
    print(' '.join(flags))
|
|
|
|
|
|
|
|
|
|
def cmd_path(self, args_):
    """Print a ':'-separated PATH fragment containing the bin/
    directory of each given module and its run dependencies."""
    arg_parser = argparse.ArgumentParser(description='path')
    arg_parser.add_argument('module', nargs='*', help='Modules')
    opts = arg_parser.parse_args(args_)
    modules = self.get_modules_from_project_txt(opts.module, ['pkg.requires.jw'], 'run',
                                                scope = 2, add_self=True, names_only=True)
    print(':'.join(self.proj_dir(m) + '/bin' for m in modules))
|
|
|
|
|
|
2019-06-15 15:20:01 +00:00
|
|
|
# TODO: seems at least partly redundant to cmd_pkg_requires / print_pkg_relations
|
2019-06-11 19:03:43 +00:00
|
|
|
def cmd_prereq(self, args_):
    """Print the space-separated transitive prerequisites of the given
    modules for the given dependency flavour; the modules themselves
    are not included (add_self=False)."""
    # description was 'path' (copy-paste from cmd_path); name this command
    parser = argparse.ArgumentParser(description='prereq')
    parser.add_argument('flavour', help='Flavour')
    parser.add_argument('module', nargs='*', help='Modules')
    args = parser.parse_args(args_)
    deps = self.get_modules_from_project_txt(args.module, ['pkg.requires.jw'],
                                             args.flavour, scope = 2, add_self=False, names_only=True)
    print(' '.join(deps))
|
|
|
|
|
|
2019-06-15 15:20:01 +00:00
|
|
|
# TODO: seems at least partly redundant to cmd_pkg_requires / print_pkg_relations
|
2019-06-11 19:03:43 +00:00
|
|
|
def cmd_required_os_pkg(self, args_):
    """Print the OS packages (space separated) required by the given
    modules and their dependencies for the given flavours.

    The OS-specific 'pkg.requires.<subsection>' values are collected
    along the self.os_cascade() fallback chain.
    """
    parser = argparse.ArgumentParser(description='required-os-pkg')
    parser.add_argument('module', nargs='*', help='Modules')
    parser.add_argument('--flavours', help='Dependency flavours', default='build')
    parser.add_argument('--skip-excluded', action='store_true', default=False,
                        help='Output empty prerequisite list if module is excluded')
    args = parser.parse_args(args_)
    modules = args.module
    flavours = args.flavours.split()
    if 'build' in flavours and 'run' not in flavours:
        # TODO: This adds too much. Only the run dependencies of the build dependencies would be needed.
        flavours.append('run')
    self.debug("flavours = " + args.flavours)
    deps = self.get_modules_from_project_txt(modules, ['pkg.requires.jw'], flavours,
                                             scope = 2, add_self=True, names_only=True)
    if args.skip_excluded:
        # Rebuild the list instead of calling deps.remove() while
        # iterating deps: removing during iteration skipped the element
        # that followed each removed one, leaving excluded modules in.
        deps = [ d for d in deps if self.is_excluded_from_build(d) is None ]
    subsecs = self.os_cascade()
    self.debug("subsecs = ", subsecs)
    requires = []
    for s in subsecs:
        for f in flavours:
            vals = self.collect_values(deps, 'pkg.requires.' + s, f)
            if vals:
                requires = requires + vals
    # TODO: add all not in build tree as -devel
    print(' '.join(requires))
|
|
|
|
|
|
|
|
|
|
def cmd_pkg_requires(self, args_):
    """Print the package 'requires' relations for the given modules."""
    relation = "requires"
    return self.print_pkg_relations(relation, args_)
|
2019-06-11 19:03:43 +00:00
|
|
|
|
|
|
|
|
def cmd_pkg_conflicts(self, args_):
    """Print the package 'conflicts' relations for the given modules."""
    relation = "conflicts"
    return self.print_pkg_relations(relation, args_)
|
2019-06-11 19:03:43 +00:00
|
|
|
|
|
|
|
|
def cmd_pkg_provides(self, args_):
    """Print the package 'provides' relations for the given modules."""
    relation = "provides"
    return self.print_pkg_relations(relation, args_)
|
2019-06-11 19:03:43 +00:00
|
|
|
|
|
|
|
|
def cmd_proj_dir(self, args_):
    """Print the project directory of each given module, space separated."""
    arg_parser = argparse.ArgumentParser(description='proj-dir')
    arg_parser.add_argument('module', nargs='*', help='Modules')
    opts = arg_parser.parse_args(args_)
    print(' '.join(self.proj_dir(m) for m in opts.module))
|
|
|
|
|
|
|
|
|
|
def cmd_htdocs_dir(self, args_):
    """Print the htdocs directory of each given module, space separated."""
    arg_parser = argparse.ArgumentParser(description='htdocs-dir')
    arg_parser.add_argument('module', nargs='*', help='Modules')
    opts = arg_parser.parse_args(args_)
    print(' '.join(self.htdocs_dir(m) for m in opts.module))
|
|
|
|
|
|
|
|
|
|
def cmd_summary(self, args_):
    """Print the 'summary' value of each given module that has one,
    space separated; modules without a summary are skipped."""
    arg_parser = argparse.ArgumentParser(description='summary')
    arg_parser.add_argument('module', nargs='*', help='Modules')
    opts = arg_parser.parse_args(args_)
    summaries = [ self.get_value(m, "summary", None) for m in opts.module ]
    print(' '.join(s for s in summaries if s is not None))
|
|
|
|
|
|
|
|
|
|
def contains(self, small, big):
    """Return the (start, end) slice indices of the first occurrence of
    the sequence *small* inside the sequence *big*, or False when *big*
    does not contain *small*.

    An empty *small* matches immediately and yields (0, 0).
    """
    # range(), not the Python-2-only xrange(): under python3 (see the
    # shebang) xrange raises NameError
    for i in range(len(big)-len(small)+1):
        for j in range(len(small)):
            if big[i+j] != small[j]:
                break
        else:
            # inner loop finished without a mismatch: match found at i
            return i, i+len(small)
    return False
|
|
|
|
|
|
|
|
|
|
def read_dep_graph(self, modules, section, graph):
    """Recursively fill *graph* (module -> list of direct dependencies)
    for *modules* and everything reachable from them, using the
    dependency flavour *section*."""
    for module in modules:
        if module in graph:
            # already expanded on an earlier visit
            continue
        direct = self.get_modules_from_project_txt([ module ], ['pkg.requires.jw'], section,
                                                   scope = 1, add_self=False, names_only=True)
        if direct is not None:
            graph[module] = direct
            # descend into each direct dependency
            for dep in direct:
                self.read_dep_graph([ dep ], section, graph)
|
|
|
|
|
|
|
|
|
|
def flip_graph(self, graph):
    """Return the reverse of *graph*: a dict mapping each dependency to
    the set of modules that depend on it directly."""
    flipped = {}
    for module, deps in graph.items():
        for dep in deps:
            flipped.setdefault(dep, set()).add(module)
    return flipped
|
2016-03-06 15:16:25 +00:00
|
|
|
|
2019-06-11 19:03:43 +00:00
|
|
|
def check_circular_deps(self, module, section, graph, unvisited, temp, path):
    """Depth-first search for a dependency cycle reachable from *module*.

    *graph* maps a node to the nodes it points at; *unvisited* holds
    nodes not yet fully processed, *temp* the nodes on the current DFS
    stack.  When a cycle is found, the cycle's start node is returned
    and *path* is filled front-first while unwinding; otherwise None is
    returned (implicitly at the end).

    NOTE(review): *section* is never used in this body.
    NOTE(review): unvisited.remove() requires a list or set; a Python 3
    dict keys view would fail here — confirm what the caller passes.
    """
    # node already on the current DFS stack => we walked in a circle
    if module in temp:
        self.debug('found circular dependency at module', module)
        return module
    # node already fully processed in an earlier DFS: nothing to do
    if not module in unvisited:
        return None
    temp.add(module)
    if module in graph:
        for m in graph[module]:
            last = self.check_circular_deps(m, section, graph, unvisited, temp, path)
            if last is not None:
                # unwinding: record this cycle member and propagate
                path.insert(0, m)
                return last
    # module and everything below it is cycle-free
    unvisited.remove(module)
    temp.remove(module)
|
|
|
|
|
|
|
|
|
|
def cmd_check(self, args_):
    """Check the dependency graph of the given modules for circular
    dependencies in one flavour.

    Prints the cycle and exits with status 1 when one is found,
    otherwise reports success and exits with status 0.
    """
    parser = argparse.ArgumentParser(description='check')
    parser.add_argument('module', nargs='*', help='Modules')
    parser.add_argument('-f', '--flavour', nargs='?', default = 'build')
    args = parser.parse_args(args_)

    graph = {}
    path = []
    self.read_dep_graph(args.module, args.flavour, graph)
    # Materialize as a list: check_circular_deps() indexes this and
    # calls remove() on it, which a Python 3 dict keys view (the old
    # graph.keys() result) does not support.
    unvisited = list(graph.keys())
    temp = set()
    # graph is not modified below, so flip it once instead of once per
    # loop iteration
    flipped = self.flip_graph(graph)
    while len(unvisited) != 0:
        m = unvisited[0]
        self.debug('checking circular dependency of', m)
        last = self.check_circular_deps(m, args.flavour, flipped, unvisited, temp, path)
        if last is not None:
            self.debug('found circular dependency below', m, ', last is', last)
            print('found circular dependency in flavour', args.flavour, ':', ' -> '.join(path))
            exit(1)

    print('no circular dependency found for flavour', args.flavour, ' in modules:',
          ' '.join(args.module))
    exit(0)
|
|
|
|
|
|
|
|
|
|
def cmd_getval(self, args_):
    """Look up and print a single value from a project's config."""
    arg_parser = argparse.ArgumentParser(description='Get value from project config')
    arg_parser.add_argument('--project', default = self.top_name, help = 'Project name')
    arg_parser.add_argument('section', default = '', help = 'Config section')
    arg_parser.add_argument('key', default = '', help = 'Config key')
    opts = arg_parser.parse_args(args_)
    value = self.get_value(opts.project, opts.section, opts.key)
    print(value)
|
|
|
|
|
|
|
|
|
|
# -------------------------------------------------------------------- here we go
|
|
|
|
|
def run(self):
    """Entry point: collect the leading global options plus the command
    name into self.global_args, parse them, then dispatch to the
    matching cmd_* method with everything after the command name."""
    # scan argv up to (and including) the command name; options that
    # take a value make us skip the following argv entry as well
    skip = 0
    for a in sys.argv[1::]:
        self.global_args.append(a)
        if a in [ '-p', '--prefix', '-t', '--topdir', '-O', '--os' ]:
            # this option consumes the next argv entry as its value
            skip = 1
            continue
        if skip > 0:
            skip = skip -1
            continue
        if a[0] != '-':
            # first non-option word is the command name; stop scanning
            break

    # -- defaults
    self.projs_root = pwd.getpwuid(os.getuid()).pw_dir + "/local/src/jw.dev/proj"

    parser = argparse.ArgumentParser(description='Project metadata evaluation')
    parser.add_argument('-d', '--debug', action='store_true',
                        default=False, help='Output debug information to stderr')
    parser.add_argument('-t', '--topdir', nargs=1, default = [], help='Project Path')
    parser.add_argument('-p', '--prefix', nargs=1, default = [ self.projs_root ], help='Projects Path Prefix')
    parser.add_argument('-O', '--os', nargs=1, default = [], help='Target operating system')
    parser.add_argument('cmd', default='', help='Command, one of: ' + self.commands())
    parser.add_argument('arg', nargs='*', help='Command arguments')
    # parse only the collected global prefix, not the command arguments
    args = parser.parse_args(self.global_args)

    self.opt_debug = args.debug
    if len(args.os):
        self.opt_os = args.os[0]

    self.debug("----------------------------------------- running ", ' '.join(sys.argv))

    self.projs_root = args.prefix[0]
    self.debug("projs_root = ", self.projs_root)
    if len(args.topdir):
        self.topdir = args.topdir[0]

    if self.topdir:
        # project name: prefer the configured build name; fall back to
        # the directory basename with a trailing version suffix stripped
        self.top_name = self.res_cache.run(self.read_value, [self.topdir + '/make/project.conf', 'build', 'name'])
        if not self.top_name:
            self.top_name = re.sub('-[0-9.-]*$', '', basename(realpath(self.topdir)))

    # dispatch: command 'foo-bar' maps to method cmd_foo_bar; pass it
    # everything after the command name (global_args ends with cmd)
    cmd_name = 'cmd_' + args.cmd.replace('-', '_')
    cmd = getattr(self, cmd_name)
    cmd(sys.argv[(len(self.global_args) + 1)::])
|
2016-01-29 16:29:43 +00:00
|
|
|
|
2019-06-10 13:55:55 +00:00
|
|
|
if __name__ == "__main__":
    # build the Projects object and hand control to its dispatcher
    Projects().run()
|