Mirror of ssh://git.janware.com/srv/git/janware/proj/jw-pkg, synced 2026-01-15 03:53:32 +01:00
projects.py: Introduce class Projects
The Projects class wraps all global variables. This is mostly a text-replace job and results in horrible class design.

Signed-off-by: Jan Lindemann <jan@janware.com>
parent 187a3343c3
commit 7d12ad7f8c
1 changed file with 663 additions and 651 deletions
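A minimal standalone sketch of the pattern this commit applies (illustrative only, not code taken from the diff): module-level state such as opt_debug and free helpers such as debug() move onto a Projects instance and are reached through self instead of global lookups.

    # Before: module-level globals and free functions
    opt_debug = False

    def debug(*objs):
        if opt_debug:
            print("DEBUG:", *objs)

    # After: the same state and helper wrapped in a class
    class Projects(object):
        def __init__(self):
            self.opt_debug = False

        def debug(self, *objs):
            if self.opt_debug:
                print("DEBUG:", *objs)

    projects = Projects()
    projects.debug("only printed when opt_debug is enabled")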
@@ -44,7 +44,7 @@ class ResultCache(object):
else:
k = str(k)
depth += 1
#debug('depth = ', depth, 'key = ', k, 'd = ', str(d))
#self.projects.debug('depth = ', depth, 'key = ', k, 'd = ', str(d))
if k in d:
if l == depth:
return d[k]
@@ -58,39 +58,42 @@ class ResultCache(object):
#d = d[k]
raise Exception("cache algorithm failed for function", func.__name__, "in depth", depth)

# ----------------------------------------------------------------- class Build

class Build(object):

def __init__(self):
def __init__(self, projects):
self.projects = projects
pass

def find_proj_path(self, name):
name=name.replace("dspider-", "")
search_path=[".", "dspc/src", "dspc/src/dspcd-plugins", "dspc/src/io" ]
name = name.replace("dspider-", "")
search_path = [".", "dspc/src", "dspc/src/dspcd-plugins", "dspc/src/io" ]
for sub in search_path:
path=projs_root + "/" + sub + "/" + name
path = self.projects.projs_root + "/" + sub + "/" + name
if os.path.exists(path):
return os.path.abspath(path)
raise Exception("module " + name + " not found below " + projs_root)
raise Exception("module " + name + " not found below " + self.projects.projs_root)

def find_proj_path_cached(self, name):
return res_cache.run(self.find_proj_path, [ name ])
return self.projects.res_cache.run(self.find_proj_path, [ name ])

def read_deps(self, cur, prereq_type):
# dep cache doesn't make a difference at all
if prereq_type in dep_cache:
if cur in dep_cache[prereq_type]:
return dep_cache[prereq_type][cur]
if prereq_type in self.projects.dep_cache:
if cur in self.projects.dep_cache[prereq_type]:
return self.projects.dep_cache[prereq_type][cur]
else:
dep_cache[prereq_type] = {}
self.projects.dep_cache[prereq_type] = {}

if True:
r = get_modules_from_project_txt([ cur ], ['pkg.requires.jw'],
r = self.projects.get_modules_from_project_txt([ cur ], ['pkg.requires.jw'],
prereq_type, scope = 2, add_self=False, names_only=True)
debug('prerequisites = ' + ' '.join(r))
self.projects.debug('prerequisites = ' + ' '.join(r))
else: # legacy from build.py
projects_py = sys.executable + " " + my_dir + "/projects.py --prefix " + projs_root + " " + os.getenv('PROJECTS_PY_EXTRA_ARGS', "")
projects_py = sys.executable + " " + self.projects.my_dir + "/projects.py --prefix " + self.projects.projs_root + " " + os.getenv('PROJECTS_PY_EXTRA_ARGS', "")
cmd = projects_py + " prereq " + prereq_type + " " + cur
debug('running', cmd)
self.projects.debug('running', cmd)
p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
p.wait()
if p.returncode:
@@ -99,30 +102,30 @@ class Build(object):
pattern = re.compile(r'.*') # might be useful at a later point, currently pointless
for line in iter(p.stdout.readline, b''):
line = line.decode(sys.stdout.encoding)
debug(cmd + ' returned: ', line)
self.projects.debug(cmd + ' returned: ', line)
if not pattern.match(line):
continue
for d in line.split():
r.add(d)
if cur in r:
r.remove(cur)
debug('inserting', prereq_type, "prerequisites of", cur, ":", ' '.join(r))
dep_cache[prereq_type][cur] = r
self.projects.debug('inserting', prereq_type, "prerequisites of", cur, ":", ' '.join(r))
self.projects.dep_cache[prereq_type][cur] = r
return r

def read_deps_cached(self, cur, prereq_type):
return res_cache.run(self.read_deps, [ cur, prereq_type ])
return self.projects.res_cache.run(self.read_deps, [ cur, prereq_type ])

def add_dep_tree(self, cur, prereq_types, tree, all_deps):
debug("adding prerequisites " + ' '.join(prereq_types) + " of module " + cur)
self.projects.debug("adding prerequisites " + ' '.join(prereq_types) + " of module " + cur)
if cur in all_deps:
debug('already handled module ' + cur)
self.projects.debug('already handled module ' + cur)
return 0

deps = set()
all_deps.add(cur)
for t in prereq_types:
debug("checking prereqisites of type " + t)
self.projects.debug("checking prereqisites of type " + t)
deps.update(self.read_deps_cached(cur, t))
for d in deps:
self.add_dep_tree(d, prereq_types, tree, all_deps)
@@ -133,7 +136,7 @@ class Build(object):
all_deps = set()
dep_tree = {}
for m in modules:
debug("--- adding dependency tree of module " + m)
self.projects.debug("--- adding dependency tree of module " + m)
self.add_dep_tree(m, prereq_types, dep_tree, all_deps)
while len(all_deps):
for d in all_deps:
@@ -159,7 +162,7 @@ class Build(object):

print(',' + delim + ' >')

patt = is_excluded_from_build(module)
patt = self.projects.is_excluded_from_build(module)
if patt is not None:
print('| Configured to skip build on platform >' + patt + '<')
print('`' + delim + ' <')
@@ -175,7 +178,7 @@ class Build(object):
print('`' + delim + ' <')
if p.returncode:
print(make_cmd + ' failed')
raise Exception(time.strftime("%Y-%m-%d %H:%M") + ": failed to make target " + target + " in module " + module + " below base " + projs_root)
raise Exception(time.strftime("%Y-%m-%d %H:%M") + ": failed to make target " + target + " in module " + module + " below base " + self.projects.projs_root)

def run_make_on_modules(self, modules, order, target):
cur_project = 0
@@ -195,10 +198,9 @@ class Build(object):
self.run_make(m, target, cur_project, num_projects)

def run(self, args_):
global opt_debug
visited = {}
glob_order = []
projs_root=pwd.getpwuid(os.getuid()).pw_dir + "/local/src/jw.dev/proj"
self.projects.projs_root = pwd.getpwuid(os.getuid()).pw_dir + "/local/src/jw.dev/proj"

# -- parse command line
parser = argparse.ArgumentParser(description='janware software project build tool')
@@ -216,9 +218,9 @@ class Build(object):

args=parser.parse_args(args_)
if args.debug:
opt_debug = True
self.projects.opt_debug = True

debug("----------------------------------------- running ", ' '.join(args_))
self.projects.debug("----------------------------------------- running ", ' '.join(args_))

modules=args.modules
exclude=args.exclude.split()
@@ -261,31 +263,49 @@ class Build(object):

print(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"))

def debug(*objs):
if opt_debug:
# ----------------------------------------------------------------- class Projects

class Projects(object):

def __init__(self):

self.global_args = []
self.opt_os = None
self.top_name = None
self.glob_os_cascade = None

self.dep_cache = {}
self.my_dir = os.path.dirname(os.path.realpath(__file__))
self.opt_debug = False
self.res_cache = ResultCache()
self.topdir = None
self.projs_root = expanduser("~") + '/local/src/jw.dev/proj'

def debug(self, *objs):
if self.opt_debug:
print("DEBUG: ", *objs, file=sys.stderr)

def err(*objs):
def err(self, *objs):
print("ERR: ", *objs, file=sys.stderr)

def proj_dir(name):
if name == top_name:
return topdir
return projs_root + '/' + name
def proj_dir(self, name):
if name == self.top_name:
return self.topdir
return self.projs_root + '/' + name

def re_section(name):
def re_section(self, name):
return re.compile('[' + name + ']'
'.*?'
'(?=[)',
re.DOTALL)

def remove_duplicates(seq):
def remove_duplicates(self, seq):
seen = set()
seen_add = seen.add
return [x for x in seq if not (x in seen or seen_add(x))]

def get_os(args = ""):
for d in [ projs_root + '/jw-build/scripts', '/opt/jw-build/bin' ]:
def get_os(self, args = ""):
for d in [ self.projs_root + '/jw-build/scripts', '/opt/jw-build/bin' ]:
script = d + '/get-os.sh'
if isfile(script):
cmd = '/bin/bash ' + script
@@ -294,27 +314,25 @@ def get_os(args = ""):
p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
(out, rr) = p.communicate()
if rr:
err("failed to run ", cmd)
self.err("failed to run ", cmd)
continue
out = re.sub('\n', '', out.decode('utf-8'))
return out
return "linux"

# TODO: add support for customizing this in project.conf
def htdocs_dir(name):
pd = proj_dir(name)
for r in [ pd + "/tools/html/htdocs", pd + "/htdocs", "/srv/www/proj/" + name
]:
# TODO: add support for customizing this in project.conf
def htdocs_dir(self, name):
pd = self.proj_dir(name)
for r in [ pd + "/tools/html/htdocs", pd + "/htdocs", "/srv/www/proj/" + name ]:
if isdir(r):
return r
return None

def os_cascade():
global glob_os_cascade
if glob_os_cascade is not None:
return glob_os_cascade
def os_cascade(self):
if self.glob_os_cascade is not None:
return self.glob_os_cascade
r = [ 'os', platform.system().lower() ]
os = res_cache.run(get_os, [])
os = self.res_cache.run(self.get_os, [])
name = re.sub('-.*', '', os)
series = os
while True:
@@ -329,13 +347,13 @@ def os_cascade():
r.append(os)
# e.g. os, linux, suse, suse-tumbleweed
#return [ 'os', platform.system().lower(), name, os ]
glob_os_cascade = r
self.glob_os_cascade = r
return r

def strip_module_from_spec(mod):
def strip_module_from_spec(self, mod):
return re.sub(r'-devel$|-run$', '', re.split('([=><]+)', mod)[0].strip())

def get_section(path, section):
def get_section(self, path, section):
r = ''
file = open(path)
pat = '[' + section + ']'
@@ -351,12 +369,12 @@ def get_section(path, section):
file.close()
return r.rstrip()

def read_value(path, section, key):
debug("opening ", path)
def read_value(self, path, section, key):
self.debug("opening ", path)
try:
file = open(path)
except:
debug(path, "not found")
self.debug(path, "not found")
# TODO: handle this special case cleaner somewhere up the stack
if section == 'build' and key == 'libname':
return 'none'
@@ -381,7 +399,7 @@ def read_value(path, section, key):
r.append(line)
else:
r = re.findall('^ *' + key + ' *= *(.*)', line)
#debug("key " + key + ": parsed line >" + line + "<, result is " + ' '.join(r))
#self.debug("key " + key + ": parsed line >" + line + "<, result is " + ' '.join(r))
if (len(r) > 0):
break
file.close()
@@ -390,17 +408,17 @@ def read_value(path, section, key):
return r[0]
return None

def get_value(name, section, key):
debug("getting value [%s].%s for project %s (%s)" %(section, key, name, top_name))
if top_name and name == top_name:
proj_root = topdir
def get_value(self, name, section, key):
self.debug("getting value [%s].%s for project %s (%s)" %(section, key, name, self.top_name))
if self.top_name and name == self.top_name:
proj_root = self.topdir
else:
proj_root = projs_root + '/' + name
debug("proj_root = " + proj_root)
proj_root = self.projs_root + '/' + name
self.debug("proj_root = " + proj_root)

if section == 'version':
proj_version_dirs = [ proj_root ]
if proj_root != topdir:
if proj_root != self.topdir:
proj_version_dirs.append('/usr/share/doc/packages/' + name)
for d in proj_version_dirs:
version_path = d + '/VERSION'
@@ -410,34 +428,34 @@ def get_value(name, section, key):
fd.close()
return r
except EnvironmentError:
debug("ignoring unreadable file " + version_path)
self.debug("ignoring unreadable file " + version_path)
continue
raise Exception("No version file found for project \"" + name + "\"")

path = proj_root + '/make/project.conf'
#print('path = ', path, 'top_name = ', top_name, 'name = ', name)
return res_cache.run(read_value, [path, section, key])
#print('path = ', path, 'self.top_name = ', self.top_name, 'name = ', name)
return self.res_cache.run(self.read_value, [path, section, key])

def collect_values(names, section, key):
def collect_values(self, names, section, key):
r = ""
for n in names:
val = get_value(n, section, key)
val = self.get_value(n, section, key)
if val:
r = r + " " + val
return remove_duplicates([x.strip() for x in r.split(",")])
return self.remove_duplicates([x.strip() for x in r.split(",")])

# scope 0: no children
# scope 1: children
# scope 2: recursive
# scope 0: no children
# scope 1: children
# scope 2: recursive

def add_modules_from_project_txt_cached(buf, visited, spec, section, key, add_self, scope,
def add_modules_from_project_txt_cached(self, buf, visited, spec, section, key, add_self, scope,
names_only):
return res_cache.run(add_modules_from_project_txt, [buf, visited, spec, section, key,
return self.res_cache.run(self.add_modules_from_project_txt, [buf, visited, spec, section, key,
add_self, scope, names_only])

def add_modules_from_project_txt(buf, visited, spec, section, key, add_self, scope,
def add_modules_from_project_txt(self, buf, visited, spec, section, key, add_self, scope,
names_only):
name = strip_module_from_spec(spec)
name = self.strip_module_from_spec(spec)
if names_only:
spec = name
if spec in buf:
@@ -447,8 +465,8 @@ def add_modules_from_project_txt(buf, visited, spec, section, key, add_self, sco
buf.append(spec)
return
visited.add(spec)
deps = get_value(name, section, key)
debug("name = ", name, "section = ", section, "key = ", key, "deps = ", deps, "scope = ", scope, "visited = ", visited)
deps = self.get_value(name, section, key)
self.debug("name = ", name, "section = ", section, "key = ", key, "deps = ", deps, "scope = ", scope, "visited = ", visited)
if deps and scope > 0:
if scope == 1:
subscope = 0
@@ -459,13 +477,13 @@ def add_modules_from_project_txt(buf, visited, spec, section, key, add_self, sco
dep = dep.strip()
if not(len(dep)):
continue
add_modules_from_project_txt_cached(buf, visited, dep,
self.add_modules_from_project_txt_cached(buf, visited, dep,
section, key, add_self=True, scope=subscope,
names_only=names_only)
if add_self:
buf.append(spec)

def get_modules_from_project_txt(names, sections, keys, add_self, scope,
def get_modules_from_project_txt(self, names, sections, keys, add_self, scope,
names_only = True):
if isinstance(keys, basestring):
keys = [ keys ]
@@ -476,7 +494,7 @@ def get_modules_from_project_txt(names, sections, keys, add_self, scope,
visited = set()
for name in names:
rr = []
add_modules_from_project_txt_cached(rr, visited, name, section, key, add_self, scope,
self.add_modules_from_project_txt_cached(rr, visited, name, section, key, add_self, scope,
names_only)
# TODO: this looks like a performance hogger
for m in rr:
@@ -484,7 +502,7 @@ def get_modules_from_project_txt(names, sections, keys, add_self, scope,
r.append(m)
return r

def pkg_relations(rel_type, args_):
def pkg_relations(self, rel_type, args_):
parser = argparse.ArgumentParser(description='pkg-' + rel_type)
# TODO: implement Vendor evaluation

@@ -505,11 +523,11 @@ def pkg_relations(rel_type, args_):
args = parser.parse_args(args_)
version_pattern=re.compile("[0-9-.]*")
if args.subsections is None:
subsecs = os_cascade()
subsecs = self.os_cascade()
subsecs.append('jw')
else:
subsecs = args.subsections.split(',')
debug('flavour = ', args.flavour, ', subsecs = ', ' '.join(subsecs))
self.debug('flavour = ', args.flavour, ', subsecs = ', ' '.join(subsecs))

r = []
flavours = args.flavour.split(',')
@@ -523,7 +541,7 @@ def pkg_relations(rel_type, args_):
if m in visited:
continue
visited.add(m)
value = get_value(m, section, flavour)
value = self.get_value(m, section, flavour)
if not value:
continue
deps = value.split(',')
@@ -544,7 +562,7 @@ def pkg_relations(rel_type, args_):
if args.dont_expand_version_macros and dep_name in args.module:
version = dep[2]
else:
version = get_value(dep_name, 'version', '')
version = self.get_value(dep_name, 'version', '')
if dep[2] == 'VERSION':
if args.dont_strip_revision:
dep[2] = version
@@ -562,8 +580,8 @@ def pkg_relations(rel_type, args_):
r.append(cleaned_dep)
print(args.delimiter.join(r))

def get_libname(names):
vals = get_modules_from_project_txt(names, ['build'], 'libname',
def get_libname(self, names):
vals = self.get_modules_from_project_txt(names, ['build'], 'libname',
scope = 1, add_self=False, names_only=True)
if not vals:
return ' '.join(names)
@@ -571,167 +589,167 @@ def get_libname(names):
vals.remove('none')
return ' '.join(reversed(vals))

def is_excluded_from_build(module):
debug("checking if module " + module + " is excluded from build")
exclude = get_modules_from_project_txt([ module ], ['build'], 'exclude',
def is_excluded_from_build(self, module):
self.debug("checking if module " + module + " is excluded from build")
exclude = self.get_modules_from_project_txt([ module ], ['build'], 'exclude',
scope = 1, add_self=False, names_only=True)
cascade = os_cascade() + [ 'all' ]
cascade = self.os_cascade() + [ 'all' ]
for p1 in exclude:
for p2 in cascade:
if p1 == p2:
return p1
return None

# -L needs to contain more paths than libs linked with -l would require
def get_ldpathflags(names, exclude = []):
deps = get_modules_from_project_txt(names, ['pkg.requires.jw'], 'build',
# -L needs to contain more paths than libs linked with -l would require
def get_ldpathflags(self, names, exclude = []):
deps = self.get_modules_from_project_txt(names, ['pkg.requires.jw'], 'build',
scope = 2, add_self=True, names_only=True)
r = ''
for m in deps:
if m in exclude:
continue
libname = get_libname([m])
libname = self.get_libname([m])
if len(libname):
r = r + ' -L' + proj_dir(m) + '/lib'
r = r + ' -L' + self.proj_dir(m) + '/lib'
print(r[1:])

def get_ldflags(names, exclude = [], add_self_ = False):
def get_ldflags(self, names, exclude = [], add_self_ = False):
#print(names)
deps = get_modules_from_project_txt(names, ['pkg.requires.jw'], 'build',
deps = self.get_modules_from_project_txt(names, ['pkg.requires.jw'], 'build',
scope = 1, add_self=add_self_, names_only=True)
debug("deps = " + ' '.join(deps))
self.debug("deps = " + ' '.join(deps))
#print(deps)
r = ''
for m in reversed(deps):
if m in exclude:
continue
libname = get_libname([m])
libname = self.get_libname([m])
if len(libname):
#r = r + ' -L' + proj_dir(m) + '/lib -l' + libname
#r = r + ' -L' + self.proj_dir(m) + '/lib -l' + libname
r = r + ' -l' + libname
if len(r):
ldpathflags = get_ldpathflags(names, exclude)
ldpathflags = self.get_ldpathflags(names, exclude)
if ldpathflags:
r = ldpathflags + ' ' + r
return r[1::]
return ''

def commands():
def commands(self):
f = open(sys.argv[0])
cmds = []
for line in f:
debug("checking line ", line)
self.debug("checking line ", line)
rr = re.findall('^def *cmd_([a-z0-9_]+).*', line)
if len(rr):
cmds.append(rr[0].replace('_', '-'))
f.close()
return ' '.join(cmds)

# --------------------------------------------------------------------- commands
# ----------------------------------------------------------------- commands

def cmd_commands(args_):
print(commands())
def cmd_commands(self, args_):
print(self.commands())

def cmd_build(args_):
build = Build()
def cmd_build(self, args_):
build = Build(self)
build.run(args_)

def cmd_test(args_):
def cmd_test(self, args_):
parser = argparse.ArgumentParser(description='Test')
parser.add_argument('blah', default='', help='The blah argument')
args=parser.parse_args(args_)
print("blah = " + args.blah)

def cmd_os_cascade(args_):
print(' '.join(os_cascade()))
def cmd_os_cascade(self, args_):
print(' '.join(self.os_cascade()))

def cmd_ldlibpath(args_):
def cmd_ldlibpath(self, args_):
parser = argparse.ArgumentParser(description='ldlibpath')
parser.add_argument('module', nargs='*', help='Modules')
args=parser.parse_args(args_)
deps = get_modules_from_project_txt(args.module, ['pkg.requires.jw'], [ 'run', 'build', 'devel' ],
deps = self.get_modules_from_project_txt(args.module, ['pkg.requires.jw'], [ 'run', 'build', 'devel' ],
scope = 2, add_self=True, names_only=True)
r = ''
for m in deps:
r = r + ':' + proj_dir(m) + '/lib'
r = r + ':' + self.proj_dir(m) + '/lib'
print(r[1:])

def cmd_pythonpath(args_):
def cmd_pythonpath(self, args_):
parser = argparse.ArgumentParser(description='pythonpath')
parser.add_argument('module', nargs='*', help='Modules')
args=parser.parse_args(args_)
deps = get_modules_from_project_txt(args.module, ['pkg.requires.jw'], [ 'run', 'build' ],
deps = self.get_modules_from_project_txt(args.module, ['pkg.requires.jw'], [ 'run', 'build' ],
scope = 2, add_self=True, names_only=True)
r = ''
for m in deps:
pdir = proj_dir(m)
pdir = self.proj_dir(m)
for subdir in [ 'src/python', 'tools/python' ]:
cand = pdir + "/" + subdir
if isdir(cand):
r = r + ':' + cand
print(r[1:])

def cmd_exepath(args_):
def cmd_exepath(self, args_):
parser = argparse.ArgumentParser(description='exepath')
parser.add_argument('module', nargs='*', help='Modules')
args=parser.parse_args(args_)
deps = get_modules_from_project_txt(args.module, ['pkg.requires.jw'], [ 'run', 'build', 'devel' ],
deps = self.get_modules_from_project_txt(args.module, ['pkg.requires.jw'], [ 'run', 'build', 'devel' ],
scope = 2, add_self=True, names_only=True)
debug('deps = ', deps)
self.debug('deps = ', deps)
r = ''
for m in deps:
r = r + ':' + proj_dir(m) + '/bin'
r = r + ':' + self.proj_dir(m) + '/bin'
print(r[1:])

def cmd_libname(args_):
def cmd_libname(self, args_):
parser = argparse.ArgumentParser(description='libname')
parser.add_argument('module', nargs='*', help='Modules')
args=parser.parse_args(args_)
print(get_libname(args.module))
print(self.get_libname(args.module))

def cmd_ldflags(args_):
def cmd_ldflags(self, args_):
parser = argparse.ArgumentParser(description='ldflags')
parser.add_argument('module', nargs='*', help='Modules')
parser.add_argument('--exclude', action='append', help='Exclude Modules', default=[])
parser.add_argument('--add-self', '-s', action='store_true',
default=False, help='Include libflags of specified modules, too, not only their dependencies')
args=parser.parse_args(args_)
print(get_ldflags(args.module, args.exclude, args.add_self))
print(self.get_ldflags(args.module, args.exclude, args.add_self))

def cmd_cflags(args_):
def cmd_cflags(self, args_):
parser = argparse.ArgumentParser(description='cflags')
parser.add_argument('module', nargs='*', help='Modules')
args=parser.parse_args(args_)
deps = get_modules_from_project_txt(args.module, ['pkg.requires.jw'], 'build',
deps = self.get_modules_from_project_txt(args.module, ['pkg.requires.jw'], 'build',
scope = 2, add_self=True, names_only=True)
r = ''
for m in reversed(deps):
r = r + ' -I' + proj_dir(m) + '/include'
r = r + ' -I' + self.proj_dir(m) + '/include'
print(r[1:])

def cmd_path(args_):
def cmd_path(self, args_):
parser = argparse.ArgumentParser(description='path')
parser.add_argument('module', nargs='*', help='Modules')
args=parser.parse_args(args_)
deps = get_modules_from_project_txt(args.module, ['pkg.requires.jw'], 'run',
deps = self.get_modules_from_project_txt(args.module, ['pkg.requires.jw'], 'run',
scope = 2, add_self=True, names_only=True)
r = ''
for m in deps:
r = r + ':' + proj_dir(m) + '/bin'
r = r + ':' + self.proj_dir(m) + '/bin'
print(r[1:])

# TODO: seems at least partly redundant to cmd_pkg_requires / pkg_relations
def cmd_prereq(args_):
# TODO: seems at least partly redundant to cmd_pkg_requires / pkg_relations
def cmd_prereq(self, args_):
parser = argparse.ArgumentParser(description='path')
parser.add_argument('flavour', help='Flavour')
parser.add_argument('module', nargs='*', help='Modules')
args = parser.parse_args(args_)
deps = get_modules_from_project_txt(args.module, ['pkg.requires.jw'],
deps = self.get_modules_from_project_txt(args.module, ['pkg.requires.jw'],
args.flavour, scope = 2, add_self=False, names_only=True)
print(' '.join(deps))

# TODO: seems at least partly redundant to cmd_pkg_requires / pkg_relations
def cmd_required_os_pkg(args_):
# TODO: seems at least partly redundant to cmd_pkg_requires / pkg_relations
def cmd_required_os_pkg(self, args_):
parser = argparse.ArgumentParser(description='required-os-pkg')
parser.add_argument('module', nargs='*', help='Modules')
parser.add_argument('--flavours', help='Dependency flavours', default='build')
@@ -740,19 +758,19 @@ def cmd_required_os_pkg(args_):
args = parser.parse_args(args_)
modules = args.module
flavours = args.flavours.split()
debug("flavours = " + args.flavours)
deps = get_modules_from_project_txt(modules, ['pkg.requires.jw'], flavours,
self.debug("flavours = " + args.flavours)
deps = self.get_modules_from_project_txt(modules, ['pkg.requires.jw'], flavours,
scope = 2, add_self=True, names_only=True)
if args.skip_excluded:
for d in deps:
if is_excluded_from_build(d) is not None:
if self.is_excluded_from_build(d) is not None:
deps.remove(d)
subsecs = os_cascade()
debug("subsecs = ", subsecs)
subsecs = self.os_cascade()
self.debug("subsecs = ", subsecs)
requires = []
for s in subsecs:
for f in flavours:
vals = collect_values(deps, 'pkg.requires.' + s, f)
vals = self.collect_values(deps, 'pkg.requires.' + s, f)
if vals:
requires = requires + vals
# TODO: add all not in build tree as -devel
@@ -761,45 +779,45 @@ def cmd_required_os_pkg(args_):
r = r + ' ' + m
print(r[1:])

def cmd_pkg_requires(args_):
return pkg_relations("requires", args_)
def cmd_pkg_requires(self, args_):
return self.pkg_relations("requires", args_)

def cmd_pkg_conflicts(args_):
return pkg_relations("conflicts", args_)
def cmd_pkg_conflicts(self, args_):
return self.pkg_relations("conflicts", args_)

def cmd_pkg_provides(args_):
return pkg_relations("provides", args_)
def cmd_pkg_provides(self, args_):
return self.pkg_relations("provides", args_)

def cmd_proj_dir(args_):
def cmd_proj_dir(self, args_):
parser = argparse.ArgumentParser(description='proj-dir')
parser.add_argument('module', nargs='*', help='Modules')
args=parser.parse_args(args_)
r = []
for m in args.module:
r.append(proj_dir(m))
r.append(self.proj_dir(m))
print(' '.join(r))

def cmd_htdocs_dir(args_):
def cmd_htdocs_dir(self, args_):
parser = argparse.ArgumentParser(description='htdocs-dir')
parser.add_argument('module', nargs='*', help='Modules')
args=parser.parse_args(args_)
r = []
for m in args.module:
r.append(htdocs_dir(m))
r.append(self.htdocs_dir(m))
print(' '.join(r))

def cmd_summary(args_):
def cmd_summary(self, args_):
parser = argparse.ArgumentParser(description='summary')
parser.add_argument('module', nargs='*', help='Modules')
args=parser.parse_args(args_)
r = []
for m in args.module:
summary = get_value(m, "summary", None)
summary = self.get_value(m, "summary", None)
if summary is not None:
r.append(summary)
print(' '.join(r))

def contains(small, big):
def contains(self, small, big):
for i in xrange(len(big)-len(small)+1):
for j in xrange(len(small)):
if big[i+j] != small[j]:
@@ -808,18 +826,18 @@ def contains(small, big):
return i, i+len(small)
return False

def read_dep_graph(modules, section, graph):
def read_dep_graph(self, modules, section, graph):
for m in modules:
if m in graph:
continue
deps = get_modules_from_project_txt([ m ], ['pkg.requires.jw'], section,
deps = self.get_modules_from_project_txt([ m ], ['pkg.requires.jw'], section,
scope = 1, add_self=False, names_only=True)
if not deps is None:
graph[m] = deps
for d in deps:
read_dep_graph([ d ], section, graph)
self.read_dep_graph([ d ], section, graph)

def flip_graph(graph):
def flip_graph(self, graph):
r = {}
for m, deps in graph.items():
for d in deps:
@@ -828,23 +846,23 @@ def flip_graph(graph):
r[d].add(m)
return r

def check_circular_deps(module, section, graph, unvisited, temp, path):
def check_circular_deps(self, module, section, graph, unvisited, temp, path):
if module in temp:
debug('found circular dependency at module', module)
self.debug('found circular dependency at module', module)
return module
if not module in unvisited:
return None
temp.add(module)
if module in graph:
for m in graph[module]:
last = check_circular_deps(m, section, graph, unvisited, temp, path)
last = self.check_circular_deps(m, section, graph, unvisited, temp, path)
if last is not None:
path.insert(0, m)
return last
unvisited.remove(module)
temp.remove(module)

def cmd_check(args_):
def cmd_check(self, args_):
parser = argparse.ArgumentParser(description='check')
parser.add_argument('module', nargs='*', help='Modules')
parser.add_argument('--flavour', '-f', nargs='?', default = 'build')
@@ -852,15 +870,15 @@ def cmd_check(args_):

graph = {}
path = []
read_dep_graph(args.module, args.flavour, graph)
self.read_dep_graph(args.module, args.flavour, graph)
unvisited = graph.keys()
temp = set()
while len(unvisited) is not 0:
m = unvisited[0]
debug('checking circular dependency of', m)
last = check_circular_deps(m, args.flavour, flip_graph(graph), unvisited, temp, path)
self.debug('checking circular dependency of', m)
last = self.check_circular_deps(m, args.flavour, self.flip_graph(graph), unvisited, temp, path)
if last is not None:
debug('found circular dependency below', m, ', last is', last)
self.debug('found circular dependency below', m, ', last is', last)
print('found circular dependency in flavour', args.flavour, ':', ' -> '.join(path))
exit(1)

@@ -868,31 +886,20 @@ def cmd_check(args_):
' '.join(args.module))
exit(0)

def cmd_getval(args_):
def cmd_getval(self, args_):
parser = argparse.ArgumentParser(description='Get value from project config')
parser.add_argument('--project', default = top_name, help = 'Project name')
parser.add_argument('--project', default = self.top_name, help = 'Project name')
parser.add_argument('section', default = '', help = 'Config section')
parser.add_argument('key', default = '', help = 'Config key')
args=parser.parse_args(args_)
print(get_value(args.project, args.section, args.key))
print(self.get_value(args.project, args.section, args.key))

# -------------------------------------------------------------------- here we go

global_args = []
res_cache = ResultCache()
dep_cache = {}
my_dir = os.path.dirname(os.path.realpath(__file__))
opt_debug = False
opt_os = None
topdir = None
top_name = None
glob_os_cascade = None
projs_root = expanduser("~") + '/local/src/jw.dev/proj'

if __name__ == "__main__":
# -------------------------------------------------------------------- here we go
def run(self):
if __name__ == "__main__":
skip = 0
for a in sys.argv[1::]:
global_args.append(a)
self.global_args.append(a)
if a in [ '--prefix', '-p', '--topdir', '-t' ]:
skip = 1
continue
@@ -907,26 +914,31 @@ if __name__ == "__main__":
parser.add_argument('--debug', '-d', action='store_true',
default=False, help='Output debug information to stderr')
parser.add_argument('--topdir', '-t', nargs=1, default = [], help='Project Path')
parser.add_argument('--prefix', '-p', nargs=1, default = [ projs_root ], help='Projects Path Prefix')
parser.add_argument('--prefix', '-p', nargs=1, default = [ self.projs_root ], help='Projects Path Prefix')
parser.add_argument('--os', '-O', nargs=1, default = [], help='Target operating system')
parser.add_argument('arg', nargs='*', help='Command arguments')
args = parser.parse_args(global_args)
args = parser.parse_args(self.global_args)

opt_debug = args.debug
self.opt_debug = args.debug
if len(args.os):
opt_os = args.os[0]
self.opt_os = args.os[0]

debug("----------------------------------------- running ", ' '.join(sys.argv))
self.debug("----------------------------------------- running ", ' '.join(sys.argv))

projs_root = args.prefix[0]
self.projs_root = args.prefix[0]
if len(args.topdir):
topdir = args.topdir[0]
self.topdir = args.topdir[0]

if topdir:
top_name = res_cache.run(read_value, [topdir + '/make/project.conf', 'build', 'name'])
if not top_name:
top_name = re.sub('-[0-9.-]*$', '', basename(realpath(topdir)))
if self.topdir:
self.top_name = self.res_cache.run(self.read_value, [self.topdir + '/make/project.conf', 'build', 'name'])
if not self.top_name:
self.top_name = re.sub('-[0-9.-]*$', '', basename(realpath(self.topdir)))

if __name__ == "__main__":
cmd_name = 'cmd_' + args.cmd.replace('-', '_')
cmd = getattr(self, cmd_name)
cmd(sys.argv[(len(self.global_args) + 1)::])

if __name__ == "__main__":
cmd = getattr(sys.modules[__name__], 'cmd_' + args.cmd.replace('-', '_'))
cmd(sys.argv[(len(global_args) + 1)::])
projects = Projects()
projects.run()