Commit ad97104d authored by nextime's avatar nextime

first commit

parents

Too many changes to show.

To preserve performance only 322 of 322+ files are displayed.

argv = ['./waf', 'configure']
blddir = '/home/nextime/Microchip/python/PyBindGen/pybindgen-0.15.0/build'
commands = {'dist': 0, 'configure': True, 'distcheck': 0, 'install': 0, 'build': 0, 'clean': 0, 'distclean': 0, 'check': 0, 'uninstall': 0}
cwd = '/home/nextime/Microchip/python/PyBindGen/pybindgen-0.15.0'
environ = {'PYTHONPATH': '/home/nextime/Microchip/python/PyBindGen/pybindgen-0.15.0', 'SSH_CLIENT': '192.168.4.4 44028 22', 'PRINTER': 'HP_HP_Color_LaserJet_CP1515n', 'LOGNAME': 'nextime', 'USER': 'nextime', 'HOME': '/home/nextime', 'PATH': '/home/nextime/bin:/usr/local/bin:/usr/bin:/bin:/usr/games:/home/nextime/Microchip/bin:/usr/hitech/picc-18/pro/9.63PL3/bin', '_': './waf', 'SSH_CONNECTION': '192.168.4.4 44028 192.168.4.2 22', 'LANG': 'en_US.UTF-8', 'TERM': 'xterm', 'SHELL': '/bin/bash', 'XDG_SESSION_COOKIE': '3388004bbfef6792c52a954d0000041e-1340407338.408424-243343910', 'SHLVL': '1', 'SSH_TTY': '/dev/pts/3', 'OLDPWD': '/home/nextime/Microchip/python/PyBindGen/pybindgen-0.15.0/examples', 'HISTCONTROL': 'ignoreboth', 'PWD': '/home/nextime/Microchip/python/PyBindGen/pybindgen-0.15.0', 'MAIL': '/var/mail/nextime', 'LS_COLORS': 'rs=0:di=01;34:ln=01;36:mh=00:pi=40;33:so=01;35:do=01;35:bd=40;33;01:cd=40;33;01:or=40;31;01:su=37;41:sg=30;43:ca=30;41:tw=30;42:ow=34;42:st=37;44:ex=01;32:*.tar=01;31:*.tgz=01;31:*.arj=01;31:*.taz=01;31:*.lzh=01;31:*.lzma=01;31:*.tlz=01;31:*.txz=01;31:*.zip=01;31:*.z=01;31:*.Z=01;31:*.dz=01;31:*.gz=01;31:*.lz=01;31:*.xz=01;31:*.bz2=01;31:*.bz=01;31:*.tbz=01;31:*.tbz2=01;31:*.tz=01;31:*.deb=01;31:*.rpm=01;31:*.jar=01;31:*.rar=01;31:*.ace=01;31:*.zoo=01;31:*.cpio=01;31:*.7z=01;31:*.rz=01;31:*.jpg=01;35:*.jpeg=01;35:*.gif=01;35:*.bmp=01;35:*.pbm=01;35:*.pgm=01;35:*.ppm=01;35:*.tga=01;35:*.xbm=01;35:*.xpm=01;35:*.tif=01;35:*.tiff=01;35:*.png=01;35:*.svg=01;35:*.svgz=01;35:*.mng=01;35:*.pcx=01;35:*.mov=01;35:*.mpg=01;35:*.mpeg=01;35:*.m2v=01;35:*.mkv=01;35:*.ogm=01;35:*.mp4=01;35:*.m4v=01;35:*.mp4v=01;35:*.vob=01;35:*.qt=01;35:*.nuv=01;35:*.wmv=01;35:*.asf=01;35:*.rm=01;35:*.rmvb=01;35:*.flc=01;35:*.avi=01;35:*.fli=01;35:*.flv=01;35:*.gl=01;35:*.dl=01;35:*.xcf=01;35:*.xwd=01;35:*.yuv=01;35:*.cgm=01;35:*.emf=01;35:*.axv=01;35:*.anx=01;35:*.ogv=01;35:*.ogx=01;35:*.aac=00;36:*.au=00;36:*.flac=00;36:*.mid=00;36:*.midi=00;36:*.mka=00;36:*.mp3=00;36:*.
mpc=00;36:*.ogg=00;36:*.ra=00;36:*.wav=00;36:*.axa=00;36:*.oga=00;36:*.spx=00;36:*.xspf=00;36:'}
files = ['/home/nextime/Microchip/python/PyBindGen/pybindgen-0.15.0/benchmarks/wscript', '/home/nextime/Microchip/python/PyBindGen/pybindgen-0.15.0/wscript']
hash = -505678433355082749
options = {'compile_targets': None, 'force': False, 'verbose': 0, 'nocache': False, 'progress_bar': 0, 'valgrind': False, 'destdir': '', 'keep': False, 'zones': '', 'blddir': '', 'prefix': '/usr/local/', 'examples': False, 'disable_pygccxml': False, 'pyo': 1, 'srcdir': '', 'jobs': 8, 'pyc': 1, 'check_cxx_compiler': 'g++ icpc sunc++', 'check_c_compiler': 'gcc icc suncc', 'build_profile': 'default'}
srcdir = '/home/nextime/Microchip/python/PyBindGen/pybindgen-0.15.0'
#! /usr/bin/env python
# encoding: utf-8
import sys
if sys.hexversion < 0x020400f0: from sets import Set as set
import os,sys,errno,re,glob,gc,datetime,shutil
try:import cPickle
except:import pickle as cPickle
import Runner,TaskGen,Node,Scripting,Utils,Environment,Task,Logs,Options
from Logs import debug,error,info
from Constants import*
SAVED_ATTRS='root srcnode bldnode node_sigs node_deps raw_deps task_sigs id_nodes'.split()
bld=None
class BuildError(Utils.WafError):
	"""Raised when one or more tasks could not be built."""
	def __init__(self,b=None,t=None):
		# 't' defaults to None instead of a shared mutable list ([]) to
		# avoid the mutable-default-argument pitfall; callers passing a
		# list are unaffected.
		self.bld=b
		self.tasks=t or []
		self.ret=1
		Utils.WafError.__init__(self,self.format_error())
	def format_error(self):
		# Build a multi-line summary from each failed task's own report.
		lst=['Build failed']
		for tsk in self.tasks:
			txt=tsk.format_error()
			if txt:lst.append(txt)
		return'\n'.join(lst)
def group_method(fun):
	"""Decorator for the installation methods of BuildContext.

	The wrapped method runs only when the context is in install/uninstall
	mode (``is_install``).  When ``postpone`` is true (the default) the call
	is recorded in the current task group's ``post_funs`` and replayed once
	the group is done; otherwise it runs immediately.
	"""
	def f(*k,**kw):
		# k[0] is the BuildContext instance ('self').
		if not k[0].is_install:
			return False
		postpone=True
		if'postpone'in kw:
			postpone=kw['postpone']
			del kw['postpone']
		if postpone:
			m=k[0].task_manager
			m.groups[m.current_group].post_funs.append((fun,k,kw))
			# NOTE(review): 'cwd' is set after appending; the stored kw dict
			# picks it up by reference -- looks intentional, if subtle.
			kw['cwd']=k[0].path
		else:
			fun(*k,**kw)
	return f
class BuildContext(Utils.Context):
def __init__(self):
global bld
bld=self
self.task_manager=Task.TaskManager()
self.id_nodes=0
self.idx={}
self.all_envs={}
self.bdir=''
self.path=None
self.deps_man=Utils.DefaultDict(list)
self.cache_node_abspath={}
self.cache_scanned_folders={}
self.uninstall=[]
for v in'cache_node_abspath task_sigs node_deps raw_deps node_sigs'.split():
var={}
setattr(self,v,var)
self.cache_dir_contents={}
self.all_task_gen=[]
self.task_gen_cache_names={}
self.cache_sig_vars={}
self.log=None
self.root=None
self.srcnode=None
self.bldnode=None
class node_class(Node.Node):
pass
self.node_class=node_class
self.node_class.__module__="Node"
self.node_class.__name__="Nodu"
self.node_class.bld=self
self.is_install=None
	def __copy__(self):
		# A build context owns global state (node tree, caches, the module
		# level 'bld'); copying one would corrupt that state, so forbid it.
		raise Utils.WafError('build contexts are not supposed to be cloned')
def load(self):
try:
env=Environment.Environment(os.path.join(self.cachedir,'build.config.py'))
except(IOError,OSError):
pass
else:
if env['version']<HEXVERSION:
raise Utils.WafError('Version mismatch! reconfigure the project')
for t in env['tools']:
self.setup(**t)
try:
gc.disable()
f=data=None
Node.Nodu=self.node_class
try:
f=open(os.path.join(self.bdir,DBFILE),'rb')
except(IOError,EOFError):
pass
try:
if f:data=cPickle.load(f)
except AttributeError:
if Logs.verbose>1:raise
if data:
for x in SAVED_ATTRS:setattr(self,x,data[x])
else:
debug('build: Build cache loading failed')
finally:
if f:f.close()
gc.enable()
def save(self):
gc.disable()
self.root.__class__.bld=None
Node.Nodu=self.node_class
db=os.path.join(self.bdir,DBFILE)
file=open(db+'.tmp','wb')
data={}
for x in SAVED_ATTRS:data[x]=getattr(self,x)
cPickle.dump(data,file,-1)
file.close()
try:os.unlink(db)
except OSError:pass
os.rename(db+'.tmp',db)
self.root.__class__.bld=self
gc.enable()
def clean(self):
debug('build: clean called')
precious=set([])
for env in self.all_envs.values():
for x in env[CFG_FILES]:
node=self.srcnode.find_resource(x)
if node:
precious.add(node.id)
def clean_rec(node):
for x in list(node.childs.keys()):
nd=node.childs[x]
tp=nd.id&3
if tp==Node.DIR:
clean_rec(nd)
elif tp==Node.BUILD:
if nd.id in precious:continue
for env in self.all_envs.values():
try:os.remove(nd.abspath(env))
except OSError:pass
node.childs.__delitem__(x)
clean_rec(self.srcnode)
for v in'node_sigs node_deps task_sigs raw_deps cache_node_abspath'.split():
setattr(self,v,{})
def compile(self):
debug('build: compile called')
self.flush()
self.generator=Runner.Parallel(self,Options.options.jobs)
def dw(on=True):
if Options.options.progress_bar:
if on:sys.stderr.write(Logs.colors.cursor_on)
else:sys.stderr.write(Logs.colors.cursor_off)
debug('build: executor starting')
back=os.getcwd()
os.chdir(self.bldnode.abspath())
try:
try:
dw(on=False)
self.generator.start()
except KeyboardInterrupt:
dw()
if self.generator.consumers:
self.save()
raise
except Exception:
dw()
raise
else:
dw()
if self.generator.consumers:
self.save()
if self.generator.error:
raise BuildError(self,self.task_manager.tasks_done)
finally:
os.chdir(back)
def install(self):
debug('build: install called')
self.flush()
if self.is_install<0:
lst=[]
for x in self.uninstall:
dir=os.path.dirname(x)
if not dir in lst:lst.append(dir)
lst.sort()
lst.reverse()
nlst=[]
for y in lst:
x=y
while len(x)>4:
if not x in nlst:nlst.append(x)
x=os.path.dirname(x)
nlst.sort()
nlst.reverse()
for x in nlst:
try:os.rmdir(x)
except OSError:pass
def new_task_gen(self,*k,**kw):
kw['bld']=self
if len(k)==0:
ret=TaskGen.task_gen(*k,**kw)
else:
cls_name=k[0]
try:cls=TaskGen.task_gen.classes[cls_name]
except KeyError:raise Utils.WscriptError('%s is not a valid task generator -> %s'%(cls_name,[x for x in TaskGen.task_gen.classes]))
ret=cls(*k,**kw)
return ret
def load_envs(self):
try:
lst=Utils.listdir(self.cachedir)
except OSError,e:
if e.errno==errno.ENOENT:
raise Utils.WafError('The project was not configured: run "waf configure" first!')
else:
raise
if not lst:
raise Utils.WafError('The cache directory is empty: reconfigure the project')
for file in lst:
if file.endswith(CACHE_SUFFIX):
env=Environment.Environment(os.path.join(self.cachedir,file))
name=file[:-len(CACHE_SUFFIX)]
self.all_envs[name]=env
self.init_variants()
for env in self.all_envs.values():
for f in env[CFG_FILES]:
newnode=self.path.find_or_declare(f)
try:
hash=Utils.h_file(newnode.abspath(env))
except(IOError,AttributeError):
error("cannot find "+f)
hash=SIG_NIL
self.node_sigs[env.variant()][newnode.id]=hash
self.bldnode=self.root.find_dir(self.bldnode.abspath())
self.path=self.srcnode=self.root.find_dir(self.srcnode.abspath())
self.cwd=self.bldnode.abspath()
def setup(self,tool,tooldir=None,funs=None):
if isinstance(tool,list):
for i in tool:self.setup(i,tooldir)
return
if not tooldir:tooldir=Options.tooldir
module=Utils.load_tool(tool,tooldir)
if hasattr(module,"setup"):module.setup(self)
	def init_variants(self):
		# Collect the distinct variant names from all environments and make
		# sure the per-variant caches (node signatures, abspath cache) exist.
		debug('build: init variants')
		lstvariants=[]
		for env in self.all_envs.values():
			if not env.variant()in lstvariants:
				lstvariants.append(env.variant())
		self.lst_variants=lstvariants
		debug('build: list of variants is %r'%lstvariants)
		# The key 0 holds the entries for source files (no variant).
		for name in lstvariants+[0]:
			for v in'node_sigs cache_node_abspath'.split():
				var=getattr(self,v)
				if not name in var:
					var[name]={}
def load_dirs(self,srcdir,blddir,load_cache=1):
assert(os.path.isabs(srcdir))
assert(os.path.isabs(blddir))
self.cachedir=os.path.join(blddir,CACHE_DIR)
if srcdir==blddir:
raise Utils.WafError("build dir must be different from srcdir: %s <-> %s "%(srcdir,blddir))
self.bdir=blddir
self.load()
if not self.root:
Node.Nodu=self.node_class
self.root=Node.Nodu('',None,Node.DIR)
if not self.srcnode:
self.srcnode=self.root.ensure_dir_node_from_path(srcdir)
debug('build: srcnode is %s and srcdir %s'%(self.srcnode.name,srcdir))
self.path=self.srcnode
try:os.makedirs(blddir)
except OSError:pass
if not self.bldnode:
self.bldnode=self.root.ensure_dir_node_from_path(blddir)
self.init_variants()
def rescan(self,src_dir_node):
if self.cache_scanned_folders.get(src_dir_node.id,None):return
self.cache_scanned_folders[src_dir_node.id]=True
if hasattr(self,'repository'):self.repository(src_dir_node)
if not src_dir_node.name and sys.platform=='win32':
return
parent_path=src_dir_node.abspath()
try:
lst=set(Utils.listdir(parent_path))
except OSError:
lst=set([])
self.cache_dir_contents[src_dir_node.id]=lst
cache=self.node_sigs[0]
for x in src_dir_node.childs.values():
if x.id&3!=Node.FILE:continue
if x.name in lst:
try:
cache[x.id]=Utils.h_file(x.abspath())
except IOError:
raise Utils.WafError('The file %s is not readable or has become a dir'%x.abspath())
else:
try:del cache[x.id]
except KeyError:pass
del src_dir_node.childs[x.name]
h1=self.srcnode.height()
h2=src_dir_node.height()
lst=[]
child=src_dir_node
while h2>h1:
lst.append(child.name)
child=child.parent
h2-=1
lst.reverse()
for variant in self.lst_variants:
sub_path=os.path.join(self.bldnode.abspath(),variant,*lst)
try:
self.listdir_bld(src_dir_node,sub_path,variant)
except OSError:
dict=self.node_sigs[variant]
for node in src_dir_node.childs.values():
if node.id in dict:
dict.__delitem__(node.id)
if node.id!=self.bldnode.id:
src_dir_node.childs.__delitem__(node.name)
os.makedirs(sub_path)
	def listdir_src(self,parent_node):
		# No-op placeholder; source directory scanning happens in rescan().
		pass
	def remove_node(self,node):
		# No-op placeholder; presumably a hook for overriding -- verify callers.
		pass
def listdir_bld(self,parent_node,path,variant):
i_existing_nodes=[x for x in parent_node.childs.values()if x.id&3==Node.BUILD]
lst=set(Utils.listdir(path))
node_names=set([x.name for x in i_existing_nodes])
remove_names=node_names-lst
ids_to_remove=[x.id for x in i_existing_nodes if x.name in remove_names]
cache=self.node_sigs[variant]
for nid in ids_to_remove:
if nid in cache:
cache.__delitem__(nid)
	def get_env(self):
		# 'env' property getter: the environment registered as 'default'.
		return self.env_of_name('default')
	def set_env(self,name,val):
		# 'env' property setter: register 'val' under 'name'.
		self.all_envs[name]=val
	env=property(get_env,set_env)
def add_manual_dependency(self,path,value):
if isinstance(path,Node.Node):
node=path
elif os.path.isabs(path):
node=self.root.find_resource(path)
else:
node=self.path.find_resource(path)
self.deps_man[node.id].append(value)
def launch_node(self):
try:
return self.p_ln
except AttributeError:
self.p_ln=self.root.find_dir(Options.launch_dir)
return self.p_ln
def glob(self,pattern,relative=True):
path=self.path.abspath()
files=[self.root.find_resource(x)for x in glob.glob(path+os.sep+pattern)]
if relative:
files=[x.path_to_parent(self.path)for x in files if x]
else:
files=[x.abspath()for x in files if x]
return files
	def add_group(self,*k):
		# Delegate to the task manager: open a new task group.
		self.task_manager.add_group(*k)
	def set_group(self,*k,**kw):
		# Delegate to the task manager: switch the current task group.
		self.task_manager.set_group(*k,**kw)
def hash_env_vars(self,env,vars_lst):
idx=str(id(env))+str(vars_lst)
try:return self.cache_sig_vars[idx]
except KeyError:pass
lst=[str(env[a])for a in vars_lst]
ret=Utils.h_list(lst)
debug("envhash: %r %r"%(ret,lst))
self.cache_sig_vars[idx]=ret
return ret
def name_to_obj(self,name,env):
cache=self.task_gen_cache_names
if not cache:
for x in self.all_task_gen:
vt=x.env.variant()+'_'
if x.name:
cache[vt+x.name]=x
else:
if isinstance(x.target,str):
target=x.target
else:
target=' '.join(x.target)
v=vt+target
if not cache.get(v,None):
cache[v]=x
return cache.get(env.variant()+'_'+name,None)
def flush(self,all=1):
self.ini=datetime.datetime.now()
self.task_gen_cache_names={}
self.name_to_obj('',self.env)
debug('build: delayed operation TaskGen.flush() called')
if Options.options.compile_targets:
debug('task_gen: posting objects listed in compile_targets')
target_objects=Utils.DefaultDict(list)
for target_name in Options.options.compile_targets.split(','):
target_name=target_name.strip()
for env in self.all_envs.values():
obj=self.name_to_obj(target_name,env)
if obj:
target_objects[target_name].append(obj)
if not target_name in target_objects and all:
raise Utils.WafError("target '%s' does not exist"%target_name)
to_compile=[]
for x in target_objects.values():
for y in x:
to_compile.append(id(y))
for i in xrange(len(self.task_manager.groups)):
g=self.task_manager.groups[i]
self.task_manager.current_group=i
for tg in g.tasks_gen:
if id(tg)in to_compile:
tg.post()
else:
debug('task_gen: posting objects (normal)')
ln=self.launch_node()
if ln.is_child_of(self.bldnode)or not ln.is_child_of(self.srcnode):
ln=self.srcnode
proj_node=self.root.find_dir(os.path.split(Utils.g_module.root_path)[0])
if proj_node.id!=self.srcnode.id:
ln=self.srcnode
for i in xrange(len(self.task_manager.groups)):
g=self.task_manager.groups[i]
self.task_manager.current_group=i
for tg in g.tasks_gen:
if not tg.path.is_child_of(ln):
continue
tg.post()
def env_of_name(self,name):
try:
return self.all_envs[name]
except KeyError:
error('no such environment: '+name)
return None
def progress_line(self,state,total,col1,col2):
n=len(str(total))
Utils.rot_idx+=1
ind=Utils.rot_chr[Utils.rot_idx%4]
ini=self.ini
pc=(100.*state)/total
eta=Utils.get_elapsed_time(ini)
fs="[%%%dd/%%%dd][%%s%%2d%%%%%%s][%s]["%(n,n,ind)
left=fs%(state,total,col1,pc,col2)
right='][%s%s%s]'%(col1,eta,col2)
cols=Utils.get_term_cols()-len(left)-len(right)+2*len(col1)+2*len(col2)
if cols<7:cols=7
ratio=int((cols*state)/total)-1
bar=('='*ratio+'>').ljust(cols)
msg=Utils.indicator%(left,bar,right)
return msg
def do_install(self,src,tgt,chmod=O644):
if self.is_install>0:
if not Options.options.force:
try:
st1=os.stat(tgt)
st2=os.stat(src)
except OSError:
pass
else:
if st1.st_mtime>=st2.st_mtime and st1.st_size==st2.st_size:
return False
srclbl=src.replace(self.srcnode.abspath(None)+os.sep,'')
info("* installing %s as %s"%(srclbl,tgt))
try:os.remove(tgt)
except OSError:pass
try:
shutil.copy2(src,tgt)
os.chmod(tgt,chmod)
except IOError:
try:
os.stat(src)
except(OSError,IOError):
error('File %r does not exist'%src)
raise Utils.WafError('Could not install the file %r'%tgt)
return True
elif self.is_install<0:
info("* uninstalling %s"%tgt)
self.uninstall.append(tgt)
try:
os.remove(tgt)
except OSError,e:
if e.errno!=errno.ENOENT:
if not getattr(self,'uninstall_error',None):
self.uninstall_error=True
Logs.warn('build: some files could not be uninstalled (retry with -vv to list them)')
if Logs.verbose>1:
Logs.warn('could not remove %s (error code %r)'%(e.filename,e.errno))
return True
def get_install_path(self,path,env=None):
if not env:env=self.env
destdir=env.get_destdir()
path=path.replace('/',os.sep)
destpath=Utils.subst_vars(path,env)
if destdir:
destpath=os.path.join(destdir,destpath.lstrip(os.sep))
return destpath
def install_files(self,path,files,env=None,chmod=O644,relative_trick=False,cwd=None):
if env:
assert isinstance(env,Environment.Environment),"invalid parameter"
else:
env=self.env
if not path:return[]
if not cwd:
cwd=self.path
if isinstance(files,str)and'*'in files:
gl=cwd.abspath()+os.sep+files
lst=glob.glob(gl)
else:
lst=Utils.to_list(files)
destpath=self.get_install_path(path,env)
Utils.check_dir(destpath)
installed_files=[]
for filename in lst:
if isinstance(filename,str)and os.path.isabs(filename):
alst=Utils.split_path(filename)
destfile=os.path.join(destpath,alst[-1])
else:
if isinstance(filename,Node.Node):
nd=filename
else:
nd=cwd.find_resource(filename)
if not nd:
raise Utils.WafError("Unable to install the file %r (not found in %s)"%(filename,cwd))
if relative_trick:
destfile=os.path.join(destpath,filename)
Utils.check_dir(os.path.dirname(destfile))
else:
destfile=os.path.join(destpath,nd.name)
filename=nd.abspath(env)
if self.do_install(filename,destfile,chmod):
installed_files.append(destfile)
return installed_files
def install_as(self,path,srcfile,env=None,chmod=O644,cwd=None):
if env:
assert isinstance(env,Environment.Environment),"invalid parameter"
else:
env=self.env
if not path:
raise Utils.WafError("where do you want to install %r? (%r?)"%(srcfile,path))
if not cwd:
cwd=self.path
destpath=self.get_install_path(path,env)
dir,name=os.path.split(destpath)
Utils.check_dir(dir)
if isinstance(srcfile,Node.Node):
src=srcfile.abspath(env)
else:
src=srcfile
if not os.path.isabs(srcfile):
node=cwd.find_resource(srcfile)
if not node:
raise Utils.WafError("Unable to install the file %r (not found in %s)"%(srcfile,cwd))
src=node.abspath(env)
return self.do_install(src,destpath,chmod)
def symlink_as(self,path,src,env=None,cwd=None):
if sys.platform=='win32':
return
if not path:
raise Utils.WafError("where do you want to install %r? (%r?)"%(src,path))
tgt=self.get_install_path(path,env)
dir,name=os.path.split(tgt)
Utils.check_dir(dir)
if self.is_install>0:
link=False
if not os.path.islink(tgt):
link=True
elif os.readlink(tgt)!=src:
link=True
try:os.remove(tgt)
except OSError:pass
if link:
info('* symlink %s (-> %s)'%(tgt,src))
os.symlink(src,tgt)
return 0
else:
try:
info('* removing %s'%(tgt))
os.remove(tgt)
return 0
except OSError:
return 1
	def exec_command(self,cmd,**kw):
		# Run 'cmd' through Utils.exec_command, mirroring it to the build log
		# and defaulting the working directory to self.cwd (or the build dir
		# when self.cwd is not set yet).
		debug('runner: system command -> %s'%cmd)
		if self.log:
			self.log.write('%s\n'%cmd)
			kw['log']=self.log
		try:
			if not kw.get('cwd',None):
				kw['cwd']=self.cwd
		except AttributeError:
			# self.cwd missing: initialize it from the build node.
			self.cwd=kw['cwd']=self.bldnode.abspath()
		return Utils.exec_command(cmd,**kw)
def printout(self,s):
f=self.log or sys.stderr
f.write(s)
f.flush()
	def add_subdirs(self,dirs):
		# Recurse into the given subdirectories, running their 'build' function.
		self.recurse(dirs,'build')
	def pre_recurse(self,name_or_mod,path,nexdir):
		# Called before recursing into a wscript: push the current path and
		# switch to the new directory; the returned dict seeds the wscript
		# namespace ('bld'/'ctx').
		if not hasattr(self,'oldpath'):
			self.oldpath=[]
		self.oldpath.append(self.path)
		self.path=self.root.find_dir(nexdir)
		return{'bld':self,'ctx':self}
	def post_recurse(self,name_or_mod,path,nexdir):
		# Restore the path saved by pre_recurse.
		self.path=self.oldpath.pop()
def pre_build(self):
if hasattr(self,'pre_funs'):
for m in self.pre_funs:
m(self)
def post_build(self):
if hasattr(self,'post_funs'):
for m in self.post_funs:
m(self)
def add_pre_fun(self,meth):
try:self.pre_funs.append(meth)
except AttributeError:self.pre_funs=[meth]
def add_post_fun(self,meth):
try:self.post_funs.append(meth)
except AttributeError:self.post_funs=[meth]
	def use_the_magic(self):
		# Switch the Task module to its maximum-parallelization scheduler and
		# its dependency-extraction mode; note this mutates Task globals.
		Task.algotype=Task.MAXPARALLEL
		Task.file_deps=Task.extract_deps
install_as=group_method(install_as)
install_files=group_method(install_files)
symlink_as=group_method(symlink_as)
#! /usr/bin/env python
# encoding: utf-8
import os,shlex,sys,time
try:import cPickle
except ImportError:import pickle as cPickle
import Environment,Utils,Options
from Logs import warn
from Constants import*
conf_template='''# project %(app)s configured on %(now)s by
# waf %(wafver)s (abi %(abi)s, python %(pyver)x on %(systype)s)
# using %(args)s
#
'''
class ConfigurationError(Utils.WscriptError):
pass
autoconfig=False
def find_file(filename,path_list):
	"""Return the first directory of *path_list* containing *filename*, or ''."""
	for folder in Utils.to_list(path_list):
		if os.path.exists(os.path.join(folder,filename)):
			return folder
	return''
def find_program_impl(env,filename,path_list=[],var=None,environ=None):
	# Search for program 'filename' in 'path_list' (string or list) or $PATH.
	# When 'var' is given, a value already in env[var] (or the process
	# environment) wins, and the result is written back into env[var].
	# NOTE(review): 'path_list=[]' is a mutable default, but it is only
	# rebound here (never mutated), so it is harmless.
	if not environ:
		environ=os.environ
	try:path_list=path_list.split()
	except AttributeError:pass
	if var:
		if env[var]:return env[var]
		if var in environ:env[var]=environ[var]
	if not path_list:path_list=environ.get('PATH','').split(os.pathsep)
	# On win32, also try the usual executable extensions.
	ext=(Options.platform=='win32')and'.exe,.com,.bat,.cmd'or''
	for y in[filename+x for x in ext.split(',')]:
		for directory in path_list:
			x=os.path.join(directory,y)
			if os.path.isfile(x):
				if var:env[var]=x
				return x
	return''
class ConfigurationContext(Utils.Context):
tests={}
error_handlers=[]
def __init__(self,env=None,blddir='',srcdir=''):
self.env=None
self.envname=''
self.environ=dict(os.environ)
self.line_just=40
self.blddir=blddir
self.srcdir=srcdir
self.all_envs={}
self.cwd=self.curdir=os.getcwd()
self.tools=[]
self.setenv(DEFAULT)
self.lastprog=''
self.hash=0
self.files=[]
self.tool_cache=[]
if self.blddir:
self.post_init()
def post_init(self):
self.cachedir=os.path.join(self.blddir,CACHE_DIR)
path=os.path.join(self.blddir,WAF_CONFIG_LOG)
try:os.unlink(path)
except(OSError,IOError):pass
try:
self.log=open(path,'w')
except(OSError,IOError):
self.fatal('could not open %r for writing'%path)
app=getattr(Utils.g_module,'APPNAME','')
if app:
ver=getattr(Utils.g_module,'VERSION','')
if ver:
app="%s (%s)"%(app,ver)
now=time.ctime()
pyver=sys.hexversion
systype=sys.platform
args=" ".join(sys.argv)
wafver=WAFVERSION
abi=ABI
self.log.write(conf_template%vars())
	def __del__(self):
		# Close the config log on garbage collection, if it was ever opened.
		if hasattr(self,'log')and self.log:
			self.log.close()
	def fatal(self,msg):
		# Abort the configuration with a ConfigurationError.
		raise ConfigurationError(msg)
def check_tool(self,input,tooldir=None,funs=None):
tools=Utils.to_list(input)
if tooldir:tooldir=Utils.to_list(tooldir)
for tool in tools:
tool=tool.replace('++','xx')
if tool=='java':tool='javaw'
mag=(tool,id(self.env),funs)
if mag in self.tool_cache:
continue
self.tool_cache.append(mag)
module=Utils.load_tool(tool,tooldir)
func=getattr(module,'detect',None)
if func:
if type(func)is type(find_file):func(self)
else:self.eval_rules(funs or func)
self.tools.append({'tool':tool,'tooldir':tooldir,'funs':funs})
def sub_config(self,k):
self.recurse(k,name='configure')
def pre_recurse(self,name_or_mod,path,nexdir):
return{'conf':self,'ctx':self}
def post_recurse(self,name_or_mod,path,nexdir):
if not autoconfig:
return
self.hash=hash((self.hash,getattr(name_or_mod,'waf_hash_val',name_or_mod)))
self.files.append(path)
	def store(self,file=''):
		# Write build.config.py (waf version + tools used) and one cache file
		# per configured environment into the cache directory.
		# NOTE(review): 'file' shadows the builtin and doubles as an open
		# file object when supplied by the caller.
		if not os.path.isdir(self.cachedir):
			os.makedirs(self.cachedir)
		if not file:
			file=open(os.path.join(self.cachedir,'build.config.py'),'w')
		file.write('version = 0x%x\n'%HEXVERSION)
		file.write('tools = %r\n'%self.tools)
		file.close()
		if not self.all_envs:
			self.fatal('nothing to store in the configuration context!')
		for key in self.all_envs:
			tmpenv=self.all_envs[key]
			tmpenv.store(os.path.join(self.cachedir,key+CACHE_SUFFIX))
def set_env_name(self,name,env):
self.all_envs[name]=env
return env
	def retrieve(self,name,fromenv=None):
		# Return the environment called 'name', creating it (with PREFIX set
		# from the command line) on first use; warn when 'fromenv' suggests
		# it may have been configured already.
		try:
			env=self.all_envs[name]
		except KeyError:
			env=Environment.Environment()
			env['PREFIX']=os.path.abspath(os.path.expanduser(Options.options.prefix))
			self.all_envs[name]=env
		else:
			if fromenv:warn("The environment %s may have been configured already"%name)
		return env
def setenv(self,name):
self.env=self.retrieve(name)
self.envname=name
def add_os_flags(self,var,dest=None):
try:self.env.append_value(dest or var,Utils.to_list(self.environ[var]))
except KeyError:pass
def check_message_1(self,sr):
self.line_just=max(self.line_just,len(sr))
self.log.write(sr+'\n\n')
Utils.pprint('NORMAL',"%s :"%sr.ljust(self.line_just),sep='')
def check_message_2(self,sr,color='GREEN'):
Utils.pprint(color,sr)
def check_message(self,th,msg,state,option=''):
sr='Checking for %s %s'%(th,msg)
self.check_message_1(sr)
p=self.check_message_2
if state:p('ok '+option)
else:p('not found','YELLOW')
def check_message_custom(self,th,msg,custom,option='',color='PINK'):
sr='Checking for %s %s'%(th,msg)
self.check_message_1(sr)
self.check_message_2(custom,color)
def find_program(self,filename,path_list=[],var=None,mandatory=False):
ret=None
if var:
if self.env[var]:
ret=self.env[var]
elif var in os.environ:
ret=os.environ[var]
if not isinstance(filename,list):filename=[filename]
if not ret:
for x in filename:
ret=find_program_impl(self.env,x,path_list,var,environ=self.environ)
if ret:break
self.check_message('program',','.join(filename),ret,ret)
self.log.write('find program=%r paths=%r var=%r -> %r\n\n'%(filename,path_list,var,ret))
if not ret and mandatory:
self.fatal('The program %r could not be found'%filename)
if var:
self.env[var]=ret
return ret
def cmd_to_list(self,cmd):
if isinstance(cmd,str)and cmd.find(' '):
try:
os.stat(cmd)
except OSError:
return shlex.split(cmd)
else:
return[cmd]
return cmd
def __getattr__(self,name):
r=self.__class__.__dict__.get(name,None)
if r:return r
if name and name.startswith('require_'):
for k in['check_','find_']:
n=name.replace('require_',k)
ret=self.__class__.__dict__.get(n,None)
if ret:
def run(*k,**kw):
r=ret(self,*k,**kw)
if not r:
self.fatal('requirement failure')
return r
return run
self.fatal('No such method %r'%name)
	def eval_rules(self,rules):
		# Run each named configuration method; err_handler decides whether a
		# failure breaks the loop, continues, or aborts the configuration.
		self.rules=Utils.to_list(rules)
		for x in self.rules:
			# NOTE(review): getattr without a default raises AttributeError
			# first, so the fatal() below is unreachable for missing names.
			f=getattr(self,x)
			if not f:self.fatal("No such method '%s'."%x)
			try:
				f()
			except Exception,e:
				ret=self.err_handler(x,e)
				if ret==BREAK:
					break
				elif ret==CONTINUE:
					continue
				else:
					self.fatal(e)
	def err_handler(self,fun,error):
		# Default error hook for eval_rules: returning None makes the failure
		# fatal; overriders may return BREAK or CONTINUE instead.
		pass
def conf(f):
	# Decorator: attach 'f' as a method of ConfigurationContext.
	setattr(ConfigurationContext,f.__name__,f)
	return f
def conftest(f):
	# Decorator: register 'f' as a configuration test and attach it like conf().
	ConfigurationContext.tests[f.__name__]=f
	return conf(f)
#! /usr/bin/env python
# encoding: utf-8
HEXVERSION=0x10509
WAFVERSION="1.5.9"
WAFREVISION="6626:6639M"
ABI=7
# Unix permission modes in decimal: 420 == 0o644, 493 == 0o755.
O644=420
O755=493
MAXJOBS=99999999
# Cache/database locations and wscript file names.
CACHE_DIR='c4che'
CACHE_SUFFIX='.cache.py'
DBFILE='.wafpickle-%d'%ABI
WSCRIPT_FILE='wscript'
WSCRIPT_BUILD_FILE='wscript_build'
WAF_CONFIG_LOG='config.log'
WAF_CONFIG_H='config.h'
# Placeholder signature used when a file hash cannot be computed.
SIG_NIL='iluvcuteoverload'
VARIANT='_VARIANT_'
DEFAULT='default'
SRCDIR='srcdir'
BLDDIR='blddir'
APPNAME='APPNAME'
VERSION='VERSION'
DEFINES='defines'
UNDEFINED=()
# Tokens returned by configuration error handlers.
BREAK="break"
CONTINUE="continue"
JOBCONTROL="JOBCONTROL"
MAXPARALLEL="MAXPARALLEL"
NORMAL="NORMAL"
# Task completion states.
NOT_RUN=0
MISSING=1
CRASHED=2
EXCEPTION=3
SKIPPED=8
SUCCESS=9
# Task runnability statuses.
ASK_LATER=-1
SKIP_ME=-2
RUN_ME=-3
LOG_FORMAT="%(asctime)s %(c1)s%(zone)s%(c2)s %(message)s"
HOUR_FORMAT="%H:%M:%S"
TEST_OK=True
CFG_FILES='cfg_files'
# Sentinel values for BuildContext.is_install.
INSTALL=1337
UNINSTALL=-1337
#! /usr/bin/env python
# encoding: utf-8
import sys
if sys.hexversion < 0x020400f0: from sets import Set as set
import os,copy,re
import Logs,Options,Utils
from Constants import*
re_imp=re.compile('^(#)*?([^#=]*?)\ =\ (.*?)$',re.M)
class Environment(object):
__slots__=("table","parent")
	def __init__(self,filename=None):
		# Start with an empty table; optionally populate it from a cache file.
		self.table={}
		if filename:
			self.load(filename)
def __contains__(self,key):
if key in self.table:return True
try:return self.parent.__contains__(key)
except AttributeError:return False
def __str__(self):
keys=set()
cur=self
while cur:
keys.update(cur.table.keys())
cur=getattr(cur,'parent',None)
keys=list(keys)
keys.sort()
return"\n".join(["%r %r"%(x,self.__getitem__(x))for x in keys])
def __getitem__(self,key):
try:
while 1:
x=self.table.get(key,None)
if not x is None:
return x
self=self.parent
except AttributeError:
return[]
def __setitem__(self,key,value):
self.table[key]=value
def __delitem__(self,key,value):
del self.table[key]
def set_variant(self,name):
self.table[VARIANT]=name
def variant(self):
try:
while 1:
x=self.table.get(VARIANT,None)
if not x is None:
return x
self=self.parent
except AttributeError:
return DEFAULT
def copy(self):
newenv=Environment()
newenv.parent=self
return newenv
	def detach(self):
		# Flatten the parent chain into this environment's own table so that
		# later changes to ancestors no longer affect it; values are
		# shallow-copied to avoid sharing mutable lists with the parents.
		tbl=self.get_merged_dict()
		try:
			delattr(self,'parent')
		except AttributeError:
			# No parent: nothing to detach from.
			pass
		else:
			keys=tbl.keys()
			for x in keys:
				tbl[x]=copy.copy(tbl[x])
			self.table=tbl
def get_flat(self,key):
s=self[key]
if isinstance(s,str):return s
return' '.join(s)
	def _get_list_value_for_modification(self,key):
		# Return a list stored at self.table[key] that is safe to mutate in
		# place: values inherited from a parent are copied first, and scalar
		# values are wrapped in a one-element list.
		try:
			value=self.table[key]
		except KeyError:
			try:value=self.parent[key]
			except AttributeError:value=[]
			if isinstance(value,list):
				value=value[:]
			else:
				value=[value]
		else:
			if not isinstance(value,list):
				value=[value]
		self.table[key]=value
		return value
def append_value(self,var,value):
current_value=self._get_list_value_for_modification(var)
if isinstance(value,list):
current_value.extend(value)
else:
current_value.append(value)
def prepend_value(self,var,value):
current_value=self._get_list_value_for_modification(var)
if isinstance(value,list):
current_value=value+current_value
self.table[var]=current_value
else:
current_value.insert(0,value)
def append_unique(self,var,value):
current_value=self._get_list_value_for_modification(var)
if isinstance(value,list):
for value_item in value:
if value_item not in current_value:
current_value.append(value_item)
else:
if value not in current_value:
current_value.append(value)
def get_merged_dict(self):
table_list=[]
env=self
while 1:
table_list.insert(0,env.table)
try:env=env.parent
except AttributeError:break
merged_table={}
for table in table_list:
merged_table.update(table)
return merged_table
	def store(self,filename):
		# Persist the merged environment as 'key = repr(value)' lines, sorted
		# by key, in the format load() reads back.
		file=open(filename,'w')
		merged_table=self.get_merged_dict()
		keys=list(merged_table.keys())
		keys.sort()
		for k in keys:file.write('%s = %r\n'%(k,merged_table[k]))
		file.close()
	def load(self,filename):
		# Parse a file written by store(): each 'key = value' line is eval'd.
		# NOTE(review): eval on file content -- acceptable only because these
		# cache files are produced by waf itself; never feed it untrusted input.
		tbl=self.table
		code=Utils.readf(filename)
		for m in re_imp.finditer(code):
			g=m.group
			tbl[g(2)]=eval(g(3))
		Logs.debug('env: %s'%str(self.table))
def get_destdir(self):
if self.__getitem__('NOINSTALL'):return''
return Options.options.destdir
def update(self,d):
for k,v in d.iteritems():
self[k]=v
def __getattr__(self,name):
if name in self.__slots__:
return object.__getattr__(self,name)
else:
return self[name]
def __setattr__(self,name,value):
if name in self.__slots__:
object.__setattr__(self,name,value)
else:
self[name]=value
def __detattr__(self,name):
if name in self.__slots__:
object.__detattr__(self,name)
else:
del self[name]
#! /usr/bin/env python
# encoding: utf-8
import os,re,logging,traceback,sys
from Constants import*
zones=''
verbose=0
colors_lst={'USE':True,'BOLD':'\x1b[01;1m','RED':'\x1b[01;91m','GREEN':'\x1b[32m','YELLOW':'\x1b[33m','PINK':'\x1b[35m','BLUE':'\x1b[01;34m','CYAN':'\x1b[36m','NORMAL':'\x1b[0m','cursor_on':'\x1b[?25h','cursor_off':'\x1b[?25l',}
got_tty=not os.environ.get('TERM','dumb')in['dumb','emacs']
if got_tty:
try:
got_tty=sys.stderr.isatty()
except AttributeError:
got_tty=False
import Utils
if not got_tty or sys.platform=='win32'or'NOCOLOR'in os.environ:
colors_lst['USE']=False
def get_color(cl):
	"""Return the ANSI escape for *cl*, or '' when colors are disabled."""
	if colors_lst['USE']:
		return colors_lst.get(cl,'')
	return''
class foo(object):
	# Color proxy: exposes escapes both as attributes (colors.RED) and as
	# calls (colors('RED')); each access goes through get_color so the
	# global 'USE' flag is honoured dynamically.
	def __getattr__(self,a):
		return get_color(a)
	def __call__(self,a):
		return get_color(a)
colors=foo()
re_log=re.compile(r'(\w+): (.*)',re.M)
class log_filter(logging.Filter):
	# Filter/mutator for waf log records: sets the c1/c2 color codes and a
	# 'zone' attribute, and suppresses debug records unless their zone is
	# enabled (module-level 'zones') or verbosity exceeds 2.
	def __init__(self,name=None):
		pass
	def filter(self,rec):
		rec.c1=colors.PINK
		rec.c2=colors.NORMAL
		rec.zone=rec.module
		if rec.levelno>=logging.INFO:
			# info and above always pass, colored by severity.
			if rec.levelno>=logging.ERROR:
				rec.c1=colors.RED
			elif rec.levelno>=logging.WARNING:
				rec.c1=colors.YELLOW
			else:
				rec.c1=colors.GREEN
			return True
		# Debug records may carry a 'zone: message' prefix.
		zone=''
		m=re_log.match(rec.msg)
		if m:
			zone=rec.zone=m.group(1)
			rec.msg=m.group(2)
		if zones:
			return getattr(rec,'zone','')in zones or'*'in zones
		elif not verbose>2:
			return False
		return True
class formatter(logging.Formatter):
    """Formatter that wraps WARNING/ERROR/INFO messages in the record's
    color codes; DEBUG records use the standard LOG_FORMAT layout."""

    def __init__(self):
        logging.Formatter.__init__(self, LOG_FORMAT, HOUR_FORMAT)

    def format(self, rec):
        if rec.levelno >= logging.WARNING or rec.levelno == logging.INFO:
            try:
                # Python 2: decode byte strings so '%s' interpolation
                # does not blow up on non-ASCII text
                return '%s%s%s' % (rec.c1, rec.msg.decode('utf-8'), rec.c2)
            except:
                # msg may already be unicode or not valid utf-8; emit raw
                return rec.c1 + rec.msg + rec.c2
        return logging.Formatter.format(self, rec)
def debug(msg):
    """Emit *msg* at DEBUG level, but only when verbosity is enabled;
    newlines are flattened so each record stays on a single line."""
    if not verbose:
        return
    logging.debug(msg.replace('\n', ' '))
def error(msg):
    """Log *msg* as an error; with -vv or more, also log the stack that led
    here (WafError instances carry their own captured stack)."""
    logging.error(msg)
    if verbose > 1:
        if isinstance(msg, Utils.WafError):
            st = msg.stack
        else:
            st = traceback.extract_stack()
        if st:
            # drop the frame of error() itself
            st = st[:-1]
            buf = []
            for filename, lineno, name, line in st:
                buf.append(' File "%s", line %d, in %s' % (filename, lineno, name))
                if line:
                    buf.append(' %s' % line.strip())
            if buf: logging.error("\n".join(buf))
# warnings and infos are forwarded to the logging module unchanged
warn = logging.warn
info = logging.info
def init_log():
    """(Re)install the waf logging configuration on the root logger: one
    stream handler using the colored formatter plus the zone filter, at
    DEBUG level. Safe to call repeatedly: previous handlers/filters are
    discarded first, so they never stack up."""
    root = logging.getLogger()
    root.handlers = []
    root.filters = []
    handler = logging.StreamHandler()
    handler.setFormatter(formatter())
    root.addHandler(handler)
    root.addFilter(log_filter())
    root.setLevel(logging.DEBUG)
init_log()
#! /usr/bin/env python
# encoding: utf-8
import os,sys,fnmatch,re
import Utils
# node kinds, stored in the two low bits of Node.id (see Node.get_type)
UNDEFINED = 0
DIR = 1
FILE = 2
BUILD = 3
type_to_string = {UNDEFINED: "unk", DIR: "dir", FILE: "src", BUILD: "bld"}
# directories never descended into during scans (version control metadata)
prune_pats = '.git .bzr .hg .svn _MTN _darcs CVS SCCS'.split()
# file patterns skipped by Node.find_iter by default
exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split()
# ant_glob default exclusion patterns (one per line, ant-style '**' syntax)
exclude_regs = '''
**/*~
**/#*#
**/.#*
**/%*%
**/._*
**/CVS
**/CVS/**
**/.cvsignore
**/SCCS
**/SCCS/**
**/vssver.scc
**/.svn
**/.svn/**
**/.git
**/.git/**
**/.gitignore
**/.bzr
**/.bzr/**
**/.hg
**/.hg/**
**/_MTN
**/_MTN/**
**/_darcs
**/_darcs/**
**/.DS_Store'''
class Node(object):
    """One entry of the project file-system tree (source file, build output,
    or directory).

    Each node carries an integer 'id' whose two low bits encode the node
    type (DIR/FILE/BUILD) and whose high bits come from a global counter on
    the build context (self.__class__.bld); most caches on the build context
    are keyed by this id. Only DIR nodes have a 'childs' dict.
    """
    __slots__ = ("name", "parent", "id", "childs")

    def __init__(self, name, parent, node_type=UNDEFINED):
        self.name = name
        self.parent = parent
        # the counter advances by 4 so the low two bits stay free for the type
        self.__class__.bld.id_nodes += 4
        self.id = self.__class__.bld.id_nodes + node_type
        if node_type == DIR: self.childs = {}
        if parent and name in parent.childs:
            raise Utils.WafError('node %s exists in the parent files %r already' % (name, parent))
        if parent: parent.childs[name] = self

    # pickling support: 'childs' is only present for directory nodes
    def __setstate__(self, data):
        if len(data) == 4:
            (self.parent, self.name, self.id, self.childs) = data
        else:
            (self.parent, self.name, self.id) = data

    def __getstate__(self):
        if getattr(self, 'childs', None) is None:
            return (self.parent, self.name, self.id)
        else:
            return (self.parent, self.name, self.id, self.childs)

    def __str__(self):
        if not self.parent: return ''
        return "%s://%s" % (type_to_string[self.id & 3], self.abspath())

    def __repr__(self):
        return self.__str__()

    # nodes are deliberately unhashable and uncopyable: identity/id is the key
    def __hash__(self):
        raise Utils.WafError('nodes, you are doing it wrong')

    def __copy__(self):
        raise Utils.WafError('nodes are not supposed to be cloned')

    def get_type(self):
        return self.id & 3

    def set_type(self, t):
        # NOTE(review): '&' binds looser than '+'/'-', so this evaluates as
        # (self.id + t - self.id) & 3 == t & 3, discarding the counter part
        # of the id -- looks unintended, confirm before relying on it
        self.id = self.id + t - self.id & 3

    def dirs(self):
        return [x for x in self.childs.values() if x.id & 3 == DIR]

    def files(self):
        return [x for x in self.childs.values() if x.id & 3 == FILE]

    # typed child lookups; return *default* when missing or of another type
    def get_dir(self, name, default=None):
        node = self.childs.get(name, None)
        if not node or node.id & 3 != DIR: return default
        return node

    def get_file(self, name, default=None):
        node = self.childs.get(name, None)
        if not node or node.id & 3 != FILE: return default
        return node

    def get_build(self, name, default=None):
        node = self.childs.get(name, None)
        if not node or node.id & 3 != BUILD: return default
        return node

    def find_resource(self, lst):
        """Find an existing FILE or BUILD node for path *lst* (string or
        list); creates the FILE node (and records its signature) when the
        file exists on disk but was not seen yet. Returns None otherwise."""
        if isinstance(lst, str):
            lst = Utils.split_path(lst)
        if len(lst) == 1:
            parent = self
        else:
            parent = self.find_dir(lst[:-1])
            if not parent: return None
        self.__class__.bld.rescan(parent)
        name = lst[-1]
        node = parent.childs.get(name, None)
        if node:
            tp = node.id & 3
            if tp == FILE or tp == BUILD:
                return node
            else:
                return None
        tree = self.__class__.bld
        if not name in tree.cache_dir_contents[parent.id]:
            return None
        path = parent.abspath() + os.sep + name
        try:
            # hash the file; also proves it is readable
            st = Utils.h_file(path)
        except IOError:
            return None
        child = self.__class__(name, parent, FILE)
        tree.node_sigs[0][child.id] = st
        return child

    def find_or_declare(self, lst):
        """Return the BUILD node for *lst*, creating it if needed; raises if
        the name already exists as a source file or directory."""
        if isinstance(lst, str):
            lst = Utils.split_path(lst)
        if len(lst) == 1:
            parent = self
        else:
            parent = self.find_dir(lst[:-1])
            if not parent: return None
        self.__class__.bld.rescan(parent)
        name = lst[-1]
        node = parent.childs.get(name, None)
        if node:
            tp = node.id & 3
            if tp != BUILD:
                raise Utils.WafError("find_or_declare returns a build node, not a source nor a directory %r" % lst)
            return node
        node = self.__class__(name, parent, BUILD)
        return node

    def find_dir(self, lst):
        """Resolve *lst* (string or path list, may contain '.'/'..') to a
        DIR node, creating nodes for directories that exist on disk;
        returns None when a component is missing or not a directory."""
        if isinstance(lst, str):
            lst = Utils.split_path(lst)
        current = self
        for name in lst:
            self.__class__.bld.rescan(current)
            prev = current
            if not current.parent and name == current.name:
                continue
            elif not name:
                continue
            elif name == '.':
                continue
            elif name == '..':
                current = current.parent or current
            else:
                current = prev.childs.get(name, None)
                if current is None:
                    dir_cont = self.__class__.bld.cache_dir_contents
                    if prev.id in dir_cont and name in dir_cont[prev.id]:
                        # entry known from the directory scan: verify on disk
                        if not prev.name:
                            if os.sep == '/':
                                dirname = os.sep + name
                            else:
                                dirname = name
                        else:
                            dirname = prev.abspath() + os.sep + name
                        if not os.path.isdir(dirname):
                            return None
                        current = self.__class__(name, prev, DIR)
                    elif (not prev.name and len(name) == 2 and name[1] == ':') or name.startswith('\\\\'):
                        # windows drive letter ('c:') or UNC path under the root
                        current = self.__class__(name, prev, DIR)
                    else:
                        return None
                else:
                    if current.id & 3 != DIR:
                        return None
        return current

    def ensure_dir_node_from_path(self, lst):
        """Like find_dir but never touches the disk: creates DIR nodes for
        any missing components of *lst*."""
        if isinstance(lst, str):
            lst = Utils.split_path(lst)
        current = self
        for name in lst:
            if not name:
                continue
            elif name == '.':
                continue
            elif name == '..':
                current = current.parent or current
            else:
                prev = current
                current = prev.childs.get(name, None)
                if current is None:
                    current = self.__class__(name, prev, DIR)
        return current

    def exclusive_build_node(self, path):
        """Return (creating if needed) a BUILD node for *path*, creating the
        intermediate directory nodes without requiring them on disk."""
        lst = Utils.split_path(path)
        name = lst[-1]
        if len(lst) > 1:
            parent = None
            try:
                parent = self.find_dir(lst[:-1])
            except OSError:
                pass
            if not parent:
                parent = self.ensure_dir_node_from_path(lst[:-1])
                self.__class__.bld.rescan(parent)
            else:
                try:
                    self.__class__.bld.rescan(parent)
                except OSError:
                    pass
        else:
            parent = self
        node = parent.childs.get(name, None)
        if not node:
            node = self.__class__(name, parent, BUILD)
        return node

    def path_to_parent(self, parent):
        """Relative path from *parent* (an ancestor) down to this node."""
        lst = []
        p = self
        h1 = parent.height()
        h2 = p.height()
        while h2 > h1:
            h2 -= 1
            lst.append(p.name)
            p = p.parent
        if lst:
            lst.reverse()
            ret = os.path.join(*lst)
        else:
            ret = ''
        return ret

    def find_ancestor(self, node):
        """Deepest common ancestor of self and *node* (may return None only
        if the two nodes belong to different trees)."""
        dist = self.height() - node.height()
        if dist < 0: return node.find_ancestor(self)
        # climb to the same depth first, then climb both in lockstep
        cand = self
        while dist > 0:
            cand = cand.parent
            dist -= 1
        if cand == node: return cand
        cursor = node
        while cand.parent:
            cand = cand.parent
            cursor = cursor.parent
            if cand == cursor: return cand

    def relpath_gen(self, from_node):
        """Relative path string from *from_node* to this node."""
        if self == from_node: return '.'
        if from_node.parent == self: return '..'
        ancestor = self.find_ancestor(from_node)
        lst = []
        cand = self
        while not cand.id == ancestor.id:
            lst.append(cand.name)
            cand = cand.parent
        cand = from_node
        while not cand.id == ancestor.id:
            lst.append('..')
            cand = cand.parent
        lst.reverse()
        return os.sep.join(lst)

    def nice_path(self, env=None):
        """Human-friendly path relative to the launch directory; build nodes
        are shown under the variant build directory (env is required then)."""
        tree = self.__class__.bld
        ln = tree.launch_node()
        if self.id & 3 == FILE: return self.relpath_gen(ln)
        else: return os.path.join(tree.bldnode.relpath_gen(ln), env.variant(), self.relpath_gen(tree.srcnode))

    def is_child_of(self, node):
        p = self
        diff = self.height() - node.height()
        while diff > 0:
            diff -= 1
            p = p.parent
        return p.id == node.id

    def variant(self, env):
        # source files are variant-independent (0); build nodes depend on env
        if not env: return 0
        elif self.id & 3 == FILE: return 0
        else: return env.variant()

    def height(self):
        """Depth of the node (root has height 0)."""
        d = self
        val = -1
        while d:
            d = d.parent
            val += 1
        return val

    def abspath(self, env=None):
        """Absolute path of the node; build nodes are located under the
        variant directory of *env*. Results are cached per (variant, id)."""
        variant = (env and (self.id & 3 != FILE) and env.variant()) or 0
        ret = self.__class__.bld.cache_node_abspath[variant].get(self.id, None)
        if ret: return ret
        if not variant:
            if not self.parent:
                # filesystem root: '/' on posix, '' on windows (drive follows)
                val = os.sep == '/' and os.sep or ''
            elif not self.parent.name:
                val = (os.sep == '/' and os.sep or '') + self.name
            else:
                val = self.parent.abspath() + os.sep + self.name
        else:
            val = os.sep.join((self.__class__.bld.bldnode.abspath(), variant, self.path_to_parent(self.__class__.bld.srcnode)))
        self.__class__.bld.cache_node_abspath[variant][self.id] = val
        return val

    def change_ext(self, ext):
        """BUILD node with the same base name but extension *ext* (the
        extension is appended when the name has none)."""
        name = self.name
        k = name.rfind('.')
        if k >= 0:
            name = name[:k] + ext
        else:
            name = name + ext
        return self.parent.find_or_declare([name])

    def src_dir(self, env):
        return self.parent.srcpath(env)

    def bld_dir(self, env):
        return self.parent.bldpath(env)

    def bld_base(self, env):
        # build path of this node without its extension
        s = os.path.splitext(self.name)[0]
        return os.path.join(self.bld_dir(env), s)

    def bldpath(self, env=None):
        """Path relative to the build directory (source files are reached by
        a relative ../ path, build files live under the variant)."""
        if self.id & 3 == FILE:
            return self.relpath_gen(self.__class__.bld.bldnode)
        # NOTE(review): identity comparison with a string literal -- works
        # because CPython interns '', but '!=' would be the robust spelling
        if self.path_to_parent(self.__class__.bld.srcnode) is not '':
            return os.path.join(env.variant(), self.path_to_parent(self.__class__.bld.srcnode))
        return env.variant()

    def srcpath(self, env=None):
        """Path relative to the build directory, preferring the source file."""
        if self.id & 3 == BUILD:
            return self.bldpath(env)
        return self.relpath_gen(self.__class__.bld.bldnode)

    def read(self, env):
        return Utils.readf(self.abspath(env))

    def dir(self, env):
        return self.parent.abspath(env)

    def file(self):
        return self.name

    def file_base(self):
        return os.path.splitext(self.name)[0]

    def suffix(self):
        # extension including the dot, or the whole name if there is no dot
        k = max(0, self.name.rfind('.'))
        return self.name[k:]

    def find_iter_impl(self, src=True, bld=True, dir=True, accept_name=None, is_prune=None, maxdepth=25):
        """Generator behind find_iter: walk the tree yielding source files,
        directories and build nodes according to the predicates."""
        bld_ctx = self.__class__.bld
        bld_ctx.rescan(self)
        for name in bld_ctx.cache_dir_contents[self.id]:
            if accept_name(self, name):
                node = self.find_resource(name)
                if node:
                    if src and node.id & 3 == FILE:
                        yield node
                else:
                    node = self.find_dir(name)
                    if node and node.id != bld_ctx.bldnode.id:
                        if dir:
                            yield node
                        if not is_prune(self, name):
                            if maxdepth:
                                for k in node.find_iter_impl(src, bld, dir, accept_name, is_prune, maxdepth=maxdepth - 1):
                                    yield k
            else:
                # name rejected, but unpruned subdirectories are still walked
                if not is_prune(self, name):
                    node = self.find_resource(name)
                    if not node:
                        node = self.find_dir(name)
                        if node and node.id != bld_ctx.bldnode.id:
                            if maxdepth:
                                for k in node.find_iter_impl(src, bld, dir, accept_name, is_prune, maxdepth=maxdepth - 1):
                                    yield k
        if bld:
            for node in self.childs.values():
                if node.id == bld_ctx.bldnode.id:
                    continue
                if node.id & 3 == BUILD:
                    if accept_name(self, node.name):
                        yield node
        # NOTE(review): Python-2 idiom for ending a generator; under
        # PEP 479 (py3.7+) this would raise RuntimeError instead
        raise StopIteration

    def find_iter(self, in_pat=['*'], ex_pat=exclude_pats, prune_pat=prune_pats, src=True, bld=True, dir=False, maxdepth=25, flat=False):
        """Iterate over nodes matched by fnmatch patterns; returns a
        generator, or a space-joined string of relative paths when *flat*.
        (The mutable list defaults are never mutated, so sharing is safe.)"""
        if not (src or bld or dir):
            raise StopIteration
        if self.id & 3 != DIR:
            raise StopIteration
        in_pat = Utils.to_list(in_pat)
        ex_pat = Utils.to_list(ex_pat)
        prune_pat = Utils.to_list(prune_pat)

        def accept_name(node, name):
            # exclusions win over inclusions
            for pat in ex_pat:
                if fnmatch.fnmatchcase(name, pat):
                    return False
            for pat in in_pat:
                if fnmatch.fnmatchcase(name, pat):
                    return True
            return False

        def is_prune(node, name):
            for pat in prune_pat:
                if fnmatch.fnmatchcase(name, pat):
                    return True
            return False
        ret = self.find_iter_impl(src, bld, dir, accept_name, is_prune, maxdepth=maxdepth)
        if flat:
            return " ".join([x.relpath_gen(self) for x in ret])
        return ret

    def ant_glob(self, *k, **kw):
        """Ant-style globbing ('**' spans directories): node.ant_glob('**/*.c').

        Keywords: incl/excl pattern lists, src/bld/dir selection flags, and
        flat (default True) to return a space-joined path string.
        """
        src = kw.get('src', 1)
        bld = kw.get('bld', 1)
        dir = kw.get('dir', 0)
        excl = kw.get('excl', exclude_regs)
        incl = k and k[0] or kw.get('incl', '**')

        def to_pat(s):
            # compile each pattern into a list of path-component regexes,
            # keeping '**' as a literal marker
            lst = Utils.to_list(s)
            ret = []
            for x in lst:
                x = x.replace('//', '/')
                if x.endswith('/'):
                    x += '**'
                lst2 = x.split('/')
                accu = []
                for k in lst2:
                    if k == '**':
                        accu.append(k)
                    else:
                        k = k.replace('.', '[.]').replace('*', '.*').replace('?', '.')
                        k = '^%s$' % k
                        accu.append(re.compile(k))
                ret.append(accu)
            return ret

        def filtre(name, nn):
            # advance every pattern by one path component; [] means "matched"
            ret = []
            for lst in nn:
                if not lst:
                    pass
                elif lst[0] == '**':
                    ret.append(lst)
                    if len(lst) > 1:
                        if lst[1].match(name):
                            ret.append(lst[2:])
                    else:
                        ret.append([])
                elif lst[0].match(name):
                    ret.append(lst[1:])
            return ret

        def accept(name, pats):
            nacc = filtre(name, pats[0])
            nrej = filtre(name, pats[1])
            if [] in nrej:
                nacc = []
            return [nacc, nrej]

        def ant_iter(nodi, maxdepth=25, pats=[]):
            nodi.__class__.bld.rescan(nodi)
            for name in nodi.__class__.bld.cache_dir_contents[nodi.id]:
                npats = accept(name, pats)
                if npats and npats[0]:
                    accepted = [] in npats[0]
                    node = nodi.find_resource(name)
                    if node and accepted:
                        if src and node.id & 3 == FILE:
                            yield node
                    else:
                        node = nodi.find_dir(name)
                        if node and node.id != nodi.__class__.bld.bldnode.id:
                            if accepted and dir:
                                yield node
                            if maxdepth:
                                for k in ant_iter(node, maxdepth=maxdepth - 1, pats=npats):
                                    yield k
            if bld:
                for node in nodi.childs.values():
                    if node.id == nodi.__class__.bld.bldnode.id:
                        continue
                    if node.id & 3 == BUILD:
                        npats = accept(node.name, pats)
                        if npats and npats[0] and [] in npats[0]:
                            yield node
            # NOTE(review): py2 generator-ending idiom, see find_iter_impl
            raise StopIteration
        ret = [x for x in ant_iter(self, pats=[to_pat(incl), to_pat(excl)])]
        if kw.get('flat', True):
            return " ".join([x.relpath_gen(self) for x in ret])
        return ret
class Nodu(Node):
    # NOTE(review): empty subclass -- presumably serves as the concrete node
    # class bound to a build context elsewhere in the project; confirm usage
    pass
#! /usr/bin/env python
# encoding: utf-8
import os,sys,imp,types,tempfile,optparse
import Logs,Utils
from Constants import*
cmds='distclean configure build install clean uninstall check dist distcheck'.split()
commands={}
is_install=False
options={}
arg_line=[]
launch_dir=''
tooldir=''
lockfile=os.environ.get('WAFLOCK','.lock-wscript')
try:cache_global=os.path.abspath(os.environ['WAFCACHE'])
except KeyError:cache_global=''
platform=Utils.unversioned_sys_platform()
conf_file='conf-runs-%s-%d.pickle'%(platform,ABI)
default_prefix=os.environ.get('PREFIX')
if not default_prefix:
if platform=='win32':default_prefix=tempfile.gettempdir()
else:default_prefix='/usr/local/'
# Default number of parallel jobs: $JOBS if valid, else the CPU count.
# Bug fix: os.environ values are strings, so the original compared e.g.
# '4' < 1 (always False under Python 2's str/int ordering) and then leaked
# a *string* default into the integer --jobs option; coerce to int first.
default_jobs = os.environ.get('JOBS', -1)
try:
    default_jobs = int(default_jobs)
except (TypeError, ValueError):
    default_jobs = -1
if default_jobs < 1:
    try:
        if 'SC_NPROCESSORS_ONLN' in os.sysconf_names:
            default_jobs = os.sysconf('SC_NPROCESSORS_ONLN')
        else:
            # BSD/OSX fallback
            default_jobs = int(Utils.cmd_output(['sysctl', '-n', 'hw.ncpu']))
    except:
        if os.name == 'java':
            from java.lang import Runtime
            default_jobs = Runtime.getRuntime().availableProcessors()
        else:
            # windows fallback; 1 when nothing can be detected
            default_jobs = int(os.environ.get('NUMBER_OF_PROCESSORS', 1))
# default --destdir for installation
default_destdir = os.environ.get('DESTDIR', '')
def get_usage(self):
    """Build the usage text shown by --help: one line per documented command
    found in the user's wscript module (falls back to the built-in list)."""
    cmds_str = []
    module = Utils.g_module
    if module:
        tbl = module.__dict__
        keys = list(tbl.keys())
        keys.sort()
        # give the two standard commands a doc line if the user wrote none
        if 'build' in tbl:
            if not module.build.__doc__:
                module.build.__doc__ = 'builds the project'
        if 'configure' in tbl:
            if not module.configure.__doc__:
                module.configure.__doc__ = 'configures the project'
        ban = ['set_options', 'init', 'shutdown']
        # public, documented, plain functions only (parse_args_impl gives the type)
        optlst = [x for x in keys if not x in ban and type(tbl[x]) is type(parse_args_impl) and tbl[x].__doc__ and not x.startswith('_')]
        just = max([len(x) for x in optlst])
        for x in optlst:
            cmds_str.append('  %s: %s' % (x.ljust(just), tbl[x].__doc__))
        ret = '\n'.join(cmds_str)
    else:
        ret = ' '.join(cmds)
    return '''waf [command] [options]
Main commands (example: ./waf build -j4)
%s
''' % ret
# replace optparse's default usage text with the command list built above
setattr(optparse.OptionParser, 'get_usage', get_usage)
def create_parser(module=None):
    """Create the optparse parser with the standard waf options (jobs,
    verbosity, configuration and installation groups). *module* is accepted
    for interface compatibility but not used here."""
    Logs.debug('options: create_parser is called')
    parser = optparse.OptionParser(conflict_handler="resolve", version='waf %s (%s)' % (WAFVERSION, WAFREVISION))
    # wrap the help output to the current terminal width
    parser.formatter.width = Utils.get_term_cols()
    p = parser.add_option
    p('-j', '--jobs', type='int', default=default_jobs, help='amount of parallel jobs (%r)' % default_jobs, dest='jobs')
    p('-k', '--keep', action='store_true', default=False, help='keep running happily on independent task groups', dest='keep')
    p('-v', '--verbose', action='count', default=0, help='verbosity level -v -vv or -vvv [default: 0]', dest='verbose')
    p('--nocache', action='store_true', default=False, help='ignore the WAFCACHE (if set)', dest='nocache')
    p('--zones', action='store', default='', help='debugging zones (task_gen, deps, tasks, etc)', dest='zones')
    p('-p', '--progress', action='count', default=0, help='-p: progress bar; -pp: ide output', dest='progress_bar')
    p('--targets', action='store', default='', help='build given task generators, e.g. "target1,target2"', dest='compile_targets')
    gr = optparse.OptionGroup(parser, 'configuration options')
    parser.add_option_group(gr)
    gr.add_option('-b', '--blddir', action='store', default='', help='build dir for the project (configuration)', dest='blddir')
    gr.add_option('-s', '--srcdir', action='store', default='', help='src dir for the project (configuration)', dest='srcdir')
    gr.add_option('--prefix', help='installation prefix (configuration) [default: %r]' % default_prefix, default=default_prefix, dest='prefix')
    gr = optparse.OptionGroup(parser, 'installation options')
    parser.add_option_group(gr)
    gr.add_option('--destdir', help='installation root [default: %r]' % default_destdir, default=default_destdir, dest='destdir')
    gr.add_option('-f', '--force', action='store_true', default=False, help='force file installation', dest='force')
    return parser
def parse_args_impl(parser, _args=None):
    """Parse the command line, fill the module globals (options, commands,
    arg_line) and configure the logging verbosity/zones accordingly."""
    global options, commands, arg_line
    (options, args) = parser.parse_args(args=_args)
    arg_line = args
    commands = {}
    for var in cmds: commands[var] = 0
    if not args:
        # no command given: default to 'build'
        commands['build'] = 1
        args.append('build')
    for arg in args:
        commands[arg] = True
    if 'check' in args:
        # 'check' implies a 'build' happening before it
        idx = args.index('check')
        try:
            bidx = args.index('build')
            if bidx > idx:
                raise ValueError('build before check')
        except ValueError, e:
            args.insert(idx, 'build')
    if args[0] != 'init':
        args.insert(0, 'init')
    # -k is incompatible with parallel jobs
    if options.keep: options.jobs = 1
    if options.jobs < 1: options.jobs = 1
    if 'install' in sys.argv or 'uninstall' in sys.argv:
        options.destdir = options.destdir and os.path.abspath(os.path.expanduser(options.destdir))
    Logs.verbose = options.verbose
    Logs.init_log()
    if options.zones:
        Logs.zones = options.zones.split(',')
        if not Logs.verbose: Logs.verbose = 1
    elif Logs.verbose > 0:
        Logs.zones = ['runner']
        if Logs.verbose > 2:
            Logs.zones = ['*']
class Handler(Utils.Context):
    """Command-line options context, passed to the wscript set_options().

    Wraps the optparse parser and recurses into sub-project and tool option
    definitions.
    """
    # NOTE(review): despite the name, this class attribute stores the last
    # Handler *instance* (see __init__), not the optparse parser
    parser = None

    def __init__(self, module=None):
        self.parser = create_parser(module)
        self.cwd = os.getcwd()
        Handler.parser = self

    def add_option(self, *k, **kw):
        self.parser.add_option(*k, **kw)

    def add_option_group(self, *k, **kw):
        return self.parser.add_option_group(*k, **kw)

    def get_option_group(self, opt_str):
        return self.parser.get_option_group(opt_str)

    def sub_options(self, *k, **kw):
        """Execute set_options() in the wscript of the given folder."""
        if not k: raise Utils.WscriptError('folder expected')
        self.recurse(k[0], name='set_options')

    def tool_options(self, *k, **kw):
        """Load the given waf tools and run their set_options(), if any."""
        if not k[0]:
            raise Utils.WscriptError('invalid tool_options call %r %r' % (k, kw))
        tools = Utils.to_list(k[0])
        path = Utils.to_list(kw.get('tdir', kw.get('tooldir', tooldir)))
        for tool in tools:
            # 'g++' etc. are stored as 'gxx' module files
            tool = tool.replace('++', 'xx')
            module = Utils.load_tool(tool, path)
            try:
                fun = module.set_options
            except AttributeError:
                pass
            else:
                fun(kw.get('option_group', self))

    def parse_args(self, args=None):
        parse_args_impl(self.parser, args)
#! /usr/bin/env python
# encoding: utf-8
import sys
if sys.hexversion < 0x020400f0: from sets import Set as set
import sys,random,time,threading,traceback
try:from Queue import Queue
except ImportError:from queue import Queue
import Build,Utils,Logs,Options
from Logs import debug,error
from Constants import*
# extra tasks kept in flight beyond the job count (see Parallel.refill_task_list)
GAP = 15
run_old = threading.Thread.run
def run(*args, **kwargs):
    # wrapper installed over Thread.run so uncaught exceptions in worker
    # threads reach sys.excepthook instead of being printed half-heartedly
    try:
        run_old(*args, **kwargs)
    except (KeyboardInterrupt, SystemExit):
        raise
    except:
        sys.excepthook(*sys.exc_info())
threading.Thread.run = run
class TaskConsumer(threading.Thread):
    """Daemon thread that pulls tasks from the master's 'ready' queue,
    executes them, and pushes them to the 'out' queue with their status."""

    def __init__(self, m):
        threading.Thread.__init__(self)
        self.setDaemon(1)
        self.master = m
        self.start()

    def run(self):
        try:
            self.loop()
        except:
            # the consumer must never take the process down
            pass

    def loop(self):
        m = self.master
        while 1:
            tsk = m.ready.get()
            if m.stop:
                # build aborted: drain the queue without running anything
                m.out.put(tsk)
                continue
            try:
                tsk.generator.bld.printout(tsk.display())
                # a task class may provide a custom 'stat' runner
                if tsk.__class__.stat: ret = tsk.__class__.stat(tsk)
                else: ret = tsk.call_run()
            except Exception, e:
                tsk.err_msg = Utils.ex_stack()
                tsk.hasrun = EXCEPTION
                m.error_handler(tsk)
                m.out.put(tsk)
                continue
            if ret:
                # non-zero exit status from the command
                tsk.err_code = ret
                tsk.hasrun = CRASHED
            else:
                try:
                    tsk.post_run()
                except Utils.WafError:
                    pass
                except Exception:
                    tsk.err_msg = Utils.ex_stack()
                    tsk.hasrun = EXCEPTION
                else:
                    tsk.hasrun = SUCCESS
            if tsk.hasrun != SUCCESS:
                m.error_handler(tsk)
            m.out.put(tsk)
class Parallel(object):
    """Task scheduler: feeds runnable tasks to TaskConsumer threads and
    collects results, honoring task groups and the -j job count."""

    def __init__(self, bld, j=2):
        self.numjobs = j                      # number of consumer threads
        self.manager = bld.task_manager
        self.manager.current_group = 0
        self.total = self.manager.total()
        self.outstanding = []                 # tasks ready to be submitted
        self.maxjobs = MAXJOBS
        self.frozen = []                      # tasks postponed (deps not ready)
        self.ready = Queue(0)                 # tasks handed to the consumers
        self.out = Queue(0)                   # finished tasks coming back
        self.count = 0                        # tasks currently in flight
        self.processed = 1
        self.consumers = None                 # threads, created lazily
        self.stop = False
        self.error = False

    def get_next(self):
        if not self.outstanding:
            return None
        return self.outstanding.pop(0)

    def postpone(self, tsk):
        # randomize re-insertion to avoid pathological retry orderings
        if random.randint(0, 1):
            self.frozen.insert(0, tsk)
        else:
            self.frozen.append(tsk)

    def refill_task_list(self):
        # wait for in-flight tasks when the pipeline is saturated
        while self.count > self.numjobs + GAP or self.count >= self.maxjobs:
            self.get_out()
        while not self.outstanding:
            if self.count:
                self.get_out()
            if self.frozen:
                self.outstanding += self.frozen
                self.frozen = []
            elif not self.count:
                # current group exhausted: move to the next task group
                (jobs, tmp) = self.manager.get_next_set()
                if jobs != None: self.maxjobs = jobs
                if tmp: self.outstanding += tmp
                break

    def get_out(self):
        """Collect one finished task; a task may spawn additional tasks."""
        ret = self.out.get()
        self.manager.add_finished(ret)
        if not self.stop and getattr(ret, 'more_tasks', None):
            self.outstanding += ret.more_tasks
            self.total += len(ret.more_tasks)
        self.count -= 1

    def error_handler(self, tsk):
        # without -k the first error stops the whole build
        if not Options.options.keep:
            self.stop = True
        self.error = True

    def start(self):
        """Main scheduling loop; returns when all tasks ran or on error."""
        while not self.stop:
            self.refill_task_list()
            tsk = self.get_next()
            if not tsk:
                if self.count:
                    # tasks still in flight: wait for them
                    continue
                else:
                    break
            if tsk.hasrun:
                self.processed += 1
                self.manager.add_finished(tsk)
                continue
            try:
                st = tsk.runnable_status()
            except Exception, e:
                tsk.err_msg = Utils.ex_stack()
                tsk.hasrun = EXCEPTION
                self.processed += 1
                self.error_handler(tsk)
                self.manager.add_finished(tsk)
                continue
            if st == ASK_LATER:
                self.postpone(tsk)
            elif st == SKIP_ME:
                self.processed += 1
                tsk.hasrun = SKIPPED
                self.manager.add_finished(tsk)
            else:
                # submit to the consumers; create them on first use
                tsk.position = (self.processed, self.total)
                self.count += 1
                self.ready.put(tsk)
                self.processed += 1
                if not self.consumers:
                    self.consumers = [TaskConsumer(self) for i in xrange(self.numjobs)]
        # on error, drain whatever is still in flight before returning
        while self.error and self.count:
            self.get_out()
        assert (self.count == 0 or self.stop)
#! /usr/bin/env python
# encoding: utf-8
import os,sys,shutil,traceback,datetime,inspect,errno
import Utils,Configure,Build,Logs,Options,Environment,Task
from Logs import error,warn,info
from Constants import*
# archive type produced by 'waf dist': 'tar' or 'zip'
dist_format = 'tar'
# tar compression used when dist_format is 'tar'
g_gz = 'bz2'
# remaining commands to execute, consumed by main()
commands = []
def prepare_impl(t, cwd, ver, wafdir):
    """Locate the project wscript, load it as the main module, inject the
    default commands/contexts, parse the command line and run main().

    t: tool directory; cwd: launch directory; ver/wafdir: waf version info.
    """
    Options.tooldir = [t]
    Options.launch_dir = cwd
    # --version needs no wscript at all
    if '--version' in sys.argv:
        opt_obj = Options.Handler()
        opt_obj.curdir = cwd
        opt_obj.parse_args()
        sys.exit(0)
    msg1 = 'Waf: Please run waf from a directory containing a file named "%s" or run distclean' % WSCRIPT_FILE
    build_dir_override = None
    candidate = None
    lst = os.listdir(cwd)
    search_for_candidate = True
    if WSCRIPT_FILE in lst:
        candidate = cwd
    elif 'configure' in sys.argv and not WSCRIPT_BUILD_FILE in lst:
        # 'waf configure' run from outside the source tree: the wscript next
        # to the waf binary is used, and cwd becomes the build directory
        calldir = os.path.abspath(os.path.dirname(sys.argv[0]))
        if WSCRIPT_FILE in os.listdir(calldir):
            candidate = calldir
            search_for_candidate = False
        else:
            error('arg[0] directory does not contain a wscript file')
            sys.exit(1)
        build_dir_override = cwd
    # walk up the directory tree looking for a wscript or a lock file
    while search_for_candidate:
        if len(cwd) <= 3:
            break
        dirlst = os.listdir(cwd)
        if WSCRIPT_FILE in dirlst:
            candidate = cwd
        if 'configure' in sys.argv and candidate:
            break
        if Options.lockfile in dirlst:
            # a lock file points back at the directory waf was configured in
            env = Environment.Environment()
            env.load(os.path.join(cwd, Options.lockfile))
            try:
                os.stat(env['cwd'])
            except:
                candidate = cwd
            else:
                candidate = env['cwd']
            break
        cwd = os.path.dirname(cwd)
    if not candidate:
        # no project found: still show --help if that is what was asked for
        if '-h' in sys.argv or '--help' in sys.argv:
            warn('No wscript file found: the help message may be incomplete')
            opt_obj = Options.Handler()
            opt_obj.curdir = cwd
            opt_obj.parse_args()
        else:
            error(msg1)
        sys.exit(0)
    try:
        os.chdir(candidate)
    except OSError:
        raise Utils.WafError("the folder %r is unreadable" % candidate)
    Utils.set_main_module(os.path.join(candidate, WSCRIPT_FILE))
    if build_dir_override:
        d = getattr(Utils.g_module, BLDDIR, None)
        if d:
            msg = ' Overriding build directory %s with %s' % (d, build_dir_override)
            warn(msg)
        Utils.g_module.blddir = build_dir_override
    def set_def(obj, name=''):
        # install a default only when the wscript does not define its own
        n = name or obj.__name__
        if not n in Utils.g_module.__dict__:
            setattr(Utils.g_module, n, obj)
    for k in [dist, distclean, distcheck, clean, install, uninstall]:
        set_def(k)
    set_def(Configure.ConfigurationContext, 'configure_context')
    for k in ['build', 'clean', 'install', 'uninstall']:
        set_def(Build.BuildContext, k + '_context')
    opt_obj = Options.Handler(Utils.g_module)
    opt_obj.curdir = candidate
    try:
        f = Utils.g_module.set_options
    except AttributeError:
        pass
    else:
        opt_obj.sub_options([''])
    opt_obj.parse_args()
    if not 'init' in Utils.g_module.__dict__:
        Utils.g_module.init = Utils.nada
    if not 'shutdown' in Utils.g_module.__dict__:
        Utils.g_module.shutdown = Utils.nada
    main()
def prepare(t, cwd, ver, wafdir):
    """Entry point called by the waf launcher: verify the library version,
    then run prepare_impl, converting errors into exit codes."""
    if WAFVERSION != ver:
        msg = 'Version mismatch: waf %s <> wafadmin %s (wafdir %s)' % (ver, WAFVERSION, wafdir)
        print('\033[91mError: %s\033[0m' % msg)
        sys.exit(1)
    try:
        prepare_impl(t, cwd, ver, wafdir)
    except Utils.WafError, e:
        error(str(e))
        sys.exit(1)
    except KeyboardInterrupt:
        Utils.pprint('RED', 'Interrupted')
        # 68: conventional exit code for an interrupted waf run
        sys.exit(68)
def main():
    """Execute the commands given on the command line, one after another,
    each with its matching context object; appends a final 'shutdown'."""
    global commands
    commands = Options.arg_line[:]
    while commands:
        x = commands.pop(0)
        ini = datetime.datetime.now()
        if x == 'configure':
            fun = configure
        elif x == 'build':
            fun = build
        else:
            fun = getattr(Utils.g_module, x, None)
        if not fun:
            raise Utils.WscriptError('No such command %r' % x)
        ctx = getattr(Utils.g_module, x + '_context', Utils.Context)()
        if x in ['init', 'shutdown', 'dist', 'distclean', 'distcheck']:
            # these commands historically take no argument; try with the
            # context first for newer-style wscripts
            try:
                fun(ctx)
            except TypeError:
                fun()
        else:
            fun(ctx)
        ela = ''
        if not Options.options.progress_bar:
            ela = ' (%s)' % Utils.get_elapsed_time(ini)
        if x != 'init' and x != 'shutdown':
            info('%r finished successfully%s' % (x, ela))
        if not commands and x != 'shutdown':
            commands.append('shutdown')
def configure(conf):
    """Run the project configuration: resolve srcdir/blddir, execute the
    wscript configure functions, and store the lock/environment files."""
    src = getattr(Options.options, SRCDIR, None)
    if not src: src = getattr(Utils.g_module, SRCDIR, None)
    if not src:
        src = '.'
        incomplete_src = 1
    src = os.path.abspath(src)
    bld = getattr(Options.options, BLDDIR, None)
    if not bld:
        bld = getattr(Utils.g_module, BLDDIR, None)
        if bld == '.':
            raise Utils.WafError('Setting blddir="." may cause distclean problems')
    if not bld:
        bld = 'build'
        incomplete_bld = 1
    bld = os.path.abspath(bld)
    try: os.makedirs(bld)
    except OSError: pass
    # compile targets make no sense while configuring; restored at the end
    targets = Options.options.compile_targets
    Options.options.compile_targets = None
    Options.is_install = False
    conf.srcdir = src
    conf.blddir = bld
    conf.post_init()
    # 'incomplete_*' locals exist only when the defaults were applied above
    if 'incomplete_src' in vars():
        conf.check_message_1('Setting srcdir to')
        conf.check_message_2(src)
    if 'incomplete_bld' in vars():
        conf.check_message_1('Setting blddir to')
        conf.check_message_2(bld)
    conf.sub_config([''])
    conf.store()
    # record everything needed to replay the configuration (autoconfig)
    env = Environment.Environment()
    env[BLDDIR] = bld
    env[SRCDIR] = src
    env['argv'] = sys.argv
    env['commands'] = Options.commands
    env['options'] = Options.options.__dict__
    env['hash'] = conf.hash
    env['files'] = conf.files
    env['environ'] = dict(conf.environ)
    env['cwd'] = os.path.split(Utils.g_module.root_path)[0]
    if Utils.g_module.root_path != src:
        # waf invoked from a sub-wscript: drop a lock file next to the sources too
        env.store(os.path.join(src, Options.lockfile))
    env.store(Options.lockfile)
    Options.options.compile_targets = targets
def clean(bld):
    '''removes the build files'''
    # requires a prior 'configure': the lock file holds srcdir/blddir
    try:
        proj = Environment.Environment(Options.lockfile)
    except IOError:
        raise Utils.WafError('Nothing to clean (project not configured)')
    bld.load_dirs(proj[SRCDIR], proj[BLDDIR])
    bld.load_envs()
    bld.is_install = 0
    bld.add_subdirs([os.path.split(Utils.g_module.root_path)[0]])
    try:
        bld.clean()
    finally:
        # persist the (now pruned) build state even if cleaning failed
        bld.save()
def check_configured(bld):
    """With autoconfig enabled, re-run 'configure' automatically whenever the
    configuration is missing, unloadable, or out of date (hash mismatch of
    the recorded wscript files); returns a (possibly new) build context."""
    if not Configure.autoconfig:
        return bld
    conf_cls = getattr(Utils.g_module, 'configure_context', Utils.Context)
    bld_cls = getattr(Utils.g_module, 'build_context', Utils.Context)
    def reconf(proj):
        # replay the stored configure with the original options/commands,
        # then restore the current ones
        back = (Options.commands, Options.options.__dict__, Logs.zones, Logs.verbose)
        Options.commands = proj['commands']
        Options.options.__dict__ = proj['options']
        conf = conf_cls()
        conf.environ = proj['environ']
        configure(conf)
        (Options.commands, Options.options.__dict__, Logs.zones, Logs.verbose) = back
    try:
        proj = Environment.Environment(Options.lockfile)
    except IOError:
        # never configured: configure from scratch
        conf = conf_cls()
        configure(conf)
    else:
        try:
            bld = bld_cls()
            bld.load_dirs(proj[SRCDIR], proj[BLDDIR])
            bld.load_envs()
        except Utils.WafError:
            # stale/broken configuration: reconfigure and start over
            reconf(proj)
            return bld_cls()
    try:
        proj = Environment.Environment(Options.lockfile)
    except IOError:
        raise Utils.WafError('Auto-config: project does not configure (bug)')
    # compare the recorded hash of all configuration inputs
    h = 0
    try:
        for file in proj['files']:
            if file.endswith('configure'):
                h = hash((h, Utils.readf(file)))
            else:
                mod = Utils.load_module(file)
                h = hash((h, mod.waf_hash_val))
    except (OSError, IOError):
        warn('Reconfiguring the project: a file is unavailable')
        reconf(proj)
    else:
        if (h != proj['hash']):
            warn('Reconfiguring the project: the configuration has changed')
            reconf(proj)
    return bld_cls()
def install(bld):
    '''installs the build files'''
    bld = check_configured(bld)
    # flip the global command flags so task generators know the mode
    Options.commands['install'] = True
    Options.commands['uninstall'] = False
    Options.is_install = True
    bld.is_install = INSTALL
    build_impl(bld)
    bld.install()
def uninstall(bld):
    '''removes the installed files'''
    Options.commands['install'] = False
    Options.commands['uninstall'] = True
    Options.is_install = True
    bld.is_install = UNINSTALL
    try:
        # temporarily make every task a no-op: only the install/uninstall
        # bookkeeping of build_impl/bld.install must run, not the commands
        def runnable_status(self):
            return SKIP_ME
        setattr(Task.Task, 'runnable_status_back', Task.Task.runnable_status)
        setattr(Task.Task, 'runnable_status', runnable_status)
        build_impl(bld)
        bld.install()
    finally:
        setattr(Task.Task, 'runnable_status', Task.Task.runnable_status_back)
def build(bld):
    """Default 'build' command: ensure the project is configured, reset the
    install flags, and run the build proper."""
    bld = check_configured(bld)
    Options.commands['install'] = False
    Options.commands['uninstall'] = False
    Options.is_install = False
    bld.is_install = 0
    return build_impl(bld)
def build_impl(bld):
    """Shared implementation of build/install/uninstall: load the stored
    configuration, execute the wscript build functions, and compile."""
    try:
        proj = Environment.Environment(Options.lockfile)
    except IOError:
        raise Utils.WafError("Project not configured (run 'waf configure' first)")
    bld.load_dirs(proj[SRCDIR], proj[BLDDIR])
    bld.load_envs()
    info("Waf: Entering directory `%s'" % bld.bldnode.abspath())
    bld.add_subdirs([os.path.split(Utils.g_module.root_path)[0]])
    bld.pre_build()
    try:
        bld.compile()
    finally:
        # terminate the progress-bar line before the closing message
        if Options.options.progress_bar: print('')
        info("Waf: Leaving directory `%s'" % bld.bldnode.abspath())
    bld.post_build()
    bld.install()
# names always excluded from source archives (version control, build files)
excludes = '.bzr .bzrignore .git .gitignore .svn CVS .cvsignore .arch-ids {arch} SCCS BitKeeper .hg _MTN _darcs Makefile Makefile.in config.log'.split()
# file suffixes excluded from source archives (backups, bytecode, old archives)
dist_exts = '~ .rej .orig .pyc .pyo .bak .tar.bz2 tar.gz .zip .swp'.split()
def dont_dist(name, src, build_dir):
    """Return True when *name* (inside directory *src*) must be excluded
    from the source archive built by 'waf dist'."""
    # waf artifacts and the lock file at the project top level
    if name.startswith(',,') or name.startswith('++') or name.startswith('.waf-1.'):
        return True
    if src == '.' and name == Options.lockfile:
        return True
    # version-control metadata and the build directory itself
    if name in excludes or name == build_dir:
        return True
    # editor backups, bytecode, previously built archives...
    for ext in dist_exts:
        if name.endswith(ext):
            return True
    return False
def copytree(src, dst, build_dir):
    """Recursively copy *src* into the new directory *dst*, skipping every
    entry rejected by dont_dist (version control, build dir, backups)."""
    entries = os.listdir(src)
    os.makedirs(dst)
    for entry in entries:
        if dont_dist(entry, src, build_dir):
            continue
        source = os.path.join(src, entry)
        target = os.path.join(dst, entry)
        if os.path.isdir(source):
            copytree(source, target, build_dir)
        else:
            # copy2 preserves file metadata (mtime, permissions)
            shutil.copy2(source, target)
def distclean(ctx=None):
    '''removes the build directory'''
    # for every lock file found in the current directory, remove the build
    # directory it points to, the lock file itself, and waf cache folders
    lst = os.listdir('.')
    for f in lst:
        if f == Options.lockfile:
            try:
                proj = Environment.Environment(f)
            except:
                Logs.warn('could not read %r' % f)
                continue
            try:
                shutil.rmtree(proj[BLDDIR])
            except IOError:
                pass
            except OSError, e:
                # a missing directory is fine; anything else is reported
                if e.errno != errno.ENOENT:
                    Logs.warn('project %r cannot be removed' % proj[BLDDIR])
            try:
                os.remove(f)
            except OSError, e:
                if e.errno != errno.ENOENT:
                    Logs.warn('file %r cannot be removed' % f)
        if f.startswith('.waf-'):
            # unpacked waf library directory
            shutil.rmtree(f, ignore_errors=True)
def get_version():
    """Project version string: the wscript's get_version() when defined,
    otherwise its VERSION constant, defaulting to '1.0'."""
    module = Utils.g_module
    if hasattr(module, "get_version"):
        return module.get_version()
    return getattr(module, VERSION, '1.0')
def zipper(dir, zip_file, archive_main_folder=None):
    """Recursively pack the contents of directory *dir* into the zip archive
    *zip_file* (deflate compression), storing paths relative to *dir*.

    dir: directory to archive.
    zip_file: path of the archive to create.
    archive_main_folder: optional folder name prepended to every entry.

    Fixes over the original: the local no longer shadows the builtin 'zip',
    and the archive is closed in a 'finally' block, so a failing write no
    longer leaks the file handle / leaves the archive half-written open.
    """
    import zipfile
    zf = zipfile.ZipFile(zip_file, 'w', compression=zipfile.ZIP_DEFLATED)
    try:
        root_len = len(os.path.abspath(dir))
        for root, dirs, files in os.walk(dir):
            # path of the current directory relative to the archive root
            archive_root = os.path.abspath(root)[root_len:]
            for f in files:
                fullpath = os.path.join(root, f)
                archive_name = os.path.join(archive_root, f)
                if archive_main_folder is not None:
                    if archive_name.startswith(os.sep):
                        n = archive_name[len(os.sep):]
                    else:
                        n = archive_name
                    archive_name = os.path.join(archive_main_folder, n)
                zf.write(fullpath, archive_name, zipfile.ZIP_DEFLATED)
    finally:
        zf.close()
def dist(appname='', version=''):
    '''makes a tarball for redistributing the sources'''
    import tarfile
    # defaults come from the wscript module (APPNAME / VERSION)
    if not appname: appname = getattr(Utils.g_module, APPNAME, 'noname')
    if not version: version = get_version()
    tmp_folder = appname + '-' + version
    if dist_format == 'zip':
        arch_name = tmp_folder + '.zip'
    elif dist_format == 'tar':
        arch_name = tmp_folder + '.tar.' + g_gz
    else:
        raise ValueError("invalid dist_format option %r" % dist_format)
    # start from a clean slate: remove leftovers of a previous run
    try:
        shutil.rmtree(tmp_folder)
    except (OSError, IOError):
        pass
    try:
        os.remove(arch_name)
    except (OSError, IOError):
        pass
    # stage a filtered copy of the sources (build dir and VCS data excluded)
    copytree('.', tmp_folder, getattr(Utils.g_module, BLDDIR, None))
    # let the project post-process the staged tree (waf convention)
    dist_hook = getattr(Utils.g_module, 'dist_hook', None)
    if dist_hook:
        back = os.getcwd()
        os.chdir(tmp_folder)
        try:
            dist_hook()
        finally:
            os.chdir(back)
    if dist_format == 'tar':
        tar = tarfile.open(arch_name, 'w:' + g_gz)
        tar.add(tmp_folder)
        tar.close()
    elif dist_format == 'zip':
        zipper(tmp_folder, arch_name, tmp_folder)
    else:
        raise ValueError("invalid dist_format option %r" % dist_format)
    # best-effort checksum of the produced archive for the log message
    try: from hashlib import sha1 as sha
    except ImportError: from sha import sha
    try:
        digest = " (sha=%r)" % sha(Utils.readf(arch_name)).hexdigest()
    except:
        digest = ''
    info('New archive created: %s%s' % (arch_name, digest))
    if os.path.exists(tmp_folder): shutil.rmtree(tmp_folder)
    return arch_name
def distcheck(appname='',version=''):
	'''checks if the sources compile (tarball from 'dist')'''
	# Build a source archive with dist(), unpack it in the current folder,
	# then run a full configure/install/uninstall cycle in the unpacked tree.
	import tempfile,tarfile
	if not appname:appname=getattr(Utils.g_module,APPNAME,'noname')
	if not version:version=get_version()
	waf=os.path.abspath(sys.argv[0])
	tarball=dist(appname,version)
	t=tarfile.open(tarball)
	for x in t:t.extract(x)
	t.close()
	path=appname+'-'+version
	# temporary --destdir; it must be empty (removed) again after 'uninstall'
	instdir=tempfile.mkdtemp('.inst','%s-%s'%(appname,version))
	ret=Utils.pproc.Popen([waf,'configure','install','uninstall','--destdir='+instdir],cwd=path).wait()
	if ret:
		raise Utils.WafError('distcheck failed with code %i'%ret)
	if os.path.exists(instdir):
		raise Utils.WafError('distcheck succeeded, but files were left in %s'%instdir)
	shutil.rmtree(path)
def add_subdir(dir,bld):
	"""Compatibility helper: schedule the build() function of the wscript in *dir*."""
	bld.recurse(dir,'build')
#! /usr/bin/env python
# encoding: utf-8
import sys
if sys.hexversion < 0x020400f0: from sets import Set as set
import os,shutil,sys,re,random,datetime
from Utils import md5
import Build,Runner,Utils,Node,Logs,Options
from Logs import debug,warn,error
from Constants import*
algotype=NORMAL
COMPILE_TEMPLATE_SHELL='''
def f(task):
env = task.env
wd = getattr(task, 'cwd', None)
p = env.get_flat
cmd = \'\'\' %s \'\'\' % s
return task.exec_command(cmd, cwd=wd)
'''
COMPILE_TEMPLATE_NOSHELL='''
def f(task):
env = task.env
wd = getattr(task, 'cwd', None)
def to_list(xx):
if isinstance(xx, str): return [xx]
return xx
lst = []
%s
lst = [x for x in lst if x]
return task.exec_command(lst, cwd=wd)
'''
file_deps=Utils.nada
class TaskManager(object):
	"""Holds the ordered groups of tasks and hands sets of runnable tasks to the scheduler."""
	def __init__(self):
		self.groups=[]
		self.tasks_done=[]
		self.current_group=0
		# user-visible group name -> TaskGroup
		self.groups_names={}
	def get_next_set(self):
		"""Return (max jobs, tasks) from the current group, advancing to the next group when one is exhausted; (None,None) when all groups are done."""
		ret=None
		while not ret and self.current_group<len(self.groups):
			ret=self.groups[self.current_group].get_next_set()
			if ret:return ret
			else:
				# the group is finished: run its post functions, move on
				self.groups[self.current_group].process_install()
				self.current_group+=1
		return(None,None)
	def add_group(self,name=None,set=True):
		"""Append a new TaskGroup; when *set* is true it becomes the current group."""
		g=TaskGroup()
		if name and name in self.groups_names:
			error('add_group: name %s already present'%name)
		self.groups_names[name]=g
		self.groups.append(g)
		if set:
			self.current_group=len(self.groups)-1
	def set_group(self,idx):
		"""Select the current group, by name (str) or by index."""
		if isinstance(idx,str):
			g=self.groups_names[idx]
			for x in xrange(len(self.groups)):
				if id(g)==id(self.groups[x]):
					self.current_group=x
		else:
			self.current_group=idx
	def add_task_gen(self,tgen):
		"""Register a task generator in the current group (creating one if needed)."""
		if not self.groups:self.add_group()
		self.groups[self.current_group].tasks_gen.append(tgen)
	def add_task(self,task):
		"""Register a task in the current group (creating one if needed)."""
		if not self.groups:self.add_group()
		self.groups[self.current_group].tasks.append(task)
	def total(self):
		"""Return the total number of tasks across all groups."""
		total=0
		if not self.groups:return 0
		for group in self.groups:
			total+=len(group.tasks)
		return total
	def add_finished(self,tsk):
		"""Record a completed task and trigger its installation when the build is installing."""
		self.tasks_done.append(tsk)
		bld=tsk.generator.bld
		if bld.is_install:
			f=None
			if'install'in tsk.__dict__:
				# an instance-level 'install' callable overrides the method
				f=tsk.__dict__['install']
				if f:f(tsk)
			else:
				tsk.install()
class TaskGroup(object):
	"""A set of tasks executed together; computes ordering constraints between task classes."""
	def __init__(self):
		self.tasks=[]
		self.tasks_gen=[]
		# constraint-hash -> list of tasks sharing the same constraints
		self.cstr_groups=Utils.DefaultDict(list)
		# constraint-hash -> set of constraint-hashes that must run after it
		self.cstr_order=Utils.DefaultDict(set)
		self.temp_tasks=[]
		self.ready=0
		self.post_funs=[]
	def reset(self):
		"""Put all tasks back into self.tasks and clear the computed constraints."""
		for x in self.cstr_groups:
			self.tasks+=self.cstr_groups[x]
		self.tasks=self.temp_tasks+self.tasks
		self.temp_tasks=[]
		self.cstr_groups=Utils.DefaultDict(list)
		self.cstr_order=Utils.DefaultDict(set)
		self.ready=0
	def process_install(self):
		"""Run the post-build functions registered in self.post_funs."""
		for(f,k,kw)in self.post_funs:
			f(*k,**kw)
	def prepare(self):
		"""Compute the constraint groups and their ordering before scheduling."""
		self.ready=1
		file_deps(self.tasks)
		self.make_cstr_groups()
		self.extract_constraints()
	def get_next_set(self):
		"""Return (max jobs, tasks) for the scheduling algorithm in use, or () when nothing is left."""
		global algotype
		if algotype==NORMAL:
			tasks=self.tasks_in_parallel()
			maxj=MAXJOBS
		elif algotype==JOBCONTROL:
			(maxj,tasks)=self.tasks_by_max_jobs()
		elif algotype==MAXPARALLEL:
			tasks=self.tasks_with_inner_constraints()
			maxj=MAXJOBS
		else:
			raise Utils.WafError("unknown algorithm type %s"%(algotype))
		if not tasks:return()
		return(maxj,tasks)
	def make_cstr_groups(self):
		"""Group the tasks by their constraint hash (before/after/ext_in/ext_out/maxjobs)."""
		self.cstr_groups=Utils.DefaultDict(list)
		for x in self.tasks:
			h=x.hash_constraints()
			self.cstr_groups[h].append(x)
	def set_order(self,a,b):
		# record that group *b* must run after group *a*
		self.cstr_order[a].add(b)
	def compare_exts(self,t1,t2):
		"""Compare by file extensions: -1 when t1 consumes what t2 produces, 1 for the converse, else 0."""
		x="ext_in"
		y="ext_out"
		in_=t1.attr(x,())
		out_=t2.attr(y,())
		for k in in_:
			if k in out_:
				return-1
		in_=t2.attr(x,())
		out_=t1.attr(y,())
		for k in in_:
			if k in out_:
				return 1
		return 0
	def compare_partial(self,t1,t2):
		"""Compare by the explicit before/after class-name constraints."""
		m="after"
		n="before"
		name=t2.__class__.__name__
		if name in Utils.to_list(t1.attr(m,())):return-1
		elif name in Utils.to_list(t1.attr(n,())):return 1
		name=t1.__class__.__name__
		if name in Utils.to_list(t2.attr(m,())):return 1
		elif name in Utils.to_list(t2.attr(n,())):return-1
		return 0
	def extract_constraints(self):
		"""Fill cstr_order by comparing one representative task of each constraint group."""
		keys=self.cstr_groups.keys()
		max=len(keys)
		for i in xrange(max):
			t1=self.cstr_groups[keys[i]][0]
			for j in xrange(i+1,max):
				t2=self.cstr_groups[keys[j]][0]
				val=(self.compare_exts(t1,t2)or self.compare_partial(t1,t2))
				if val>0:
					self.set_order(keys[i],keys[j])
				elif val<0:
					self.set_order(keys[j],keys[i])
	def tasks_in_parallel(self):
		"""Pop and return the tasks of the groups that no remaining group must precede."""
		if not self.ready:self.prepare()
		keys=self.cstr_groups.keys()
		unconnected=[]
		remainder=[]
		for u in keys:
			# a group appearing in any 'runs after' set is not ready yet
			for k in self.cstr_order.values():
				if u in k:
					remainder.append(u)
					break
			else:
				unconnected.append(u)
		toreturn=[]
		for y in unconnected:
			toreturn.extend(self.cstr_groups[y])
		for y in unconnected:
			try:self.cstr_order.__delitem__(y)
			except KeyError:pass
			self.cstr_groups.__delitem__(y)
		if not toreturn and remainder:
			raise Utils.WafError("circular order constraint detected %r"%remainder)
		return toreturn
	def tasks_by_max_jobs(self):
		"""Return (maxjobs, tasks), batching together only tasks accepting the same parallelism level."""
		if not self.ready:self.prepare()
		if not self.temp_tasks:self.temp_tasks=self.tasks_in_parallel()
		if not self.temp_tasks:return(None,None)
		maxjobs=MAXJOBS
		ret=[]
		remaining=[]
		for t in self.temp_tasks:
			m=getattr(t,"maxjobs",getattr(self.__class__,"maxjobs",MAXJOBS))
			if m>maxjobs:
				remaining.append(t)
			elif m<maxjobs:
				# a more restrictive task: requeue the batch collected so far
				remaining+=ret
				ret=[t]
				maxjobs=m
			else:
				ret.append(t)
		self.temp_tasks=remaining
		return(maxjobs,ret)
	def tasks_with_inner_constraints(self):
		"""Return all tasks at once, translating the group ordering into per-task run_after links."""
		if not self.ready:self.prepare()
		if getattr(self,"done",None):return None
		for p in self.cstr_order:
			for v in self.cstr_order[p]:
				for m in self.cstr_groups[p]:
					for n in self.cstr_groups[v]:
						n.set_run_after(m)
		self.cstr_order=Utils.DefaultDict(set)
		self.cstr_groups=Utils.DefaultDict(list)
		self.done=1
		return self.tasks[:]
class store_task_type(type):
	"""Metaclass: subclasses named '<name>_task' auto-register in TaskBase.classes under '<name>'."""
	def __init__(cls,name,bases,dict):
		super(store_task_type,cls).__init__(name,bases,dict)
		name=cls.__name__
		if name.endswith('_task'):
			name=name.replace('_task','')
			TaskBase.classes[name]=cls
class TaskBase(object):
	"""Base class for all tasks; subclasses register themselves by name via the metaclass."""
	__metaclass__=store_task_type
	# console color used when displaying this task
	color="GREEN"
	# maximum number of parallel jobs for this task class
	maxjobs=MAXJOBS
	# name -> task class, filled by store_task_type
	classes={}
	stat=None
	def __init__(self,*k,**kw):
		self.hasrun=NOT_RUN
		try:
			self.generator=kw['generator']
		except KeyError:
			# standalone task: it is its own generator
			self.generator=self
			self.bld=Build.bld
		if kw.get('normal',1):
			self.generator.bld.task_manager.add_task(self)
	def __repr__(self):
		return'\n\t{task: %s %s}'%(self.__class__.__name__,str(getattr(self,"fun","")))
	def __str__(self):
		if hasattr(self,'fun'):
			return'executing: %s\n'%self.fun.__name__
		return self.__class__.__name__+'\n'
	def exec_command(self,*k,**kw):
		"""Delegate command execution to the build context."""
		return self.generator.bld.exec_command(*k,**kw)
	def runnable_status(self):
		"""Tell the scheduler what to do with this task (always RUN_ME at this level)."""
		return RUN_ME
	def can_retrieve_cache(self):
		return False
	def call_run(self):
		"""Run the task unless its outputs could be fetched from the cache."""
		if self.can_retrieve_cache():
			return 0
		return self.run()
	def run(self):
		"""Execute self.fun when present; subclasses override this with real work."""
		if hasattr(self,'fun'):
			return self.fun(self)
		return 0
	def post_run(self):
		pass
	def display(self):
		"""Return the progress line printed when the task executes (format depends on --progress)."""
		col1=Logs.colors(self.color)
		col2=Logs.colors.NORMAL
		if Options.options.progress_bar==1:
			return self.generator.bld.progress_line(self.position[0],self.position[1],col1,col2)
		if Options.options.progress_bar==2:
			ela=Utils.get_elapsed_time(self.generator.bld.ini)
			try:
				ins=','.join([n.name for n in self.inputs])
			except AttributeError:
				ins=''
			try:
				outs=','.join([n.name for n in self.outputs])
			except AttributeError:
				outs=''
			return'|Total %s|Current %s|Inputs %s|Outputs %s|Time %s|\n'%(self.position[1],self.position[0],ins,outs,ela)
		total=self.position[1]
		n=len(str(total))
		# e.g. '[ 12/100] ...' with the counter width taken from the total
		fs='[%%%dd/%%%dd] %%s%%s%%s'%(n,n)
		return fs%(self.position[0],self.position[1],col1,str(self),col2)
	def attr(self,att,default=None):
		"""Return the instance attribute *att*, falling back to the class attribute, then *default*."""
		ret=getattr(self,att,self)
		if ret is self:return getattr(self.__class__,att,default)
		return ret
	def hash_constraints(self):
		"""Hash the scheduling constraints so tasks with identical ones can be grouped."""
		a=self.attr
		sum=hash((self.__class__.__name__,str(a('before','')),str(a('after','')),str(a('ext_in','')),str(a('ext_out','')),self.__class__.maxjobs))
		return sum
	def format_error(self):
		"""Return a message describing why the task failed, or '' when it did not."""
		if getattr(self,"err_msg",None):
			return self.err_msg
		elif self.hasrun==CRASHED:
			try:
				return" -> task failed (err #%d): %r"%(self.err_code,self)
			except AttributeError:
				return" -> task failed: %r"%self
		elif self.hasrun==MISSING:
			return" -> missing files: %r"%self
		else:
			return''
	def install(self):
		"""Schedule the installation of the outputs according to the 'install_path' attribute."""
		bld=self.generator.bld
		d=self.attr('install')
		if self.attr('install_path'):
			lst=[a.relpath_gen(bld.srcnode)for a in self.outputs]
			perm=self.attr('chmod',O644)
			if self.attr('src'):
				# install the input files as well
				lst+=[a.relpath_gen(bld.srcnode)for a in self.inputs]
			if self.attr('filename'):
				# a single file installed under a different name
				dir=self.install_path.rstrip(os.sep)+os.sep+self.attr('filename')
				bld.install_as(dir,lst[0],self.env,perm)
			else:
				bld.install_files(self.install_path,lst,self.env,perm)
class Task(TaskBase):
	"""A task bound to input/output nodes; signatures decide whether it must rerun."""
	# environment variable names this task class depends on
	vars=[]
	def __init__(self,env,**kw):
		TaskBase.__init__(self,**kw)
		self.env=env
		self.inputs=[]
		self.outputs=[]
		self.deps_nodes=[]
		self.run_after=[]
	def __str__(self):
		env=self.env
		src_str=' '.join([a.nice_path(env)for a in self.inputs])
		tgt_str=' '.join([a.nice_path(env)for a in self.outputs])
		if self.outputs:sep=' -> '
		else:sep=''
		return'%s: %s%s%s\n'%(self.__class__.__name__.replace('_task',''),src_str,sep,tgt_str)
	def __repr__(self):
		return"".join(['\n\t{task: ',self.__class__.__name__," ",",".join([x.name for x in self.inputs])," -> ",",".join([x.name for x in self.outputs]),'}'])
	def unique_id(self):
		"""Return (and cache) an id computed from the class name, variant and node paths."""
		try:
			return self.uid
		except AttributeError:
			m=md5()
			up=m.update
			up(self.__class__.__name__)
			up(self.env.variant())
			p=None
			for x in self.inputs+self.outputs:
				if p!=x.parent.id:
					# hash the parent folder only when it changes
					p=x.parent.id
					up(x.parent.abspath())
				up(x.name)
			self.uid=m.digest()
			return self.uid
	def set_inputs(self,inp):
		"""Append a node or a list of nodes to the inputs."""
		if isinstance(inp,list):self.inputs+=inp
		else:self.inputs.append(inp)
	def set_outputs(self,out):
		"""Append a node or a list of nodes to the outputs."""
		if isinstance(out,list):self.outputs+=out
		else:self.outputs.append(out)
	def set_run_after(self,task):
		"""Ensure this task runs after *task*."""
		assert isinstance(task,TaskBase)
		self.run_after.append(task)
	def add_file_dependency(self,filename):
		"""Add a manual dependency on *filename*."""
		node=self.generator.bld.current.find_resource(filename)
		self.deps_nodes.append(node)
	def signature(self):
		"""Return (and cache) the task signature: explicit deps + implicit deps + env variables."""
		try:return self.cache_sig[0]
		except AttributeError:pass
		m=md5()
		exp_sig=self.sig_explicit_deps()
		m.update(exp_sig)
		imp_sig=self.scan and self.sig_implicit_deps()or SIG_NIL
		m.update(imp_sig)
		var_sig=self.sig_vars()
		m.update(var_sig)
		ret=m.digest()
		self.cache_sig=(ret,exp_sig,imp_sig,var_sig)
		return ret
	def runnable_status(self):
		"""Return ASK_LATER, RUN_ME or SKIP_ME by comparing the new signature with the stored one."""
		if self.inputs and(not self.outputs):
			if not getattr(self.__class__,'quiet',None):
				warn("invalid task (no inputs OR outputs): override in a Task subclass or set the attribute 'quiet' %r"%self)
		for t in self.run_after:
			if not t.hasrun:
				return ASK_LATER
		env=self.env
		bld=self.generator.bld
		try:
			new_sig=self.signature()
		except KeyError:
			debug("task: something is wrong, computing the task %r signature failed"%self)
			return RUN_ME
		key=self.unique_id()
		try:
			prev_sig=bld.task_sigs[key][0]
		except KeyError:
			debug("task: task %r must run as it was never run before or the task code changed"%self)
			return RUN_ME
		for node in self.outputs:
			variant=node.variant(env)
			try:
				if bld.node_sigs[variant][node.id]!=new_sig:
					return RUN_ME
			except KeyError:
				debug("task: task %r must run as the output nodes do not exist"%self)
				return RUN_ME
		if Logs.verbose:self.debug_why(bld.task_sigs[key])
		if new_sig!=prev_sig:
			return RUN_ME
		return SKIP_ME
	def post_run(self):
		"""Store the signatures of the produced files; raise WafError when an output is missing."""
		bld=self.generator.bld
		env=self.env
		sig=self.signature()
		cnt=0
		variant=env.variant()
		for node in self.outputs:
			try:
				os.stat(node.abspath(env))
			except OSError:
				# bug fix: was 'self.has_run', but the status attribute read by
				# format_error and the scheduler is 'hasrun'
				self.hasrun=MISSING
				self.err_msg='-> missing file: %r'%node.abspath(env)
				raise Utils.WafError
			bld.node_sigs[variant][node.id]=sig
			if Options.cache_global:
				# keep a copy of the output in the global cache folder
				ssig=sig.encode('hex')
				dest=os.path.join(Options.cache_global,'%s_%d_%s'%(ssig,cnt,node.name))
				try:shutil.copy2(node.abspath(env),dest)
				except IOError:warn('Could not write the file to the cache')
			cnt+=1
		bld.task_sigs[self.unique_id()]=self.cache_sig
	def can_retrieve_cache(self):
		"""Try to copy the outputs from the global cache; return 1 on success, None otherwise."""
		if not Options.cache_global:return None
		if Options.options.nocache:return None
		if not self.outputs:return None
		env=self.env
		sig=self.signature()
		cnt=0
		for node in self.outputs:
			variant=node.variant(env)
			ssig=sig.encode('hex')
			orig=os.path.join(Options.cache_global,'%s_%d_%s'%(ssig,cnt,node.name))
			try:
				shutil.copy2(orig,node.abspath(env))
				# refresh the cached copy's timestamp
				os.utime(orig,None)
			except(OSError,IOError):
				debug('task: failed retrieving file')
				return None
			else:
				cnt+=1
		for node in self.outputs:
			self.generator.bld.node_sigs[variant][node.id]=sig
			self.generator.bld.printout('restoring from cache %r\n'%node.bldpath(env))
		return 1
	def debug_why(self,old_sigs):
		"""Log which part of the signature (deps, implicit deps or variables) changed."""
		new_sigs=self.cache_sig
		def v(x):
			return x.encode('hex')
		debug("Task %r"%self)
		msgs=['Task must run','* Source file or manual dependency','* Implicit dependency','* Environment variable']
		tmp='task: -> %s: %s %s'
		for x in xrange(len(msgs)):
			if(new_sigs[x]!=old_sigs[x]):
				debug(tmp%(msgs[x],v(old_sigs[x]),v(new_sigs[x])))
	def sig_explicit_deps(self):
		"""Hash the input nodes and the manually declared dependencies."""
		bld=self.generator.bld
		m=md5()
		for x in self.inputs+getattr(self,'dep_nodes',[]):
			if not x.parent.id in bld.cache_scanned_folders:
				bld.rescan(x.parent)
			variant=x.variant(self.env)
			m.update(bld.node_sigs[variant][x.id])
		if bld.deps_man:
			additional_deps=bld.deps_man
			for x in self.inputs+self.outputs:
				try:
					d=additional_deps[x.id]
				except KeyError:
					continue
				for v in d:
					if isinstance(v,Node.Node):
						bld.rescan(v.parent)
						variant=v.variant(self.env)
						try:
							v=bld.node_sigs[variant][v.id]
						except KeyError:
							v=''
					elif hasattr(v,'__call__'):
						# callable dependencies contribute their return value
						v=v()
					m.update(v)
		for x in self.deps_nodes:
			v=bld.node_sigs[x.variant(self.env)][x.id]
			m.update(v)
		return m.digest()
	def sig_vars(self):
		"""Hash the environment variables this task depends on (class vars + instance dep_vars)."""
		m=md5()
		bld=self.generator.bld
		env=self.env
		act_sig=bld.hash_env_vars(env,self.__class__.vars)
		m.update(act_sig)
		dep_vars=getattr(self,'dep_vars',None)
		if dep_vars:
			m.update(bld.hash_env_vars(env,dep_vars))
		return m.digest()
	# scan is replaced in subclasses that discover implicit dependencies
	scan=None
	def sig_implicit_deps(self):
		"""Hash the scanner-found dependencies, rescanning when the stored signature is stale."""
		bld=self.generator.bld
		key=self.unique_id()
		prev_sigs=bld.task_sigs.get(key,())
		if prev_sigs:
			try:
				# reuse the previous result when recomputing gives the same hash
				if prev_sigs[2]==self.compute_sig_implicit_deps():
					return prev_sigs[2]
			except(KeyError,OSError):
				pass
		(nodes,names)=self.scan()
		if Logs.verbose:
			debug('deps: scanner for %s returned %s %s'%(str(self),str(nodes),str(names)))
		bld.node_deps[key]=nodes
		bld.raw_deps[key]=names
		sig=self.compute_sig_implicit_deps()
		return sig
	def compute_sig_implicit_deps(self):
		"""Hash the signatures of the nodes recorded as implicit dependencies."""
		m=md5()
		upd=m.update
		bld=self.generator.bld
		tstamp=bld.node_sigs
		env=self.env
		for k in bld.node_deps.get(self.unique_id(),[]):
			if not k.parent.id in bld.cache_scanned_folders:
				bld.rescan(k.parent)
			if k.id&3==2:
				# nodes with id&3==2 are stored under key 0 rather than the
				# variant -- presumably source nodes; see the Node id encoding
				upd(tstamp[0][k.id])
			else:
				upd(tstamp[env.variant()][k.id])
		return m.digest()
def funex(c):
	"""Compile the python source string *c* and return the function 'f' it defines."""
	namespace={}
	exec(c,namespace)
	return namespace['f']
# matches '\', '$$' and '${VAR...}' substitution points inside rule strings;
# groups: 'var' = the variable name, 'code' = an optional python accessor
# suffix such as an index or slice (non-greedy, up to the closing brace)
reg_act=re.compile(r"(?P<backslash>\\)|(?P<dollar>\$\$)|(?P<subst>\$\{(?P<var>\w+)(?P<code>.*?)\})",re.M)
def compile_fun_shell(name,line):
	"""Compile the rule string *line* into a function executing it through a shell.

	Returns (function, list of env variable names the command depends on).
	"""
	extr=[]
	def repl(match):
		# replace ${VAR...} with '%s' and record (VAR, accessor) in extr
		g=match.group
		if g('dollar'):return"$"
		elif g('backslash'):return'\\\\'
		elif g('subst'):extr.append((g('var'),g('code')));return"%s"
		return None
	line=reg_act.sub(repl,line)
	parm=[]
	dvars=[]
	app=parm.append
	for(var,meth)in extr:
		# SRC/TGT expand to the input/output node paths; anything else reads env
		if var=='SRC':
			if meth:app('task.inputs%s'%meth)
			else:app('" ".join([a.srcpath(env) for a in task.inputs])')
		elif var=='TGT':
			if meth:app('task.outputs%s'%meth)
			else:app('" ".join([a.bldpath(env) for a in task.outputs])')
		else:
			if not var in dvars:dvars.append(var)
			app("p('%s')"%var)
	# build the '% (expr, ...)' tail interpolated into the template
	if parm:parm="%% (%s) "%(',\n\t\t'.join(parm))
	else:parm=''
	c=COMPILE_TEMPLATE_SHELL%(line,parm)
	debug('action: %s'%c)
	return(funex(c),dvars)
def compile_fun_noshell(name,line):
	"""Compile the rule string *line* into a function building an argument list (no shell).

	Returns (function, list of env variable names the command depends on).
	"""
	extr=[]
	def repl(match):
		# replace ${VAR...} with a split marker and record (VAR, accessor)
		g=match.group
		if g('dollar'):return"$"
		elif g('subst'):extr.append((g('var'),g('code')));return"<<|@|>>"
		return None
	line2=reg_act.sub(repl,line)
	params=line2.split('<<|@|>>')
	buf=[]
	dvars=[]
	app=buf.append
	for x in xrange(len(extr)):
		# literal text before the substitution point
		params[x]=params[x].strip()
		if params[x]:
			app("lst.extend(%r)"%params[x].split())
		(var,meth)=extr[x]
		if var=='SRC':
			if meth:app('lst.append(task.inputs%s)'%meth)
			else:app("lst.extend([a.srcpath(env) for a in task.inputs])")
		elif var=='TGT':
			if meth:app('lst.append(task.outputs%s)'%meth)
			else:app("lst.extend([a.bldpath(env) for a in task.outputs])")
		else:
			app('lst.extend(to_list(env[%r]))'%var)
			if not var in dvars:dvars.append(var)
	if extr:
		# trailing literal text after the last substitution point
		if params[-1]:
			app("lst.extend(%r)"%params[-1].split())
	fun=COMPILE_TEMPLATE_NOSHELL%"\n\t".join(buf)
	debug('action: %s'%fun)
	return(funex(fun),dvars)
def compile_fun(name,line,shell=None):
	"""Compile a rule string into (function, dependent env vars), choosing the shell or no-shell generator.

	Redirections or '&&' in *line* force shell execution; otherwise the
	default is no-shell on win32 and shell elsewhere.
	"""
	# note: find()>0, so a redirection at position 0 does not force the shell
	needs_shell=line.find('<')>0 or line.find('>')>0 or line.find('&&')>0
	if needs_shell:
		shell=True
	elif shell is None:
		shell=(sys.platform!='win32')
	if shell:
		return compile_fun_shell(name,line)
	return compile_fun_noshell(name,line)
def simple_task_type(name,line,color='GREEN',vars=[],ext_in=[],ext_out=[],before=[],after=[],shell=None):
	"""Create and register a Task subclass whose run method executes the rule string *line*."""
	(fun,dvars)=compile_fun(name,line,shell)
	fun.code=line
	# an explicit 'vars' list wins over the variables found in the rule string
	return task_type_from_func(name,fun,vars or dvars,color,ext_in,ext_out,before,after)
def task_type_from_func(name,func,vars=[],color='GREEN',ext_in=[],ext_out=[],before=[],after=[]):
	"""Create, register and return a Task subclass named *name* whose run method is *func*."""
	attrs={
		'run':func,
		'vars':vars,
		'color':color,
		'name':name,
		'ext_in':Utils.to_list(ext_in),
		'ext_out':Utils.to_list(ext_out),
		'before':Utils.to_list(before),
		'after':Utils.to_list(after),
	}
	cls=type(Task)(name,(Task,),attrs)
	TaskBase.classes[name]=cls
	return cls
def always_run(cls):
	"""Patch *cls* so its tasks always run, whatever the signatures say."""
	old=cls.runnable_status
	def always(self):
		# keep the side effects of the original status check
		old(self)
		return RUN_ME
	cls.runnable_status=always
def update_outputs(cls):
	"""Patch *cls* so post_run refreshes the stored signature of the first output from the file contents."""
	old_post_run=cls.post_run
	def post_run(self):
		old_post_run(self)
		node=self.outputs[0]
		bld=node.__class__.bld
		# hash the produced file so an unchanged output does not trigger rebuilds
		bld.node_sigs[self.env.variant()][node.id]=Utils.h_file(node.abspath(self.env))
	cls.post_run=post_run
def extract_outputs(tasks):
	"""Make each task reading a node run after the task that creates it (grouped per variant)."""
	v={}
	for x in tasks:
		try:
			(ins,outs)=v[x.env.variant()]
		except KeyError:
			# first task seen for this variant
			ins={}
			outs={}
			v[x.env.variant()]=(ins,outs)
		for a in getattr(x,'inputs',[]):
			try:ins[a.id].append(x)
			except KeyError:ins[a.id]=[x]
		for a in getattr(x,'outputs',[]):
			try:outs[a.id].append(x)
			except KeyError:outs[a.id]=[x]
	for(ins,outs)in v.values():
		# nodes that are both read and written link consumers to producers
		links=set(ins.iterkeys()).intersection(outs.iterkeys())
		for k in links:
			for a in ins[k]:
				for b in outs[k]:
					a.set_run_after(b)
def extract_deps(tasks):
	"""Add ordering constraints between *tasks*.

	First links producers to consumers through declared outputs
	(extract_outputs), then through the implicit dependencies found by the
	scanners: a task reading a node that another task creates must wait for it.
	"""
	extract_outputs(tasks)
	out_to_task={}
	for x in tasks:
		v=x.env.variant()
		try:
			lst=x.outputs
		except AttributeError:
			pass
		else:
			for node in lst:
				out_to_task[(v,node.id)]=x
	dep_to_task={}
	for x in tasks:
		try:
			# force the scanners to run so bld.node_deps is populated
			x.signature()
		except Exception:
			pass
		variant=x.env.variant()
		for k in x.generator.bld.node_deps.get(x.unique_id(),[]):
			# bug fix: key on this task's variant, not the stale 'v'
			# left over from the previous loop
			try:dep_to_task[(variant,k.id)].append(x)
			except KeyError:dep_to_task[(variant,k.id)]=[x]
	deps=set(dep_to_task.keys()).intersection(set(out_to_task.keys()))
	for idx in deps:
		for k in dep_to_task[idx]:
			k.set_run_after(out_to_task[idx])
	# the signatures were computed too early; discard them so they are redone
	for x in tasks:
		try:
			delattr(x,'cache_sig')
		except AttributeError:
			pass
#! /usr/bin/env python
# encoding: utf-8
import sys
if sys.hexversion < 0x020400f0: from sets import Set as set
import os,traceback,copy
import Build,Task,Utils,Logs,Options
from Logs import debug,error,warn
from Constants import*
# common misspellings of task generator attribute names mapped to the real
# attribute; task_gen.__setattr__ warns about and silently corrects them
typos={'sources':'source','targets':'target','include':'includes','define':'defines','importpath':'importpaths','install_var':'install_path','install_subdir':'install_path','inst_var':'install_path','inst_dir':'install_path','feature':'features',}
class register_obj(type):
	"""Metaclass: subclasses named '<name>_taskgen' auto-register in task_gen.classes under '<name>'."""
	def __init__(cls,name,bases,dict):
		super(register_obj,cls).__init__(name,bases,dict)
		name=cls.__name__
		suffix='_taskgen'
		if name.endswith(suffix):
			task_gen.classes[name.replace(suffix,'')]=cls
class task_gen(object):
	"""Creates tasks from high-level declarations (source/target/features)."""
	__metaclass__=register_obj
	# file extension -> method mappings shared by all instances
	mappings={}
	mapped={}
	# precedence constraints between the methods to execute (see apply)
	prec=Utils.DefaultDict(list)
	# feature name -> set of method names bound to that feature
	traits=Utils.DefaultDict(set)
	# name -> task generator subclass, filled by the register_obj metaclass
	classes={}
	def __init__(self,*kw,**kwargs):
		"""Create a task generator; positional arguments are feature names, keyword arguments become attributes."""
		self.prec=Utils.DefaultDict(list)
		# source and target are whitespace-separated strings, parsed later
		self.source=''
		self.target=''
		# names of the methods to execute (completed from the features in apply)
		self.meths=[]
		self.mappings={}
		self.features=list(kw)
		self.tasks=[]
		self.default_chmod=O644
		self.default_install_path=None
		self.allnodes=[]
		self.bld=kwargs.get('bld',Build.bld)
		self.env=self.bld.env.copy()
		self.path=self.bld.path
		self.name=''
		# per-directory counter, giving each generator a unique index
		self.idx=self.bld.idx[self.path.id]=self.bld.idx.get(self.path.id,0)+1
		for key,val in kwargs.iteritems():
			setattr(self,key,val)
		self.bld.task_manager.add_task_gen(self)
		self.bld.all_task_gen.append(self)
	def __str__(self):
		return("<task_gen '%s' of type %s defined in %s>"%(self.name or self.target,self.__class__.__name__,str(self.path)))
	def __setattr__(self,name,attr):
		"""Set the attribute, transparently correcting the common misspellings listed in 'typos' (with a warning)."""
		real=typos.get(name,name)
		if real!=name:
			warn('typo %s -> %s'%(name,real))
			if Logs.verbose>0:
				traceback.print_stack()
		object.__setattr__(self,real,attr)
def to_list(self,value):
if isinstance(value,str):return value.split()
else:return value
def apply(self):
keys=set(self.meths)
self.features=Utils.to_list(self.features)
for x in self.features+['*']:
st=task_gen.traits[x]
if not st:
warn('feature %r does not exist - bind at least one method to it'%x)
keys.update(st)
prec={}
prec_tbl=self.prec or task_gen.prec
for x in prec_tbl:
if x in keys:
prec[x]=prec_tbl[x]
tmp=[]
for a in keys:
for x in prec.values():
if a in x:break
else:
tmp.append(a)
out=[]
while tmp:
e=tmp.pop()
if e in keys:out.append(e)
try:
nlst=prec[e]
except KeyError:
pass
else:
del prec[e]
for x in nlst:
for y in prec: