ref: 904702d027b3e5e51e28a12383eecea0b55611da
parent: 55255070cdc7f914525c08d5689fa5c9dc9e7045
author: Paul Brossier <piem@piem.org>
date: Sat Mar 14 14:06:10 EDT 2015
waf, waflib: update to 1.8.7
--- a/waf
+++ b/waf
@@ -1,6 +1,6 @@
#!/usr/bin/env python
# encoding: ISO8859-1
-# Thomas Nagy, 2005-2012
+# Thomas Nagy, 2005-2015
"""
Redistribution and use in source and binary forms, with or without
@@ -30,13 +30,15 @@
POSSIBILITY OF SUCH DAMAGE.
"""
-import os, sys
+import os, sys, inspect
-VERSION="1.7.15"
-REVISION="de1cb53b86321cda764be4b3c38c3d20"
+VERSION="1.8.7"
+REVISION="073060339ba56c09e143ed641610cbec"
+GIT="x"
INSTALL=''
-C1='#+'
-C2='#&'
+C1='#/'
+C2='#-'
+C3='#+'
cwd = os.getcwd()
join = os.path.join
@@ -53,8 +55,8 @@
print(('\033[91mError: %s\033[0m' % m))
sys.exit(1)
-def unpack_wafdir(dir):
- f = open(sys.argv[0],'rb')
+def unpack_wafdir(dir, src):
+ f = open(src,'rb')
c = 'corrupt archive (%d)'
while 1:
line = f.readline()
@@ -65,13 +67,13 @@
if f.readline() != b('#<==\n'): err(c % 2)
break
if not txt: err(c % 3)
- txt = txt[1:-1].replace(b(C1), b('\n')).replace(b(C2), b('\r'))
+ txt = txt[1:-1].replace(b(C1), b('\n')).replace(b(C2), b('\r')).replace(b(C3), b('\x00'))
import shutil, tarfile
try: shutil.rmtree(dir)
except OSError: pass
try:
- for x in ['Tools', 'extras']:
+ for x in ('Tools', 'extras'):
os.makedirs(join(dir, 'waflib', x))
except OSError:
err("Cannot unpack waf lib into %s\nMove waf in a writable directory" % dir)@@ -100,7 +102,7 @@
finally:
t.close()
- for x in ['Tools', 'extras']:
+ for x in ('Tools', 'extras'):
os.chmod(join('waflib',x), 493)
if sys.hexversion<0x300000f:
@@ -127,8 +129,8 @@
pass
def find_lib():
- name = sys.argv[0]
- base = os.path.dirname(os.path.abspath(name))
+ src = os.path.abspath(inspect.getfile(inspect.getmodule(err)))
+ base, name = os.path.split(src)
#devs use $WAFDIR
w=test(os.environ.get('WAFDIR', ''))
@@ -141,7 +143,7 @@
err('waf-light requires waflib -> export WAFDIR=/folder')
dirname = '%s-%s-%s' % (WAF, VERSION, REVISION)
- for i in [INSTALL,'/usr','/usr/local','/opt']:
+ for i in (INSTALL,'/usr','/usr/local','/opt'):
w = test(i + '/lib/' + dirname)
if w: return w
@@ -151,7 +153,7 @@
if w: return w
#unpack
- unpack_wafdir(dir)
+ unpack_wafdir(dir, src)
return dir
wafdir = find_lib()
--- a/waflib/Build.py
+++ b/waflib/Build.py
@@ -2,7 +2,7 @@
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-import os,sys,errno,re,shutil
+import os,sys,errno,re,shutil,stat
try:
import cPickle
except ImportError:
@@ -31,7 +31,7 @@
self.out_dir=kw.get('out_dir',Context.out_dir)
self.cache_dir=kw.get('cache_dir',None)
if not self.cache_dir:
- self.cache_dir=self.out_dir+os.sep+CACHE_DIR
+ self.cache_dir=os.path.join(self.out_dir,CACHE_DIR)
self.all_envs={}
self.task_sigs={}
self.node_deps={}
@@ -42,8 +42,6 @@
self.jobs=Options.options.jobs
self.targets=Options.options.targets
self.keep=Options.options.keep
- self.cache_global=Options.cache_global
- self.nocache=Options.options.nocache
self.progress_bar=Options.options.progress_bar
self.deps_man=Utils.defaultdict(list)
self.current_group=0
@@ -109,23 +107,19 @@
self.recurse([self.run_dir])
self.pre_build()
self.timer=Utils.Timer()
- if self.progress_bar:
- sys.stderr.write(Logs.colors.cursor_off)
try:
self.compile()
finally:
- if self.progress_bar==1:
+ if self.progress_bar==1 and sys.stderr.isatty():
c=len(self.returned_tasks)or 1
- self.to_log(self.progress_line(c,c,Logs.colors.BLUE,Logs.colors.NORMAL))
- print('')
- sys.stdout.flush()
- sys.stderr.write(Logs.colors.cursor_on)
+ m=self.progress_line(c,c,Logs.colors.BLUE,Logs.colors.NORMAL)
+ Logs.info(m,extra={'stream':sys.stderr,'c1':Logs.colors.cursor_off,'c2':Logs.colors.cursor_on})
Logs.info("Waf: Leaving directory `%s'"%self.variant_dir)
self.post_build()
def restore(self):
try:
env=ConfigSet.ConfigSet(os.path.join(self.cache_dir,'build.config.py'))
- except(IOError,OSError):
+ except EnvironmentError:
pass
else:
if env['version']<Context.HEXVERSION:
@@ -253,6 +247,8 @@
except KeyError:
raise Errors.WafError('Could not find a task generator for the name %r'%name)
def progress_line(self,state,total,col1,col2):
+ if not sys.stderr.isatty():
+ return''
n=len(str(total))
Utils.rot_idx+=1
ind=Utils.rot_chr[Utils.rot_idx%4]
@@ -265,7 +261,7 @@
if cols<7:cols=7
ratio=((cols*state)//total)-1
bar=('='*ratio+'>').ljust(cols)
- msg=Utils.indicator%(left,bar,right)
+ msg=Logs.indicator%(left,bar,right)
return msg
def declare_chain(self,*k,**kw):
return TaskGen.declare_chain(*k,**kw)
@@ -325,6 +321,7 @@
for i in range(len(self.groups)):
if id(g)==id(self.groups[i]):
self.current_group=i
+ break
else:
self.current_group=idx
def total(self):
@@ -473,16 +470,16 @@
destfile=os.path.join(destpath,y.path_from(self.path))
else:
destfile=os.path.join(destpath,y.name)
- self.generator.bld.do_install(y.abspath(),destfile,self.chmod)
+ self.generator.bld.do_install(y.abspath(),destfile,chmod=self.chmod,tsk=self)
def exec_install_as(self):
destfile=self.get_install_path()
- self.generator.bld.do_install(self.inputs[0].abspath(),destfile,self.chmod)
+ self.generator.bld.do_install(self.inputs[0].abspath(),destfile,chmod=self.chmod,tsk=self)
def exec_symlink_as(self):
destfile=self.get_install_path()
src=self.link
if self.relative_trick:
src=os.path.relpath(src,os.path.dirname(destfile))
- self.generator.bld.do_link(src,destfile)
+ self.generator.bld.do_link(src,destfile,tsk=self)
class InstallContext(BuildContext):
'''installs the targets on the system'''
cmd='install'
@@ -490,7 +487,12 @@
super(InstallContext,self).__init__(**kw)
self.uninstall=[]
self.is_install=INSTALL
- def do_install(self,src,tgt,chmod=Utils.O644):
+ def copy_fun(self,src,tgt,**kw):
+ if Utils.is_win32 and len(tgt)>259 and not tgt.startswith('\\\\?\\'):
+ tgt='\\\\?\\'+tgt
+ shutil.copy2(src,tgt)
+ os.chmod(tgt,kw.get('chmod',Utils.O644))
+ def do_install(self,src,tgt,**kw):
d,_=os.path.split(tgt)
if not d:
raise Errors.WafError('Invalid installation given %r->%r'%(src,tgt))
@@ -510,19 +512,22 @@
if not self.progress_bar:
Logs.info('+ install %s (from %s)'%(tgt,srclbl))
try:
+ os.chmod(tgt,Utils.O644|stat.S_IMODE(os.stat(tgt).st_mode))
+ except EnvironmentError:
+ pass
+ try:
os.remove(tgt)
except OSError:
pass
try:
- shutil.copy2(src,tgt)
- os.chmod(tgt,chmod)
+ self.copy_fun(src,tgt,**kw)
except IOError:
try:
os.stat(src)
- except(OSError,IOError):
+ except EnvironmentError:
Logs.error('File %r does not exist'%src)
raise Errors.WafError('Could not install the file %r'%tgt)
- def do_link(self,src,tgt):
+ def do_link(self,src,tgt,**kw):
d,_=os.path.split(tgt)
Utils.check_dir(d)
link=False
@@ -545,11 +550,12 @@
if tsk.runnable_status()==Task.ASK_LATER:
raise self.WafError('cannot post the task %r'%tsk)
tsk.run()
- def install_files(self,dest,files,env=None,chmod=Utils.O644,relative_trick=False,cwd=None,add=True,postpone=True):
+ def install_files(self,dest,files,env=None,chmod=Utils.O644,relative_trick=False,cwd=None,add=True,postpone=True,task=None):
tsk=inst(env=env or self.env)
tsk.bld=self
tsk.path=cwd or self.path
tsk.chmod=chmod
+ tsk.task=task
if isinstance(files,waflib.Node.Node):
tsk.source=[files]
else:
@@ -560,18 +566,19 @@
if add:self.add_to_group(tsk)
self.run_task_now(tsk,postpone)
return tsk
- def install_as(self,dest,srcfile,env=None,chmod=Utils.O644,cwd=None,add=True,postpone=True):
+ def install_as(self,dest,srcfile,env=None,chmod=Utils.O644,cwd=None,add=True,postpone=True,task=None):
tsk=inst(env=env or self.env)
tsk.bld=self
tsk.path=cwd or self.path
tsk.chmod=chmod
tsk.source=[srcfile]
+ tsk.task=task
tsk.dest=dest
tsk.exec_task=tsk.exec_install_as
if add:self.add_to_group(tsk)
self.run_task_now(tsk,postpone)
return tsk
- def symlink_as(self,dest,src,env=None,cwd=None,add=True,postpone=True,relative_trick=False):
+ def symlink_as(self,dest,src,env=None,cwd=None,add=True,postpone=True,relative_trick=False,task=None):
if Utils.is_win32:
return
tsk=inst(env=env or self.env)
@@ -579,6 +586,7 @@
tsk.dest=dest
tsk.path=cwd or self.path
tsk.source=[]
+ tsk.task=task
tsk.link=src
tsk.relative_trick=relative_trick
tsk.exec_task=tsk.exec_symlink_as
@@ -591,7 +599,14 @@
def __init__(self,**kw):
super(UninstallContext,self).__init__(**kw)
self.is_install=UNINSTALL
- def do_install(self,src,tgt,chmod=Utils.O644):
+ def rm_empty_dirs(self,tgt):
+ while tgt:
+ tgt=os.path.dirname(tgt)
+ try:
+ os.rmdir(tgt)
+ except OSError:
+ break
+ def do_install(self,src,tgt,**kw):
if not self.progress_bar:
Logs.info('- remove %s'%tgt)
self.uninstall.append(tgt)
@@ -604,13 +619,8 @@
Logs.warn('build: some files could not be uninstalled (retry with -vv to list them)')
if Logs.verbose>1:
Logs.warn('Could not remove %s (error code %r)'%(e.filename,e.errno))
- while tgt:
- tgt=os.path.dirname(tgt)
- try:
- os.rmdir(tgt)
- except OSError:
- break
- def do_link(self,src,tgt):
+ self.rm_empty_dirs(tgt)
+ def do_link(self,src,tgt,**kw):
try:
if not self.progress_bar:
Logs.info('- remove %s'%tgt)
@@ -617,12 +627,7 @@
os.remove(tgt)
except OSError:
pass
- while tgt:
- tgt=os.path.dirname(tgt)
- try:
- os.rmdir(tgt)
- except OSError:
- break
+ self.rm_empty_dirs(tgt)
def execute(self):
try:
def runnable_status(self):
@@ -754,5 +759,3 @@
else:
return pattern.match(node.abspath())
return match
-BuildContext.store=Utils.nogc(BuildContext.store)
-BuildContext.restore=Utils.nogc(BuildContext.restore)
--- a/waflib/ConfigSet.py
+++ b/waflib/ConfigSet.py
@@ -89,9 +89,9 @@
self.table[key]=value
return value
def append_value(self,var,val):
- current_value=self._get_list_value_for_modification(var)
if isinstance(val,str):
val=[val]
+ current_value=self._get_list_value_for_modification(var)
current_value.extend(val)
def prepend_value(self,var,val):
if isinstance(val,str):
--- a/waflib/Configure.py
+++ b/waflib/Configure.py
@@ -2,14 +2,8 @@
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-import os,shlex,sys,time
+import os,shlex,sys,time,re,shutil
from waflib import ConfigSet,Utils,Options,Logs,Context,Build,Errors
-try:
- from urllib import request
-except ImportError:
- from urllib import urlopen
-else:
- urlopen=request.urlopen
BREAK='break'
CONTINUE='continue'
WAF_CONFIG_LOG='config.log'
@@ -18,37 +12,6 @@
# waf %(wafver)s (abi %(abi)s, python %(pyver)x on %(systype)s)
# using %(args)s
#'''
-def download_check(node):
- pass
-def download_tool(tool,force=False,ctx=None):
- for x in Utils.to_list(Context.remote_repo):
- for sub in Utils.to_list(Context.remote_locs):
- url='/'.join((x,sub,tool+'.py'))
- try:
- web=urlopen(url)
- try:
- if web.getcode()!=200:
- continue
- except AttributeError:
- pass
- except Exception:
- continue
- else:
- tmp=ctx.root.make_node(os.sep.join((Context.waf_dir,'waflib','extras',tool+'.py')))
- tmp.write(web.read(),'wb')
- Logs.warn('Downloaded %s from %s'%(tool,url))
- download_check(tmp)
- try:
- module=Context.load_tool(tool)
- except Exception:
- Logs.warn('The tool %s from %s is unusable'%(tool,url))
- try:
- tmp.delete()
- except Exception:
- pass
- continue
- return module
- raise Errors.WafError('Could not load the Waf tool')
class ConfigurationContext(Context.Context):
'''configures the project'''
cmd='configure'
@@ -96,6 +59,7 @@
out=getattr(Context.g_module,Context.OUT,None)
if not out:
out=Options.lockfile.replace('.lock-waf_%s_'%sys.platform,'').replace('.lock-waf','')
+ out=os.path.realpath(out)
self.bldnode=(os.path.isabs(out)and self.root or self.path).make_node(out)
self.bldnode.mkdir()
if not os.path.isdir(self.bldnode.abspath()):
@@ -139,11 +103,11 @@
env['files']=self.files
env['environ']=dict(self.environ)
if not self.env.NO_LOCK_IN_RUN:
- env.store(Context.run_dir+os.sep+Options.lockfile)
+ env.store(os.path.join(Context.run_dir,Options.lockfile))
if not self.env.NO_LOCK_IN_TOP:
- env.store(Context.top_dir+os.sep+Options.lockfile)
+ env.store(os.path.join(Context.top_dir,Options.lockfile))
if not self.env.NO_LOCK_IN_OUT:
- env.store(Context.out_dir+os.sep+Options.lockfile)
+ env.store(os.path.join(Context.out_dir,Options.lockfile))
def prepare_env(self,env):
if not env.PREFIX:
if Options.options.prefix or Utils.is_win32:
@@ -151,9 +115,15 @@
else:
env.PREFIX=''
if not env.BINDIR:
- env.BINDIR=Utils.subst_vars('${PREFIX}/bin',env)
+ if Options.options.bindir:
+ env.BINDIR=os.path.abspath(os.path.expanduser(Options.options.bindir))
+ else:
+ env.BINDIR=Utils.subst_vars('${PREFIX}/bin',env)
if not env.LIBDIR:
- env.LIBDIR=Utils.subst_vars('${PREFIX}/lib',env)
+ if Options.options.libdir:
+ env.LIBDIR=os.path.abspath(os.path.expanduser(Options.options.libdir))
+ else:
+ env.LIBDIR=Utils.subst_vars('${PREFIX}/lib%s'%Utils.lib64(),env)
def store(self):
n=self.cachedir.make_node('build.config.py')
n.write('version = 0x%x\ntools = %r\n'%(Context.HEXVERSION,self.tools))
@@ -162,7 +132,7 @@
for key in self.all_envs:
tmpenv=self.all_envs[key]
tmpenv.store(os.path.join(self.cachedir.abspath(),key+Build.CACHE_SUFFIX))
- def load(self,input,tooldir=None,funs=None,download=True):
+ def load(self,input,tooldir=None,funs=None):
tools=Utils.to_list(input)
if tooldir:tooldir=Utils.to_list(tooldir)
for tool in tools:
@@ -173,14 +143,9 @@
self.tool_cache.append(mag)
module=None
try:
- module=Context.load_tool(tool,tooldir)
+ module=Context.load_tool(tool,tooldir,ctx=self)
except ImportError ,e:
- if Options.options.download:
- module=download_tool(tool,ctx=self)
- if not module:
- self.fatal('Could not load the Waf tool %r or download a suitable replacement from the repository (sys.path %r)\n%s'%(tool,sys.path,e))
- else:
- self.fatal('Could not load the Waf tool %r from %r (try the --download option?):\n%s'%(tool,sys.path,e))
+ self.fatal('Could not load the Waf tool %r from %r\n%s'%(tool,sys.path,e))
except Exception ,e:
self.to_log('imp %r (%r & %r)'%(tool,tooldir,funs))self.to_log(Utils.ex_stack())
@@ -243,14 +208,14 @@
return[cmd]
return cmd
@conf
-def check_waf_version(self,mini='1.6.99',maxi='1.8.0'):
- self.start_msg('Checking for waf version in %s-%s'%(str(mini),str(maxi)))
+def check_waf_version(self,mini='1.7.99',maxi='1.9.0',**kw):
+ self.start_msg('Checking for waf version in %s-%s'%(str(mini),str(maxi)),**kw)
ver=Context.HEXVERSION
if Utils.num2ver(mini)>ver:
self.fatal('waf version should be at least %r (%r found)'%(Utils.num2ver(mini),ver))
if Utils.num2ver(maxi)<ver:
self.fatal('waf version should be at most %r (%r found)'%(Utils.num2ver(maxi),ver))
- self.end_msg('ok')
+ self.end_msg('ok',**kw)
@conf
def find_file(self,filename,path_list=[]):
for n in Utils.to_list(filename):
@@ -262,56 +227,153 @@
@conf
def find_program(self,filename,**kw):
exts=kw.get('exts',Utils.is_win32 and'.exe,.com,.bat,.cmd'or',.sh,.pl,.py')
- environ=kw.get('environ',os.environ)
+ environ=kw.get('environ',getattr(self,'environ',os.environ))
ret=''
filename=Utils.to_list(filename)
+ msg=kw.get('msg',', '.join(filename))
var=kw.get('var','')
if not var:
- var=filename[0].upper()
- if self.env[var]:
- ret=self.env[var]
- elif var in environ:
- ret=environ[var]
+ var=re.sub(r'[-.]','_',filename[0].upper())
path_list=kw.get('path_list','')
- if not ret:
- if path_list:
- path_list=Utils.to_list(path_list)
+ if path_list:
+ path_list=Utils.to_list(path_list)
+ else:
+ path_list=environ.get('PATH','').split(os.pathsep)
+ if var in environ:
+ filename=environ[var]
+ if os.path.isfile(filename):
+ ret=[filename]
else:
- path_list=environ.get('PATH','').split(os.pathsep)
- if not isinstance(filename,list):
- filename=[filename]
- for a in exts.split(','):
- if ret:
- break
- for b in filename:
- if ret:
- break
- for c in path_list:
- if ret:
- break
- x=os.path.expanduser(os.path.join(c,b+a))
- if os.path.isfile(x):
- ret=x
- if not ret and Utils.winreg:
- ret=Utils.get_registry_app_path(Utils.winreg.HKEY_CURRENT_USER,filename)
- if not ret and Utils.winreg:
- ret=Utils.get_registry_app_path(Utils.winreg.HKEY_LOCAL_MACHINE,filename)
- self.msg('Checking for program '+','.join(filename),ret or False)
- self.to_log('find program=%r paths=%r var=%r -> %r'%(filename,path_list,var,ret))
+ ret=self.cmd_to_list(filename)
+ elif self.env[var]:
+ ret=self.env[var]
+ ret=self.cmd_to_list(ret)
+ else:
+ if not ret:
+ ret=self.find_binary(filename,exts.split(','),path_list)
+ if not ret and Utils.winreg:
+ ret=Utils.get_registry_app_path(Utils.winreg.HKEY_CURRENT_USER,filename)
+ if not ret and Utils.winreg:
+ ret=Utils.get_registry_app_path(Utils.winreg.HKEY_LOCAL_MACHINE,filename)
+ ret=self.cmd_to_list(ret)
+ if ret:
+ if len(ret)==1:
+ retmsg=ret[0]
+ else:
+ retmsg=ret
+ else:
+ retmsg=False
+ self.msg("Checking for program '%s'"%msg,retmsg,**kw)+ if not kw.get('quiet',None):+ self.to_log('find program=%r paths=%r var=%r -> %r'%(filename,path_list,var,ret))if not ret:
- self.fatal(kw.get('errmsg','')or'Could not find the program %s'%','.join(filename))
- if var:
+ self.fatal(kw.get('errmsg','')or'Could not find the program %r'%filename)
+ interpreter=kw.get('interpreter',None)
+ if interpreter is None:
+ if not Utils.check_exe(ret[0],env=environ):
+ self.fatal('Program %r is not executable'%ret)
self.env[var]=ret
+ else:
+ self.env[var]=self.env[interpreter]+ret
return ret
@conf
-def find_perl_program(self,filename,path_list=[],var=None,environ=None,exts=''):
+def find_binary(self,filenames,exts,paths):
+ for f in filenames:
+ for ext in exts:
+ exe_name=f+ext
+ if os.path.isabs(exe_name):
+ if os.path.isfile(exe_name):
+ return exe_name
+ else:
+ for path in paths:
+ x=os.path.expanduser(os.path.join(path,exe_name))
+ if os.path.isfile(x):
+ return x
+ return None
+@conf
+def run_build(self,*k,**kw):
+ lst=[str(v)for(p,v)in kw.items()if p!='env']
+ h=Utils.h_list(lst)
+ dir=self.bldnode.abspath()+os.sep+(not Utils.is_win32 and'.'or'')+'conf_check_'+Utils.to_hex(h)
try:
- app=self.find_program(filename,path_list=path_list,var=var,environ=environ,exts=exts)
- except Exception:
- self.find_program('perl',var='PERL')
- app=self.find_file(filename,os.environ['PATH'].split(os.pathsep))
- if not app:
+ os.makedirs(dir)
+ except OSError:
+ pass
+ try:
+ os.stat(dir)
+ except OSError:
+ self.fatal('cannot use the configuration test folder %r'%dir)
+ cachemode=getattr(Options.options,'confcache',None)
+ if cachemode==1:
+ try:
+ proj=ConfigSet.ConfigSet(os.path.join(dir,'cache_run_build'))
+ except OSError:
+ pass
+ except IOError:
+ pass
+ else:
+ ret=proj['cache_run_build']
+ if isinstance(ret,str)and ret.startswith('Test does not build'):
+ self.fatal(ret)
+ return ret
+ bdir=os.path.join(dir,'testbuild')
+ if not os.path.exists(bdir):
+ os.makedirs(bdir)
+ self.test_bld=bld=Build.BuildContext(top_dir=dir,out_dir=bdir)
+ bld.init_dirs()
+ bld.progress_bar=0
+ bld.targets='*'
+ bld.logger=self.logger
+ bld.all_envs.update(self.all_envs)
+ bld.env=kw['env']
+ bld.kw=kw
+ bld.conf=self
+ kw['build_fun'](bld)
+ ret=-1
+ try:
+ try:
+ bld.compile()
+ except Errors.WafError:
+ ret='Test does not build: %s'%Utils.ex_stack()
+ self.fatal(ret)
+ else:
+ ret=getattr(bld,'retval',0)
+ finally:
+ if cachemode==1:
+ proj=ConfigSet.ConfigSet()
+ proj['cache_run_build']=ret
+ proj.store(os.path.join(dir,'cache_run_build'))
+ else:
+ shutil.rmtree(dir)
+ return ret
+@conf
+def ret_msg(self,msg,args):
+ if isinstance(msg,str):
+ return msg
+ return msg(args)
+@conf
+def test(self,*k,**kw):
+ if not'env'in kw:
+ kw['env']=self.env.derive()
+ if kw.get('validate',None):
+ kw['validate'](kw)
+ self.start_msg(kw['msg'],**kw)
+ ret=None
+ try:
+ ret=self.run_build(*k,**kw)
+ except self.errors.ConfigurationError:
+ self.end_msg(kw['errmsg'],'YELLOW',**kw)
+ if Logs.verbose>1:
raise
- if var:
- self.env[var]=Utils.to_list(self.env['PERL'])+[app]
- self.msg('Checking for %r'%filename,app)
+ else:
+ self.fatal('The configuration failed')
+ else:
+ kw['success']=ret
+ if kw.get('post_check',None):
+ ret=kw['post_check'](kw)
+ if ret:
+ self.end_msg(kw['errmsg'],'YELLOW',**kw)
+ self.fatal('The configuration failed %r'%ret)
+ else:
+ self.end_msg(self.ret_msg(kw['okmsg'],kw),**kw)
+ return ret
--- a/waflib/Context.py
+++ b/waflib/Context.py
@@ -2,12 +2,12 @@
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-import os,imp,sys
+import os,re,imp,sys
from waflib import Utils,Errors,Logs
import waflib.Node
-HEXVERSION=0x1070f00
-WAFVERSION="1.7.15"
-WAFREVISION="f63ac9793de2d4eaae884e55d4ff70a761dcbab2"
+HEXVERSION=0x1080700
+WAFVERSION="1.8.7"
+WAFREVISION="e5056b9ade7bb224f53baab13a0ce136344ab602"
ABI=98
DBFILE='.wafpickle-%s-%d-%d'%(sys.platform,sys.hexversion,ABI)
APPNAME='APPNAME'
@@ -71,6 +71,14 @@
self.logger=None
def __hash__(self):
return id(self)
+ def finalize(self):
+ try:
+ logger=self.logger
+ except AttributeError:
+ pass
+ else:
+ Logs.free_logger(logger)
+ delattr(self,'logger')
def load(self,tool_list,*k,**kw):
tools=Utils.to_list(tool_list)
path=Utils.to_list(kw.get('tooldir',''))
@@ -90,7 +98,7 @@
self.cur_script=self.stack_path.pop()
if self.cur_script:
self.path=self.cur_script.parent
- def recurse(self,dirs,name=None,mandatory=True,once=True):
+ def recurse(self,dirs,name=None,mandatory=True,once=True,encoding=None):
try:
cache=self.recurse_cache
except AttributeError:
@@ -105,7 +113,7 @@
cache[node]=True
self.pre_recurse(node)
try:
- function_code=node.read('rU')
+ function_code=node.read('rU',encoding)
exec(compile(function_code,node.abspath(),'exec'),self.exec_dict)
finally:
self.post_recurse(node)
@@ -116,7 +124,7 @@
cache[tup]=True
self.pre_recurse(node)
try:
- wscript_module=load_module(node.abspath())
+ wscript_module=load_module(node.abspath(),encoding=encoding)
user_function=getattr(wscript_module,(name or self.fun),None)
if not user_function:
if not mandatory:
@@ -140,6 +148,8 @@
kw['stdout']=subprocess.PIPE
if'stderr'not in kw:
kw['stderr']=subprocess.PIPE
+ if Logs.verbose and not kw['shell']and not Utils.check_exe(cmd[0]):
+ raise Errors.WafError("Program %s not found!"%cmd[0])try:
if kw['stdout']or kw['stderr']:
p=subprocess.Popen(cmd,**kw)
@@ -156,7 +166,7 @@
if self.logger:
self.logger.debug('out: %s'%out)
else:
- sys.stdout.write(out)
+ Logs.info(out,extra={'stream':sys.stdout,'c1':''})
if err:
if not isinstance(err,str):
err=err.decode(sys.stdout.encoding or'iso8859-1')
@@ -163,7 +173,7 @@
if self.logger:
self.logger.error('err: %s'%err)
else:
- sys.stderr.write(err)
+ Logs.info(err,extra={'stream':sys.stderr,'c1':''})
return ret
def cmd_and_log(self,cmd,**kw):
subprocess=Utils.subprocess
@@ -179,6 +189,8 @@
del kw['output']
else:
to_ret=STDOUT
+ if Logs.verbose and not kw['shell']and not Utils.check_exe(cmd[0]):
+ raise Errors.WafError("Program %s not found!"%cmd[0])kw['stdout']=kw['stderr']=subprocess.PIPE
if quiet is None:
self.to_log(cmd)
@@ -222,12 +234,24 @@
else:
sys.stderr.write(str(msg))
sys.stderr.flush()
- def msg(self,msg,result,color=None):
- self.start_msg(msg)
+ def msg(self,*k,**kw):
+ try:
+ msg=kw['msg']
+ except KeyError:
+ msg=k[0]
+ self.start_msg(msg,**kw)
+ try:
+ result=kw['result']
+ except KeyError:
+ result=k[1]
+ color=kw.get('color',None)
if not isinstance(color,str):
color=result and'GREEN'or'YELLOW'
- self.end_msg(result,color)
- def start_msg(self,msg):
+ self.end_msg(result,color,**kw)
+ def start_msg(self,*k,**kw):
+ if kw.get('quiet',None):
+ return
+ msg=kw.get('msg',None)or k[0]
try:
if self.in_msg:
self.in_msg+=1
@@ -242,10 +266,13 @@
for x in(self.line_just*'-',msg):
self.to_log(x)
Logs.pprint('NORMAL',"%s :"%msg.ljust(self.line_just),sep='')- def end_msg(self,result,color=None):
+ def end_msg(self,*k,**kw):
+ if kw.get('quiet',None):
+ return
self.in_msg-=1
if self.in_msg:
return
+ result=kw.get('result',None)or k[0]
defcolor='GREEN'
if result==True:
msg='ok'
@@ -255,15 +282,39 @@
else:
msg=str(result)
self.to_log(msg)
- Logs.pprint(color or defcolor,msg)
+ try:
+ color=kw['color']
+ except KeyError:
+ if len(k)>1 and k[1]in Logs.colors_lst:
+ color=k[1]
+ else:
+ color=defcolor
+ Logs.pprint(color,msg)
def load_special_tools(self,var,ban=[]):
global waf_dir
- lst=self.root.find_node(waf_dir).find_node('waflib/extras').ant_glob(var)
- for x in lst:
- if not x.name in ban:
- load_tool(x.name.replace('.py',''))
+ if os.path.isdir(waf_dir):
+ lst=self.root.find_node(waf_dir).find_node('waflib/extras').ant_glob(var)
+ for x in lst:
+ if not x.name in ban:
+ load_tool(x.name.replace('.py',''))
+ else:
+ from zipfile import PyZipFile
+ waflibs=PyZipFile(waf_dir)
+ lst=waflibs.namelist()
+ for x in lst:
+ if not re.match("waflib/extras/%s"%var.replace("*",".*"),var):+ continue
+ f=os.path.basename(x)
+ doban=False
+ for b in ban:
+ r=b.replace("*",".*")+ if re.match(b,f):
+ doban=True
+ if not doban:
+ f=f.replace('.py','')
+ load_tool(f)
cache_modules={}
-def load_module(path):
+def load_module(path,encoding=None):
try:
return cache_modules[path]
except KeyError:
@@ -270,8 +321,8 @@
pass
module=imp.new_module(WSCRIPT_FILE)
try:
- code=Utils.readf(path,m='rU')
- except(IOError,OSError):
+ code=Utils.readf(path,m='rU',encoding=encoding)
+ except EnvironmentError:
raise Errors.WafError('Could not read the file %r'%path)
module_dir=os.path.dirname(path)
sys.path.insert(0,module_dir)
@@ -279,11 +330,9 @@
sys.path.remove(module_dir)
cache_modules[path]=module
return module
-def load_tool(tool,tooldir=None):
+def load_tool(tool,tooldir=None,ctx=None):
if tool=='java':
tool='javaw'
- elif tool=='compiler_cc':
- tool='compiler_c'
else:
tool=tool.replace('++','xx')
if tooldir:
@@ -298,19 +347,14 @@
for d in tooldir:
sys.path.remove(d)
else:
- global waf_dir
- try:
- os.stat(os.path.join(waf_dir,'waflib','extras',tool+'.py'))
- except OSError:
+ for x in('waflib.Tools.%s','waflib.extras.%s','waflib.%s','%s'):
try:
- os.stat(os.path.join(waf_dir,'waflib','Tools',tool+'.py'))
- except OSError:
- d=tool
- else:
- d='waflib.Tools.%s'%tool
- else:
- d='waflib.extras.%s'%tool
- __import__(d)
- ret=sys.modules[d]
+ __import__(x%tool)
+ break
+ except ImportError:
+ x=None
+ if x is None:
+ __import__(tool)
+ ret=sys.modules[x%tool]
Context.tools[tool]=ret
return ret
--- a/waflib/Logs.py
+++ b/waflib/Logs.py
@@ -2,41 +2,13 @@
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-import os,re,traceback,sys
-_nocolor=os.environ.get('NOCOLOR','no')not in('no','0','false')
-try:
- if not _nocolor:
- import waflib.ansiterm
-except ImportError:
- pass
-try:
- import threading
-except ImportError:
- if not'JOBS'in os.environ:
- os.environ['JOBS']='1'
-else:
- wlock=threading.Lock()
- class sync_stream(object):
- def __init__(self,stream):
- self.stream=stream
- self.encoding=self.stream.encoding
- def write(self,txt):
- try:
- wlock.acquire()
- self.stream.write(txt)
- self.stream.flush()
- finally:
- wlock.release()
- def fileno(self):
- return self.stream.fileno()
- def flush(self):
- self.stream.flush()
- def isatty(self):
- return self.stream.isatty()
- if not os.environ.get('NOSYNC',False):
- if id(sys.stdout)==id(sys.__stdout__):
- sys.stdout=sync_stream(sys.stdout)
- sys.stderr=sync_stream(sys.stderr)
+import os,re,traceback,sys,types
+from waflib import Utils,ansiterm
+if not os.environ.get('NOSYNC',False):
+ if sys.stdout.isatty()and id(sys.stdout)==id(sys.__stdout__):
+ sys.stdout=ansiterm.AnsiTerm(sys.stdout)
+ if sys.stderr.isatty()and id(sys.stderr)==id(sys.__stderr__):
+ sys.stderr=ansiterm.AnsiTerm(sys.stderr)
import logging
LOG_FORMAT="%(asctime)s %(c1)s%(zone)s%(c2)s %(message)s"
HOUR_FORMAT="%H:%M:%S"
@@ -43,31 +15,25 @@
zones=''
verbose=0
colors_lst={'USE':True,'BOLD':'\x1b[01;1m','RED':'\x1b[01;31m','GREEN':'\x1b[32m','YELLOW':'\x1b[33m','PINK':'\x1b[35m','BLUE':'\x1b[01;34m','CYAN':'\x1b[36m','NORMAL':'\x1b[0m','cursor_on':'\x1b[?25h','cursor_off':'\x1b[?25l',}
-got_tty=not os.environ.get('TERM','dumb')in['dumb','emacs']
-if got_tty:
- try:
- got_tty=sys.stderr.isatty()and sys.stdout.isatty()
- except AttributeError:
- got_tty=False
-if(not got_tty and os.environ.get('TERM','dumb')!='msys')or _nocolor:
- colors_lst['USE']=False
-def get_term_cols():
- return 80
-try:
- import struct,fcntl,termios
-except ImportError:
- pass
-else:
- if got_tty:
- def get_term_cols_real():
- dummy_lines,cols=struct.unpack("HHHH",fcntl.ioctl(sys.stderr.fileno(),termios.TIOCGWINSZ,struct.pack("HHHH",0,0,0,0)))[:2]- return cols
- try:
- get_term_cols_real()
- except Exception:
- pass
+indicator='\r\x1b[K%s%s%s'
+def enable_colors(use):
+ if use==1:
+ if not(sys.stderr.isatty()or sys.stdout.isatty()):
+ use=0
+ if Utils.is_win32:
+ term=os.environ.get('TERM','')
else:
- get_term_cols=get_term_cols_real
+ term=os.environ.get('TERM','dumb')
+ if term in('dumb','emacs'):
+ use=0
+ if use>=1:
+ os.environ['TERM']='vt100'
+ colors_lst['USE']=use
+try:
+ get_term_cols=ansiterm.get_term_cols
+except AttributeError:
+ def get_term_cols():
+ return 80
get_term_cols.__doc__="""
Get the console width in characters.
@@ -88,16 +54,8 @@
def __init__(self,name=None):
pass
def filter(self,rec):
- rec.c1=colors.PINK
- rec.c2=colors.NORMAL
rec.zone=rec.module
if rec.levelno>=logging.INFO:
- if rec.levelno>=logging.ERROR:
- rec.c1=colors.RED
- elif rec.levelno>=logging.WARNING:
- rec.c1=colors.YELLOW
- else:
- rec.c1=colors.GREEN
return True
m=re_log.match(rec.msg)
if m:
@@ -108,16 +66,70 @@
elif not verbose>2:
return False
return True
+class log_handler(logging.StreamHandler):
+ def emit(self,record):
+ try:
+ try:
+ self.stream=record.stream
+ except AttributeError:
+ if record.levelno>=logging.WARNING:
+ record.stream=self.stream=sys.stderr
+ else:
+ record.stream=self.stream=sys.stdout
+ self.emit_override(record)
+ self.flush()
+ except(KeyboardInterrupt,SystemExit):
+ raise
+ except:
+ self.handleError(record)
+ def emit_override(self,record,**kw):
+ self.terminator=getattr(record,'terminator','\n')
+ stream=self.stream
+ if hasattr(types,"UnicodeType"):
+ msg=self.formatter.format(record)
+ fs='%s'+self.terminator
+ try:
+ if(isinstance(msg,unicode)and getattr(stream,'encoding',None)):
+ fs=fs.decode(stream.encoding)
+ try:
+ stream.write(fs%msg)
+ except UnicodeEncodeError:
+ stream.write((fs%msg).encode(stream.encoding))
+ else:
+ stream.write(fs%msg)
+ except UnicodeError:
+ stream.write((fs%msg).encode("UTF-8"))+ else:
+ logging.StreamHandler.emit(self,record)
class formatter(logging.Formatter):
def __init__(self):
logging.Formatter.__init__(self,LOG_FORMAT,HOUR_FORMAT)
def format(self,rec):
- if rec.levelno>=logging.WARNING or rec.levelno==logging.INFO:
- try:
- msg=rec.msg.decode('utf-8')
- except Exception:
- msg=rec.msg
- return'%s%s%s'%(rec.c1,msg,rec.c2)
+ try:
+ msg=rec.msg.decode('utf-8')
+ except Exception:
+ msg=rec.msg
+ use=colors_lst['USE']
+ if(use==1 and rec.stream.isatty())or use==2:
+ c1=getattr(rec,'c1',None)
+ if c1 is None:
+ c1=''
+ if rec.levelno>=logging.ERROR:
+ c1=colors.RED
+ elif rec.levelno>=logging.WARNING:
+ c1=colors.YELLOW
+ elif rec.levelno>=logging.INFO:
+ c1=colors.GREEN
+ c2=getattr(rec,'c2',colors.NORMAL)
+ msg='%s%s%s'%(c1,msg,c2)
+ else:
+ msg=msg.replace('\r','\n')
+ msg=re.sub(r'\x1B\[(K|.*?(m|h|l))','',msg)
+ if rec.levelno>=logging.INFO:
+ return msg
+ rec.msg=msg
+ rec.c1=colors.PINK
+ rec.c2=colors.NORMAL
return logging.Formatter.format(self,rec)
log=None
def debug(*k,**kw):
@@ -150,7 +162,7 @@
log=logging.getLogger('waflib')
log.handlers=[]
log.filters=[]
- hdlr=logging.StreamHandler()
+ hdlr=log_handler()
hdlr.setFormatter(formatter())
log.addHandler(hdlr)
log.addFilter(log_filter())
@@ -163,7 +175,7 @@
logger.addHandler(hdlr)
logger.setLevel(logging.DEBUG)
return logger
-def make_mem_logger(name,to_log,size=10000):
+def make_mem_logger(name,to_log,size=8192):
from logging.handlers import MemoryHandler
logger=logging.getLogger(name)
hdlr=MemoryHandler(size,target=to_log)
@@ -173,5 +185,12 @@
logger.memhandler=hdlr
logger.setLevel(logging.DEBUG)
return logger
-def pprint(col,str,label='',sep='\n'):
- sys.stderr.write("%s%s%s %s%s"%(colors(col),str,colors.NORMAL,label,sep))+def free_logger(logger):
+ try:
+ for x in logger.handlers:
+ x.close()
+ logger.removeHandler(x)
+ except Exception ,e:
+ pass
+def pprint(col,msg,label='',sep='\n'):
+ info("%s%s%s %s"%(colors(col),msg,colors.NORMAL,label),extra={'terminator':sep})--- a/waflib/Node.py
+++ b/waflib/Node.py
@@ -46,9 +46,12 @@
re_sp=re.compile('[/\\\\]')
def split_path_win32(path):
if path.startswith('\\\\'):
- ret=re.split(re_sp,path)[2:]
- ret[0]='\\'+ret[0]
- return ret
+ if path.startswith('\\\\?'):
+ path=path[4:]
+ else:
+ ret=re.split(re_sp,path)[2:]
+ ret[0]='\\\\'+ret[0]
+ return ret
return re.split(re_sp,path)
if sys.platform=='cygwin':
split_path=split_path_cygwin
@@ -55,6 +58,7 @@
elif Utils.is_win32:
split_path=split_path_win32
class Node(object):
+ dict_class=dict
__slots__=('name','sig','children','parent','cache_abspath','cache_isdir','cache_sig')
def __init__(self,name,parent):
self.name=name
@@ -67,7 +71,7 @@
self.name=data[0]
self.parent=data[1]
if data[2]is not None:
- self.children=data[2]
+ self.children=self.dict_class(data[2])
if data[3]is not None:
self.sig=data[3]
def __getstate__(self):
@@ -90,13 +94,16 @@
os.chmod(self.abspath(),val)
def delete(self):
try:
- if hasattr(self,'children'):
- shutil.rmtree(self.abspath())
- else:
- os.remove(self.abspath())
- except OSError:
- pass
- self.evict()
+ try:
+ if hasattr(self,'children'):
+ shutil.rmtree(self.abspath())
+ else:
+ os.remove(self.abspath())
+ except OSError ,e:
+ if os.path.exists(self.abspath()):
+ raise e
+ finally:
+ self.evict()
def evict(self):
del self.parent.children[self.name]
def suffix(self):
@@ -130,7 +137,7 @@
try:
self.children
except AttributeError:
- self.children={}
+ self.children=self.dict_class()
self.cache_isdir=True
def find_node(self,lst):
if isinstance(lst,str):
@@ -143,7 +150,7 @@
try:
ch=cur.children
except AttributeError:
- cur.children={}
+ cur.children=self.dict_class()
else:
try:
cur=cur.children[x]
@@ -182,7 +189,7 @@
cur=cur.children[x]
continue
else:
- cur.children={}
+ cur.children=self.dict_class()
cur=self.__class__(x,cur)
return cur
def search_node(self,lst):
@@ -218,8 +225,12 @@
up+=1
c1=c1.parent
c2=c2.parent
- for i in range(up):
- lst.append('..')
+ if c1.parent:
+ for i in range(up):
+ lst.append('..')
+ else:
+ if os.sep=='/'and lst:
+ lst.append('')
lst.reverse()
return os.sep.join(lst)or'.'
def abspath(self):
@@ -256,7 +267,7 @@
try:
lst=set(self.children.keys())
except AttributeError:
- self.children={}
+ self.children=self.dict_class()
else:
if remove:
for x in lst-set(dircont):
@@ -433,8 +444,6 @@
else:
name=name[:-len(ext_in)]+ext
return self.parent.find_or_declare([name])
- def nice_path(self,env=None):
- return self.path_from(self.ctx.launch_node())
def bldpath(self):
return self.path_from(self.ctx.bldnode)
def srcpath(self):
@@ -449,9 +458,6 @@
return self.srcpath()
def bld_dir(self):
return self.parent.bldpath()
- def bld_base(self):
- s=os.path.splitext(self.name)[0]
- return self.bld_dir()+os.sep+s
def get_bld_sig(self):
try:
return self.cache_sig
@@ -461,7 +467,6 @@
self.sig=Utils.h_file(self.abspath())
self.cache_sig=ret=self.sig
return ret
- search=search_node
pickle_lock=Utils.threading.Lock()
class Nod3(Node):
pass
--- a/waflib/Options.py
+++ b/waflib/Options.py
@@ -7,58 +7,26 @@
cmds='distclean configure build install clean uninstall check dist distcheck'.split()
options={}
commands=[]
+envvars=[]
lockfile=os.environ.get('WAFLOCK','.lock-waf_%s_build'%sys.platform)
-try:cache_global=os.path.abspath(os.environ['WAFCACHE'])
-except KeyError:cache_global=''
platform=Utils.unversioned_sys_platform()
class opt_parser(optparse.OptionParser):
def __init__(self,ctx):
optparse.OptionParser.__init__(self,conflict_handler="resolve",version='waf %s (%s)'%(Context.WAFVERSION,Context.WAFREVISION))
self.formatter.width=Logs.get_term_cols()
- p=self.add_option
self.ctx=ctx
- jobs=ctx.jobs()
- p('-j','--jobs',dest='jobs',default=jobs,type='int',help='amount of parallel jobs (%r)'%jobs)
- p('-k','--keep',dest='keep',default=0,action='count',help='keep running happily even if errors are found')
- p('-v','--verbose',dest='verbose',default=0,action='count',help='verbosity level -v -vv or -vvv [default: 0]')
- p('--nocache',dest='nocache',default=False,action='store_true',help='ignore the WAFCACHE (if set)')
- p('--zones',dest='zones',default='',action='store',help='debugging zones (task_gen, deps, tasks, etc)')
- gr=optparse.OptionGroup(self,'configure options')
- self.add_option_group(gr)
- gr.add_option('-o','--out',action='store',default='',help='build dir for the project',dest='out')
- gr.add_option('-t','--top',action='store',default='',help='src dir for the project',dest='top')
- default_prefix=os.environ.get('PREFIX')
- if not default_prefix:
- if platform=='win32':
- d=tempfile.gettempdir()
- default_prefix=d[0].upper()+d[1:]
- else:
- default_prefix='/usr/local/'
- gr.add_option('--prefix',dest='prefix',default=default_prefix,help='installation prefix [default: %r]'%default_prefix)
- gr.add_option('--download',dest='download',default=False,action='store_true',help='try to download the tools if missing')
- gr=optparse.OptionGroup(self,'build and install options')
- self.add_option_group(gr)
- gr.add_option('-p','--progress',dest='progress_bar',default=0,action='count',help='-p: progress bar; -pp: ide output')
- gr.add_option('--targets',dest='targets',default='',action='store',help='task generators, e.g. "target1,target2"')
- gr=optparse.OptionGroup(self,'step options')
- self.add_option_group(gr)
- gr.add_option('--files',dest='files',default='',action='store',help='files to process, by regexp, e.g. "*/main.c,*/test/main.o"')
- default_destdir=os.environ.get('DESTDIR','')
- gr=optparse.OptionGroup(self,'install/uninstall options')
- self.add_option_group(gr)
- gr.add_option('--destdir',help='installation root [default: %r]'%default_destdir,default=default_destdir,dest='destdir')
- gr.add_option('-f','--force',dest='force',default=False,action='store_true',help='force file installation')
- gr.add_option('--distcheck-args',help='arguments to pass to distcheck',default=None,action='store')
+ def print_usage(self,file=None):
+ return self.print_help(file)
def get_usage(self):
cmds_str={}
for cls in Context.classes:
- if not cls.cmd or cls.cmd=='options':
+ if not cls.cmd or cls.cmd=='options'or cls.cmd.startswith('_'):
continue
s=cls.__doc__ or''
cmds_str[cls.cmd]=s
if Context.g_module:
for(k,v)in Context.g_module.__dict__.items():
- if k in['options','init','shutdown']:
+ if k in('options','init','shutdown'):
continue
if type(v)is type(Context.create_context):
if v.__doc__ and not k.startswith('_'):
@@ -81,6 +49,41 @@
super(OptionsContext,self).__init__(**kw)
self.parser=opt_parser(self)
self.option_groups={}
+ jobs=self.jobs()
+ p=self.add_option
+ color=os.environ.get('NOCOLOR','')and'no'or'auto'
+ p('-c','--color',dest='colors',default=color,action='store',help='whether to use colors (yes/no/auto) [default: auto]',choices=('yes','no','auto'))
+ p('-j','--jobs',dest='jobs',default=jobs,type='int',help='amount of parallel jobs (%r)'%jobs)
+ p('-k','--keep',dest='keep',default=0,action='count',help='continue despite errors (-kk to try harder)')
+ p('-v','--verbose',dest='verbose',default=0,action='count',help='verbosity level -v -vv or -vvv [default: 0]')
+ p('--zones',dest='zones',default='',action='store',help='debugging zones (task_gen, deps, tasks, etc)')
+ gr=self.add_option_group('Configuration options')
+ self.option_groups['configure options']=gr
+ gr.add_option('-o','--out',action='store',default='',help='build dir for the project',dest='out')
+ gr.add_option('-t','--top',action='store',default='',help='src dir for the project',dest='top')
+ default_prefix=getattr(Context.g_module,'default_prefix',os.environ.get('PREFIX'))
+ if not default_prefix:
+ if platform=='win32':
+ d=tempfile.gettempdir()
+ default_prefix=d[0].upper()+d[1:]
+ else:
+ default_prefix='/usr/local/'
+ gr.add_option('--prefix',dest='prefix',default=default_prefix,help='installation prefix [default: %r]'%default_prefix)
+ gr.add_option('--bindir',dest='bindir',help='bindir')
+ gr.add_option('--libdir',dest='libdir',help='libdir')
+ gr=self.add_option_group('Build and installation options')
+ self.option_groups['build and install options']=gr
+ gr.add_option('-p','--progress',dest='progress_bar',default=0,action='count',help='-p: progress bar; -pp: ide output')
+ gr.add_option('--targets',dest='targets',default='',action='store',help='task generators, e.g. "target1,target2"')
+ gr=self.add_option_group('Step options')
+ self.option_groups['step options']=gr
+ gr.add_option('--files',dest='files',default='',action='store',help='files to process, by regexp, e.g. "*/main.c,*/test/main.o"')
+ default_destdir=os.environ.get('DESTDIR','')
+ gr=self.add_option_group('Installation and uninstallation options')
+ self.option_groups['install/uninstall options']=gr
+ gr.add_option('--destdir',help='installation root [default: %r]'%default_destdir,default=default_destdir,dest='destdir')
+ gr.add_option('-f','--force',dest='force',default=False,action='store_true',help='force file installation')
+ gr.add_option('--distcheck-args',metavar='ARGS',help='arguments to pass to distcheck',default=None,action='store')
def jobs(self):
count=int(os.environ.get('JOBS',0))
if count<1:
@@ -123,13 +126,19 @@
return group
return None
def parse_args(self,_args=None):
- global options,commands
+ global options,commands,envvars
(options,leftover_args)=self.parser.parse_args(args=_args)
- commands=leftover_args
+ for arg in leftover_args:
+ if'='in arg:
+ envvars.append(arg)
+ else:
+ commands.append(arg)
if options.destdir:
options.destdir=os.path.abspath(os.path.expanduser(options.destdir))
if options.verbose>=1:
self.load('errcheck')
+ colors={'yes':2,'auto':1,'no':0}[options.colors]
+ Logs.enable_colors(colors)
def execute(self):
super(OptionsContext,self).execute()
self.parse_args()
--- a/waflib/Runner.py
+++ b/waflib/Runner.py
@@ -109,10 +109,6 @@
self.count-=1
self.dirty=True
return tsk
- def error_handler(self,tsk):
- if not self.bld.keep:
- self.stop=True
- self.error.append(tsk)
def add_task(self,tsk):
try:
self.pool
@@ -143,6 +139,31 @@
for x in pool:
put_pool(x)
self.pool=[]
+ def skip(self,tsk):
+ tsk.hasrun=Task.SKIPPED
+ def error_handler(self,tsk):
+ if not self.bld.keep:
+ self.stop=True
+ self.error.append(tsk)
+ def task_status(self,tsk):
+ try:
+ return tsk.runnable_status()
+ except Exception:
+ self.processed+=1
+ tsk.err_msg=Utils.ex_stack()
+ if not self.stop and self.bld.keep:
+ self.skip(tsk)
+ if self.bld.keep==1:
+ if Logs.verbose>1 or not self.error:
+ self.error.append(tsk)
+ self.stop=True
+ else:
+ if Logs.verbose>1:
+ self.error.append(tsk)
+ return Task.EXCEPTION
+ tsk.hasrun=Task.EXCEPTION
+ self.error_handler(tsk)
+ return Task.EXCEPTION
def start(self):
self.total=self.bld.total()
while not self.stop:
@@ -158,31 +179,8 @@
continue
if self.stop:
break
- try:
- st=tsk.runnable_status()
- except Exception:
- self.processed+=1
- tsk.err_msg=Utils.ex_stack()
- if not self.stop and self.bld.keep:
- tsk.hasrun=Task.SKIPPED
- if self.bld.keep==1:
- if Logs.verbose>1 or not self.error:
- self.error.append(tsk)
- self.stop=True
- else:
- if Logs.verbose>1:
- self.error.append(tsk)
- continue
- tsk.hasrun=Task.EXCEPTION
- self.error_handler(tsk)
- continue
- if st==Task.ASK_LATER:
- self.postpone(tsk)
- elif st==Task.SKIP_ME:
- self.processed+=1
- tsk.hasrun=Task.SKIPPED
- self.add_more_tasks(tsk)
- else:
+ st=self.task_status(tsk)
+ if st==Task.RUN_ME:
tsk.position=(self.processed,self.total)
self.count+=1
tsk.master=self
@@ -191,6 +189,12 @@
tsk.process()
else:
self.add_task(tsk)
+ if st==Task.ASK_LATER:
+ self.postpone(tsk)
+ elif st==Task.SKIP_ME:
+ self.processed+=1
+ self.skip(tsk)
+ self.add_more_tasks(tsk)
while self.error and self.count:
self.get_out()
assert(self.count==0 or self.stop)
--- a/waflib/Scripting.py
+++ b/waflib/Scripting.py
@@ -18,14 +18,20 @@
ctx.curdir=current_directory
ctx.parse_args()
sys.exit(0)
+ if len(sys.argv)>1:
+ potential_wscript=os.path.join(current_directory,sys.argv[1])
+ if os.path.basename(potential_wscript)=='wscript'and os.path.isfile(potential_wscript):
+ current_directory=os.path.normpath(os.path.dirname(potential_wscript))
+ sys.argv.pop(1)
Context.waf_dir=wafdir
Context.launch_dir=current_directory
no_climb=os.environ.get('NOCLIMB',None)
if not no_climb:
for k in no_climb_commands:
- if k in sys.argv:
- no_climb=True
- break
+ for y in sys.argv:
+ if y.startswith(k):
+ no_climb=True
+ break
cur=current_directory
while cur:
lst=os.listdir(cur)
@@ -37,7 +43,7 @@
except Exception:
pass
else:
- for x in[env.run_dir,env.top_dir,env.out_dir]:
+ for x in(env.run_dir,env.top_dir,env.out_dir):
if Utils.is_win32:
if cur==x:
load=True
@@ -84,7 +90,7 @@
Logs.error('Waf: The folder %r is unreadable'%Context.run_dir)
sys.exit(1)
try:
- set_main_module(Context.run_dir+os.sep+Context.WSCRIPT_FILE)
+ set_main_module(os.path.join(Context.run_dir,Context.WSCRIPT_FILE))
except Errors.WafError ,e:
Logs.pprint('RED',e.verbose_msg)
Logs.error(str(e))
@@ -115,7 +121,7 @@
name=obj.__name__
if not name in Context.g_module.__dict__:
setattr(Context.g_module,name,obj)
- for k in[update,dist,distclean,distcheck,update]:
+ for k in(update,dist,distclean,distcheck,update):
set_def(k)
if not'init'in Context.g_module.__dict__:
Context.g_module.init=Utils.nada
@@ -125,11 +131,13 @@
Context.g_module.options=Utils.nada
def parse_options():
Context.create_context('options').execute()
+ for var in Options.envvars:
+ (name,value)=var.split('=',1)
+ os.environ[name.strip()]=value
if not Options.commands:
Options.commands=[default_cmd]
Options.commands=[x for x in Options.commands if x!='options']
Logs.verbose=Options.options.verbose
- Logs.init_log()
if Options.options.zones:
Logs.zones=Options.options.zones.split(',')
if not Logs.verbose:
@@ -143,7 +151,10 @@
ctx.log_timer=Utils.Timer()
ctx.options=Options.options
ctx.cmd=cmd_name
- ctx.execute()
+ try:
+ ctx.execute()
+ finally:
+ ctx.finalize()
return ctx
def run_commands():
parse_options()
@@ -162,12 +173,12 @@
for(root,dirs,files)in os.walk(dirname):
for f in files:
if _can_distclean(f):
- fname=root+os.sep+f
+ fname=os.path.join(root,f)
try:
os.remove(fname)
except OSError:
Logs.warn('Could not remove %r'%fname)
- for x in[Context.DBFILE,'config.log']:
+ for x in(Context.DBFILE,'config.log'):
try:
os.remove(x)
except OSError:
@@ -193,15 +204,16 @@
pass
except OSError ,e:
if e.errno!=errno.ENOENT:
- Logs.warn('project %r cannot be removed'%proj[Context.OUT])
+ Logs.warn('Could not remove %r'%proj['out_dir'])
else:
distclean_dir(proj['out_dir'])
for k in(proj['out_dir'],proj['top_dir'],proj['run_dir']):
+ p=os.path.join(k,Options.lockfile)
try:
- os.remove(os.path.join(k,Options.lockfile))
+ os.remove(p)
except OSError ,e:
if e.errno!=errno.ENOENT:
- Logs.warn('file %r cannot be removed'%f)
+ Logs.warn('Could not remove %r'%p)
if not Options.commands:
for x in'.waf-1. waf-1. .waf3-1. waf3-1.'.split():
if f.startswith(x):
@@ -225,7 +237,7 @@
node=self.base_path.make_node(arch_name)
try:
node.delete()
- except Exception:
+ except OSError:
pass
files=self.get_files()
if self.algo.startswith('tar.'):
@@ -241,7 +253,7 @@
zip.write(x.abspath(),archive_name,zipfile.ZIP_DEFLATED)
zip.close()
else:
- self.fatal('Valid algo types are tar.bz2, tar.gz or zip')
+ self.fatal('Valid algo types are tar.bz2, tar.gz, tar.xz or zip')
try:
from hashlib import sha1 as sha
except ImportError:
@@ -290,10 +302,11 @@
try:
return self.excl
except AttributeError:
- self.excl=Node.exclude_regs+' **/waf-1.7.* **/.waf-1.7* **/waf3-1.7.* **/.waf3-1.7* **/*~ **/*.rej **/*.orig **/*.pyc **/*.pyo **/*.bak **/*.swp **/.lock-w*'
- nd=self.root.find_node(Context.out_dir)
- if nd:
- self.excl+=' '+nd.path_from(self.base_path)
+ self.excl=Node.exclude_regs+' **/waf-1.8.* **/.waf-1.8* **/waf3-1.8.* **/.waf3-1.8* **/*~ **/*.rej **/*.orig **/*.pyc **/*.pyo **/*.bak **/*.swp **/.lock-w*'
+ if Context.out_dir:
+ nd=self.root.find_node(Context.out_dir)
+ if nd:
+ self.excl+=' '+nd.path_from(self.base_path)
return self.excl
def get_files(self):
try:
@@ -369,6 +382,8 @@
if do_config:
Options.commands.insert(0,self.cmd)
Options.commands.insert(0,'configure')
+ if Configure.autoconfig=='clobber':
+ Options.options.__dict__=env.options
return
return execute_method(self)
return execute
--- a/waflib/Task.py
+++ b/waflib/Task.py
@@ -2,7 +2,7 @@
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-import os,shutil,re,tempfile
+import os,re,sys
from waflib import Utils,Logs,Errors
NOT_RUN=0
MISSING=1
@@ -37,24 +37,6 @@
lst = [x for x in lst if x]
return tsk.exec_command(lst, cwd=wd, env=env.env or None)
'''
-def cache_outputs(cls):
- m1=cls.run
- def run(self):
- bld=self.generator.bld
- if bld.cache_global and not bld.nocache:
- if self.can_retrieve_cache():
- return 0
- return m1(self)
- cls.run=run
- m2=cls.post_run
- def post_run(self):
- bld=self.generator.bld
- ret=m2(self)
- if bld.cache_global and not bld.nocache:
- self.put_files_cache()
- return ret
- cls.post_run=post_run
- return cls
classes={}class store_task_type(type):
def __init__(cls,name,bases,dict):
@@ -67,6 +49,7 @@
if getattr(cls,'run_str',None):
(f,dvars)=compile_fun(cls.run_str,cls.shell)
cls.hcode=cls.run_str
+ cls.orig_run_str=cls.run_str
cls.run_str=None
cls.run=f
cls.vars=list(set(cls.vars+dvars))
@@ -73,8 +56,8 @@
cls.vars.sort()
elif getattr(cls,'run',None)and not'hcode'in cls.__dict__:
cls.hcode=Utils.h_fun(cls.run)
- if not getattr(cls,'nocache',None):
- cls=cache_outputs(cls)
+ if sys.hexversion>0x3000000:
+ cls.hcode=cls.hcode.encode('iso8859-1','xmlcharrefreplace')
getattr(cls,'register',classes)[name]=cls
evil=store_task_type('evil',(object,),{})
class TaskBase(evil):
@@ -94,10 +77,14 @@
return'\n\t{task %r: %s %s}'%(self.__class__.__name__,id(self),str(getattr(self,'fun','')))
def __str__(self):
if hasattr(self,'fun'):
- return'executing: %s\n'%self.fun.__name__
- return self.__class__.__name__+'\n'
+ return self.fun.__name__
+ return self.__class__.__name__
def __hash__(self):
return id(self)
+ def keyword(self):
+ if hasattr(self,'fun'):
+ return'Function'
+ return'Processing'
def exec_command(self,cmd,**kw):
bld=self.generator.bld
try:
@@ -150,7 +137,20 @@
def post_run(self):
pass
def log_display(self,bld):
- bld.to_log(self.display())
+ if self.generator.bld.progress_bar==3:
+ return
+ s=self.display()
+ if s:
+ if bld.logger:
+ logger=bld.logger
+ else:
+ logger=Logs
+ if self.generator.bld.progress_bar==1:
+ c1=Logs.colors.cursor_off
+ c2=Logs.colors.cursor_on
logger.info(s,extra={'stream':sys.stderr,'terminator':'','c1':c1,'c2':c2})
+ else:
+ logger.info(s,extra={'terminator':'','c1':'','c2':''})
def display(self):
col1=Logs.colors(self.color)
col2=Logs.colors.NORMAL
@@ -178,8 +178,11 @@
return None
total=master.total
n=len(str(total))
- fs='[%%%dd/%%%dd] %%s%%s%%s'%(n,n)
- return fs%(cur(),total,col1,s,col2)
+ fs='[%%%dd/%%%dd] %%s%%s%%s%%s\n'%(n,n)
+ kw=self.keyword()
+ if kw:
+ kw+=' '
+ return fs%(cur(),total,kw,col1,s,col2)
def attr(self,att,default=None):
ret=getattr(self,att,self)
if ret is self:return getattr(self.__class__,att,default)
@@ -207,6 +210,8 @@
return'invalid status for task in %r: %r'%(name,self.hasrun)
def colon(self,var1,var2):
tmp=self.env[var1]
+ if not tmp:
+ return[]
if isinstance(var2,str):
it=self.env[var2]
else:
@@ -214,8 +219,6 @@
if isinstance(tmp,str):
return[tmp%x for x in it]
else:
- if Logs.verbose and not tmp and it:
- Logs.warn('Missing env variable %r for task %r (generator %r)'%(var1,self,self.generator))
lst=[]
for y in it:
lst.extend(tmp)
@@ -232,12 +235,33 @@
self.dep_nodes=[]
self.run_after=set([])
def __str__(self):
- env=self.env
- src_str=' '.join([a.nice_path()for a in self.inputs])
- tgt_str=' '.join([a.nice_path()for a in self.outputs])
+ name=self.__class__.__name__
+ if self.outputs:
+ if(name.endswith('lib')or name.endswith('program'))or not self.inputs:
+ node=self.outputs[0]
+ return node.path_from(node.ctx.launch_node())
+ if not(self.inputs or self.outputs):
+ return self.__class__.__name__
+ if len(self.inputs)==1:
+ node=self.inputs[0]
+ return node.path_from(node.ctx.launch_node())
+ src_str=' '.join([a.path_from(a.ctx.launch_node())for a in self.inputs])
+ tgt_str=' '.join([a.path_from(a.ctx.launch_node())for a in self.outputs])
if self.outputs:sep=' -> '
else:sep=''
- return'%s: %s%s%s\n'%(self.__class__.__name__.replace('_task',''),src_str,sep,tgt_str)
+ return'%s: %s%s%s'%(self.__class__.__name__.replace('_task',''),src_str,sep,tgt_str)
+ def keyword(self):
+ name=self.__class__.__name__
+ if name.endswith('lib')or name.endswith('program'):
+ return'Linking'
+ if len(self.inputs)==1 and len(self.outputs)==1:
+ return'Compiling'
+ if not self.inputs:
+ if self.outputs:
+ return'Creating'
+ else:
+ return'Running'
+ return'Processing'
def __repr__(self):
try:
ins=",".join([x.name for x in self.inputs])
@@ -361,9 +385,11 @@
try:
if prev==self.compute_sig_implicit_deps():
return prev
- except Exception:
+ except Errors.TaskNotReady:
+ raise
+ except EnvironmentError:
for x in bld.node_deps.get(self.uid(),[]):
- if x.is_child_of(bld.srcnode):
+ if not x.is_bld():
try:
os.stat(x.abspath())
except OSError:
@@ -419,71 +445,20 @@
for tsk in self.run_after:
if not tsk.hasrun:
raise Errors.TaskNotReady('not ready')
- def can_retrieve_cache(self):
- if not getattr(self,'outputs',None):
- return None
- sig=self.signature()
- ssig=Utils.to_hex(self.uid())+Utils.to_hex(sig)
- dname=os.path.join(self.generator.bld.cache_global,ssig)
+if sys.hexversion>0x3000000:
+ def uid(self):
try:
- t1=os.stat(dname).st_mtime
- except OSError:
- return None
- for node in self.outputs:
- orig=os.path.join(dname,node.name)
- try:
- shutil.copy2(orig,node.abspath())
- os.utime(orig,None)
- except(OSError,IOError):
- Logs.debug('task: failed retrieving file')
- return None
- try:
- t2=os.stat(dname).st_mtime
- except OSError:
- return None
- if t1!=t2:
- return None
- for node in self.outputs:
- node.sig=sig
- if self.generator.bld.progress_bar<1:
- self.generator.bld.to_log('restoring from cache %r\n'%node.abspath())
- self.cached=True
- return True
- def put_files_cache(self):
- if getattr(self,'cached',None):
- return None
- if not getattr(self,'outputs',None):
- return None
- sig=self.signature()
- ssig=Utils.to_hex(self.uid())+Utils.to_hex(sig)
- dname=os.path.join(self.generator.bld.cache_global,ssig)
- tmpdir=tempfile.mkdtemp(prefix=self.generator.bld.cache_global+os.sep+'waf')
- try:
- shutil.rmtree(dname)
- except Exception:
- pass
- try:
- for node in self.outputs:
- dest=os.path.join(tmpdir,node.name)
- shutil.copy2(node.abspath(),dest)
- except(OSError,IOError):
- try:
- shutil.rmtree(tmpdir)
- except Exception:
- pass
- else:
- try:
- os.rename(tmpdir,dname)
- except OSError:
- try:
- shutil.rmtree(tmpdir)
- except Exception:
- pass
- else:
- try:
- os.chmod(dname,Utils.O755)
- except Exception:
- pass
+ return self.uid_
+ except AttributeError:
+ m=Utils.md5()
+ up=m.update
up(self.__class__.__name__.encode('iso8859-1','xmlcharrefreplace'))
+ for x in self.inputs+self.outputs:
+ up(x.abspath().encode('iso8859-1','xmlcharrefreplace'))
+ self.uid_=m.digest()
+ return self.uid_
+ uid.__doc__=Task.uid.__doc__
+ Task.uid=uid
def is_before(t1,t2):
to_list=Utils.to_list
for k in to_list(t2.ext_in):
@@ -578,6 +553,7 @@
def repl(match):
g=match.group
if g('dollar'):return"$"+ elif g('backslash'):return'\\' elif g('subst'):extr.append((g('var'),g('code')));return"<<|@|>>"return None
line2=reg_act.sub(repl,line)
--- a/waflib/TaskGen.py
+++ b/waflib/TaskGen.py
@@ -5,8 +5,9 @@
import copy,re,os
from waflib import Task,Utils,Logs,Errors,ConfigSet,Node
feats=Utils.defaultdict(set)
+HEADER_EXTS=['.h','.hpp','.hxx','.hh']
class task_gen(object):
- mappings={}
+ mappings=Utils.ordered_iter_dict()
prec=Utils.defaultdict(list)
def __init__(self,*k,**kw):
self.source=''
@@ -36,7 +37,7 @@
def __repr__(self):
lst=[]
for x in self.__dict__.keys():
- if x not in['env','bld','compiled_tasks','tasks']:
+ if x not in('env','bld','compiled_tasks','tasks'):
lst.append("%s=%s"%(x,repr(getattr(self,x))))
return"bld(%s) in %s"%(", ".join(lst),self.path.abspath())
def get_name(self):
@@ -111,27 +112,29 @@
return True
def get_hook(self,node):
name=node.name
- for k in self.mappings:
- if name.endswith(k):
- return self.mappings[k]
+ if self.mappings:
+ for k in self.mappings:
+ if name.endswith(k):
+ return self.mappings[k]
for k in task_gen.mappings:
if name.endswith(k):
return task_gen.mappings[k]
- raise Errors.WafError("File %r has no mapping in %r (did you forget to load a waf tool?)"%(node,task_gen.mappings.keys()))- def create_task(self,name,src=None,tgt=None):
+ raise Errors.WafError("File %r has no mapping in %r (have you forgotten to load a waf tool?)"%(node,task_gen.mappings.keys()))+ def create_task(self,name,src=None,tgt=None,**kw):
task=Task.classes[name](env=self.env.derive(),generator=self)
if src:
task.set_inputs(src)
if tgt:
task.set_outputs(tgt)
+ task.__dict__.update(kw)
self.tasks.append(task)
return task
def clone(self,env):
newobj=self.bld()
for x in self.__dict__:
- if x in['env','bld']:
+ if x in('env','bld'):
 continue
- elif x in['path','features']:
+ elif x in('path','features'):
 setattr(newobj,x,getattr(self,x))
else:
setattr(newobj,x,copy.copy(getattr(self,x)))
@@ -153,7 +156,7 @@
_ext_in=ext_in[0]
tsk=self.create_task(name,node)
cnt=0
- keys=list(self.mappings.keys())+list(self.__class__.mappings.keys())
+ keys=set(self.mappings.keys())|set(self.__class__.mappings.keys())
for x in ext:
k=node.change_ext(x,ext_in=_ext_in)
tsk.outputs.append(k)
@@ -212,7 +215,7 @@
tmp=[]
path=path or self.path
find=path.find_resource
- if isinstance(lst,self.path.__class__):
+ if isinstance(lst,Node.Node):
lst=[lst]
for x in Utils.to_list(lst):
if isinstance(x,str):
@@ -262,7 +265,7 @@
Task.update_outputs(cls)
if getattr(self,'always',None):
Task.always_run(cls)
- for x in['after','before','ext_in','ext_out']:
+ for x in('after','before','ext_in','ext_out'):
 setattr(cls,x,getattr(self,x,[]))
if getattr(self,'cache_rule','True'):
cache[(name,self.rule)]=cls
@@ -307,11 +310,11 @@
os.chmod(self.outputs[0].abspath(),self.generator.chmod)
return None
if getattr(self.generator,'fun',None):
- self.generator.fun(self)
+ return self.generator.fun(self)
code=self.inputs[0].read(encoding=getattr(self.generator,'encoding','ISO8859-1'))
if getattr(self.generator,'subst_fun',None):
code=self.generator.subst_fun(self,code)
- if code:
+ if code is not None:
self.outputs[0].write(code,encoding=getattr(self.generator,'encoding','ISO8859-1'))
return
 code=code.replace('%','%%')
@@ -329,8 +332,12 @@
except AttributeError:
 d={}
 for x in lst:
- tmp=getattr(self.generator,x,'')or self.env.get_flat(x)or self.env.get_flat(x.upper())
- d[x]=str(tmp)
+ tmp=getattr(self.generator,x,'')or self.env[x]or self.env[x.upper()]
+ try:
+ tmp=''.join(tmp)
+ except TypeError:
+ tmp=str(tmp)
+ d[x]=tmp
code=code%d
self.outputs[0].write(code,encoding=getattr(self.generator,'encoding','ISO8859-1'))
self.generator.bld.raw_deps[self.uid()]=self.dep_vars=lst
@@ -397,8 +404,12 @@
if val:
has_constraints=True
setattr(tsk,k,val)
- if not has_constraints and b.name.endswith('.h'):
- tsk.before=[k for k in('c','cxx')if k in Task.classes]
+ if not has_constraints:
+ global HEADER_EXTS
+ for xt in HEADER_EXTS:
+ if b.name.endswith(xt):
+ tsk.before=[k for k in('c','cxx')if k in Task.classes]
+ break
inst_to=getattr(self,'install_path',None)
if inst_to:
self.bld.install_files(inst_to,b,chmod=getattr(self,'chmod',Utils.O644))
--- a/waflib/Tools/ar.py
+++ b/waflib/Tools/ar.py
@@ -8,4 +8,6 @@
 conf.load('ar')
 def configure(conf):
 conf.find_program('ar',var='AR')
- conf.env.ARFLAGS='rcs'
+ conf.add_os_flags('ARFLAGS')
+ if not conf.env.ARFLAGS:
+ conf.env.ARFLAGS=['rcs']
--- a/waflib/Tools/asm.py
+++ b/waflib/Tools/asm.py
@@ -9,7 +9,7 @@
from waflib.TaskGen import extension,feature
class asm(Task.Task):
color='BLUE'
- run_str='${AS} ${ASFLAGS} ${ASMPATH_ST:INCPATHS} ${AS_SRC_F}${SRC} ${AS_TGT_F}${TGT}'
+ run_str='${AS} ${ASFLAGS} ${ASMPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${AS_SRC_F}${SRC} ${AS_TGT_F}${TGT}'
 @extension('.s','.S','.asm','.ASM','.spp','.SPP')
 def asm_hook(self,node):
 return self.create_compiled_task('asm',node)
--- a/waflib/Tools/c.py
+++ b/waflib/Tools/c.py
@@ -7,9 +7,11 @@
from waflib.Tools.ccroot import link_task,stlink_task
 @TaskGen.extension('.c')
 def c_hook(self,node):
+ if not self.env.CC and self.env.CXX:
+ return self.create_compiled_task('cxx',node)
 return self.create_compiled_task('c',node)
 class c(Task.Task):
- run_str='${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CC_SRC_F}${SRC} ${CC_TGT_F}${TGT}'
+ run_str='${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CC_SRC_F}${SRC} ${CC_TGT_F}${TGT[0].abspath()}'
 vars=['CCDEPS']
ext_in=['.h']
scan=c_preproc.scan
--- a/waflib/Tools/c_aliases.py
+++ b/waflib/Tools/c_aliases.py
@@ -29,9 +29,9 @@
 feats.append('java')
 if'java'in exts:
return'java'
- if type in['program','shlib','stlib']:
+ if type in('program','shlib','stlib'):
 for x in feats:
- if x in['cxx','d','c']:
+ if x in('cxx','d','c'):
 feats.append(x+type)
return feats
def set_features(kw,_type):
--- a/waflib/Tools/c_config.py
+++ b/waflib/Tools/c_config.py
@@ -43,15 +43,19 @@
 MACRO_TO_DESTOS={'__linux__':'linux','__GNU__':'gnu','__FreeBSD__':'freebsd','__NetBSD__':'netbsd','__OpenBSD__':'openbsd','__sun':'sunos','__hpux':'hpux','__sgi':'irix','_AIX':'aix','__CYGWIN__':'cygwin','__MSYS__':'msys','_UWIN':'uwin','_WIN64':'win32','_WIN32':'win32','__ENVIRONMENT_MAC_OS_X_VERSION_MIN_REQUIRED__':'darwin','__ENVIRONMENT_IPHONE_OS_VERSION_MIN_REQUIRED__':'darwin','__QNX__':'qnx','__native_client__':'nacl'}
 MACRO_TO_DEST_CPU={'__x86_64__':'x86_64','__amd64__':'x86_64','__i386__':'x86','__ia64__':'ia','__mips__':'mips','__sparc__':'sparc','__alpha__':'alpha','__aarch64__':'aarch64','__thumb__':'thumb','__arm__':'arm','__hppa__':'hppa','__powerpc__':'powerpc','__ppc__':'powerpc','__convex__':'convex','__m68k__':'m68k','__s390x__':'s390x','__s390__':'s390','__sh__':'sh',}
 @conf
-def parse_flags(self,line,uselib_store,env=None,force_static=False):
+def parse_flags(self,line,uselib_store,env=None,force_static=False,posix=None):
assert(isinstance(line,str))
env=env or self.env
- app=env.append_value
- appu=env.append_unique
- lex=shlex.shlex(line,posix=False)
+ if posix is None:
+ posix=True
+ if'\\'in line:
+ posix=('\\ 'in line)or('\\\\'in line)
+ lex=shlex.shlex(line,posix=posix)
lex.whitespace_split=True
lex.commenters=''
lst=list(lex)
+ app=env.append_value
+ appu=env.append_unique
uselib=uselib_store
while lst:
x=lst.pop(0)
@@ -60,7 +64,7 @@
if st=='-I'or st=='/I':
if not ot:ot=lst.pop(0)
 appu('INCLUDES_'+uselib,[ot])
- elif st=='-include':
+ elif st=='-i':
tmp=[x,lst.pop(0)]
 app('CFLAGS',tmp)
 app('CXXFLAGS',tmp)
@@ -84,6 +88,12 @@
 appu('FRAMEWORK_'+uselib,[lst.pop(0)])
 elif x.startswith('-F'):
 appu('FRAMEWORKPATH_'+uselib,[x[2:]])
+ elif x=='-Wl,-rpath':
+ app('RPATH_'+uselib,lst.pop(0))
+ elif x.startswith('-Wl,-R'):
+ app('RPATH_'+uselib,x[6:])
+ elif x.startswith('-Wl,-rpath,'):
+ app('RPATH_'+uselib,x[11:])
 elif x.startswith('-Wl'):
 app('LINKFLAGS_'+uselib,[x])
 elif x.startswith('-m')or x.startswith('-f')or x.startswith('-dynamic'):
@@ -91,7 +101,7 @@
 app('CXXFLAGS_'+uselib,[x])
 elif x.startswith('-bundle'):
 app('LINKFLAGS_'+uselib,[x])
- elif x.startswith('-undefined'):
+ elif x.startswith('-undefined')or x.startswith('-Xlinker'):
 arg=lst.pop(0)
 app('LINKFLAGS_'+uselib,[x,arg])
 elif x.startswith('-arch')or x.startswith('-isysroot'):
@@ -102,11 +112,6 @@
 elif x.endswith('.a')or x.endswith('.so')or x.endswith('.dylib')or x.endswith('.lib'):
 appu('LINKFLAGS_'+uselib,[x])
 @conf
-def ret_msg(self,f,kw):
- if isinstance(f,str):
- return f
- return f(kw)
-@conf
def validate_cfg(self,kw):
if not'path'in kw:
if not self.env.PKGCONFIG:
@@ -132,14 +137,25 @@
if not'msg'in kw:
kw['msg']='Checking for %r %s %s'%(kw['package'],cfg_ver[x],kw[y])
return
+ if not'define_name'in kw:
+ pkgname=kw.get('uselib_store',kw['package'].upper())
+ kw['define_name']=self.have_define(pkgname)
+ if not'uselib_store'in kw:
+ self.undefine(kw['define_name'])
if not'msg'in kw:
kw['msg']='Checking for %r'%(kw['package']or kw['path'])
@conf
def exec_cfg(self,kw):
+ path=Utils.to_list(kw['path'])
def define_it():
- self.define(self.have_define(kw.get('uselib_store',kw['package'])),1,0)
+ pkgname=kw.get('uselib_store',kw['package'].upper())
+ if kw.get('global_define'):
+ self.define(self.have_define(kw['package']),1,False)
+ else:
+ self.env.append_unique('DEFINES_%s'%pkgname,"%s=1"%self.have_define(pkgname))
+ self.env[self.have_define(pkgname)]=1
if'atleast_pkgconfig_version'in kw:
- cmd=[kw['path'],'--atleast-pkgconfig-version=%s'%kw['atleast_pkgconfig_version']]
+ cmd=path+['--atleast-pkgconfig-version=%s'%kw['atleast_pkgconfig_version']]
self.cmd_and_log(cmd)
if not'okmsg'in kw:
kw['okmsg']='yes'
@@ -147,22 +163,22 @@
for x in cfg_ver:
 y=x.replace('-','_')
 if y in kw:
- self.cmd_and_log([kw['path'],'--%s=%s'%(x,kw[y]),kw['package']])
+ self.cmd_and_log(path+['--%s=%s'%(x,kw[y]),kw['package']])
if not'okmsg'in kw:
kw['okmsg']='yes'
define_it()
break
if'modversion'in kw:
- version=self.cmd_and_log([kw['path'],'--modversion',kw['modversion']]).strip()
+ version=self.cmd_and_log(path+['--modversion',kw['modversion']]).strip()
 self.define('%s_VERSION'%Utils.quote_define_name(kw.get('uselib_store',kw['modversion'])),version)
 return version
- lst=[kw['path']]
+ lst=[]+path
 defi=kw.get('define_variable',None)
 if not defi:
 defi=self.env.PKG_CONFIG_DEFINES or{}
 for key,val in defi.items():
 lst.append('--define-variable=%s=%s'%(key,val))
- static=False
+ static=kw.get('force_static',False)
 if'args'in kw:
args=Utils.to_list(kw['args'])
if'--static'in args or'--static-libs'in args:
@@ -184,7 +200,7 @@
if not'okmsg'in kw:
kw['okmsg']='yes'
define_it()
- self.parse_flags(ret,kw.get('uselib_store',kw['package'].upper()),kw.get('env',self.env),force_static=static)
+ self.parse_flags(ret,kw.get('uselib_store',kw['package'].upper()),kw.get('env',self.env),force_static=static,posix=kw.get('posix',None))
 return ret
@conf
def check_cfg(self,*k,**kw):
@@ -194,24 +210,37 @@
kw['args']=' '.join(lst[1:])
self.validate_cfg(kw)
if'msg'in kw:
- self.start_msg(kw['msg'])
+ self.start_msg(kw['msg'],**kw)
ret=None
try:
ret=self.exec_cfg(kw)
except self.errors.WafError:
if'errmsg'in kw:
- self.end_msg(kw['errmsg'],'YELLOW')
+ self.end_msg(kw['errmsg'],'YELLOW',**kw)
if Logs.verbose>1:
raise
else:
 self.fatal('The configuration failed')
 else:
+ if not ret:
+ ret=True
kw['success']=ret
if'okmsg'in kw:
- self.end_msg(self.ret_msg(kw['okmsg'],kw))
+ self.end_msg(self.ret_msg(kw['okmsg'],kw),**kw)
return ret
+def build_fun(bld):
+ if bld.kw['compile_filename']:
+ node=bld.srcnode.make_node(bld.kw['compile_filename'])
+ node.write(bld.kw['code'])
+ o=bld(features=bld.kw['features'],source=bld.kw['compile_filename'],target='testprog')
+ for k,v in bld.kw.items():
+ setattr(o,k,v)
+ if not bld.kw.get('quiet',None):
+ bld.conf.to_log("==>\n%s\n<=="%bld.kw['code'])
 @conf
def validate_c(self,kw):
+ if not'build_fun'in kw:
+ kw['build_fun']=build_fun
if not'env'in kw:
kw['env']=self.env.derive()
env=kw['env']
@@ -306,7 +335,7 @@
kw['msg']='Checking for code snippet'
if not'errmsg'in kw:
kw['errmsg']='no'
- for(flagsname,flagstype)in[('cxxflags','compiler'),('cflags','compiler'),('linkflags','linker')]:
+ for(flagsname,flagstype)in(('cxxflags','compiler'),('cflags','compiler'),('linkflags','linker')):
 if flagsname in kw:
if not'msg'in kw:
kw['msg']='Checking for %s flags %s'%(flagstype,kw[flagsname])
@@ -368,12 +397,12 @@
@conf
def check(self,*k,**kw):
self.validate_c(kw)
- self.start_msg(kw['msg'])
+ self.start_msg(kw['msg'],**kw)
ret=None
try:
- ret=self.run_c_code(*k,**kw)
+ ret=self.run_build(*k,**kw)
except self.errors.ConfigurationError:
- self.end_msg(kw['errmsg'],'YELLOW')
+ self.end_msg(kw['errmsg'],'YELLOW',**kw)
if Logs.verbose>1:
raise
else:
@@ -382,10 +411,10 @@
kw['success']=ret
ret=self.post_check(*k,**kw)
if not ret:
- self.end_msg(kw['errmsg'],'YELLOW')
+ self.end_msg(kw['errmsg'],'YELLOW',**kw)
 self.fatal('The configuration failed %r'%ret)
 else:
- self.end_msg(self.ret_msg(kw['okmsg'],kw))
+ self.end_msg(self.ret_msg(kw['okmsg'],kw),**kw)
return ret
class test_exec(Task.Task):
color='PINK'
@@ -408,65 +437,7 @@
 @after_method('apply_link')
 def test_exec_fun(self):
 self.create_task('test_exec',self.link_task.outputs[0])
-CACHE_RESULTS=1
-COMPILE_ERRORS=2
@conf
-def run_c_code(self,*k,**kw):
- lst=[str(v)for(p,v)in kw.items()if p!='env']
- h=Utils.h_list(lst)
- dir=self.bldnode.abspath()+os.sep+(not Utils.is_win32 and'.'or'')+'conf_check_'+Utils.to_hex(h)
- try:
- os.makedirs(dir)
- except OSError:
- pass
- try:
- os.stat(dir)
- except OSError:
- self.fatal('cannot use the configuration test folder %r'%dir)
- cachemode=getattr(Options.options,'confcache',None)
- if cachemode==CACHE_RESULTS:
- try:
- proj=ConfigSet.ConfigSet(os.path.join(dir,'cache_run_c_code'))
- except OSError:
- pass
- else:
- ret=proj['cache_run_c_code']
- if isinstance(ret,str)and ret.startswith('Test does not build'):
- self.fatal(ret)
- return ret
- bdir=os.path.join(dir,'testbuild')
- if not os.path.exists(bdir):
- os.makedirs(bdir)
- self.test_bld=bld=Build.BuildContext(top_dir=dir,out_dir=bdir)
- bld.init_dirs()
- bld.progress_bar=0
- bld.targets='*'
- if kw['compile_filename']:
- node=bld.srcnode.make_node(kw['compile_filename'])
- node.write(kw['code'])
- bld.logger=self.logger
- bld.all_envs.update(self.all_envs)
- bld.env=kw['env']
- o=bld(features=kw['features'],source=kw['compile_filename'],target='testprog')
- for k,v in kw.items():
- setattr(o,k,v)
- self.to_log("==>\n%s\n<=="%kw['code'])
- bld.targets='*'
- ret=-1
- try:
- try:
- bld.compile()
- except Errors.WafError:
- ret='Test does not build: %s'%Utils.ex_stack()
- self.fatal(ret)
- else:
- ret=getattr(bld,'retval',0)
- finally:
- proj=ConfigSet.ConfigSet()
- proj['cache_run_c_code']=ret
- proj.store(os.path.join(dir,'cache_run_c_code'))
- return ret
-@conf
def check_cxx(self,*k,**kw):
kw['compiler']='cxx'
return self.check(*k,**kw)
@@ -529,9 +500,7 @@
def have_define(self,key):
return(self.env.HAVE_PAT or'HAVE_%s')%Utils.quote_define_name(key)
@conf
-def write_config_header(self,configfile='',guard='',top=False,env=None,defines=True,headers=False,remove=True,define_prefix=''):
- if env:
- Logs.warn('Cannot pass env to write_config_header')
+def write_config_header(self,configfile='',guard='',top=False,defines=True,headers=False,remove=True,define_prefix=''):
if not configfile:configfile=WAF_CONFIG_H
waf_guard=guard or'W_%s_WAF'%Utils.quote_define_name(configfile)
node=top and self.bldnode or self.path.get_bld()
@@ -554,12 +523,16 @@
for x in self.env[INCKEYS]:
 lst.append('#include <%s>'%x)
 if defines:
- for x in self.env[DEFKEYS]:
- if self.is_defined(x):
- val=self.get_define(x)
- lst.append('#define %s %s'%(define_prefix+x,val))
- else:
- lst.append('/* #undef %s */'%(define_prefix+x))
+ tbl={}
+ for k in self.env['DEFINES']:
+ a,_,b=k.partition('=')
+ tbl[a]=b
+ for k in self.env[DEFKEYS]:
+ try:
+ txt='#define %s%s %s'%(define_prefix,k,tbl[k])
+ except KeyError:
+ txt='/* #undef %s%s */'%(define_prefix,k)
+ lst.append(txt)
return"\n".join(lst)
@conf
def cc_add_flags(conf):
@@ -584,7 +557,7 @@
conf.env.DEST_OS=Utils.unversioned_sys_platform()
 conf.load('cxx')
 @conf
-def get_cc_version(conf,cc,gcc=False,icc=False):
+def get_cc_version(conf,cc,gcc=False,icc=False,clang=False):
cmd=cc+['-dM','-E','-']
env=conf.env.env or None
try:
@@ -602,8 +575,12 @@
 conf.fatal('Could not determine the compiler type')
 if icc and out.find('__INTEL_COMPILER')<0:
 conf.fatal('Not icc/icpc')
+ if clang and out.find('__clang__')<0:
+ conf.fatal('Not clang/clang++')
+ if not clang and out.find('__clang__')>=0:
+ conf.fatal('Could not find g++, if renamed try eg: CXX=g++48 waf configure')
 k={}
- if icc or gcc:
+ if icc or gcc or clang:
out=out.splitlines()
for line in out:
lst=shlex.split(line)
@@ -645,9 +622,15 @@
conf.env['CC_VERSION']=(ver[:-2],ver[-2],ver[-1])
else:
 if isD('__clang__'):
- conf.env['CC_VERSION']=(k['__clang_major__'],k['__clang_minor__'],k['__clang_patchlevel__'])
+ try:
+ conf.env['CC_VERSION']=(k['__clang_major__'],k['__clang_minor__'],k['__clang_patchlevel__'])
+ except KeyError:
+ conf.env['CC_VERSION']=(k['__GNUC__'],k['__GNUC_MINOR__'],k['__GNUC_PATCHLEVEL__'])
else:
- conf.env['CC_VERSION']=(k['__GNUC__'],k['__GNUC_MINOR__'],k['__GNUC_PATCHLEVEL__'])
+ try:
+ conf.env['CC_VERSION']=(k['__GNUC__'],k['__GNUC_MINOR__'],k['__GNUC_PATCHLEVEL__'])
+ except KeyError:
+ conf.env['CC_VERSION']=(k['__GNUC__'],k['__GNUC_MINOR__'],0)
return k
@conf
def get_xlc_version(conf,cc):
@@ -687,7 +670,7 @@
@conf
def add_as_needed(self):
if self.env.DEST_BINFMT=='elf'and'gcc'in(self.env.CXX_NAME,self.env.CC_NAME):
- self.env.append_unique('LINKFLAGS','--as-needed')
+ self.env.append_unique('LINKFLAGS','-Wl,--as-needed')
 class cfgtask(Task.TaskBase):
def display(self):
return''
@@ -708,12 +691,10 @@
return 1
@conf
def multicheck(self,*k,**kw):
- self.start_msg(kw.get('msg','Executing %d configuration tests'%len(k)))
+ self.start_msg(kw.get('msg','Executing %d configuration tests'%len(k)),**kw)
 class par(object):
def __init__(self):
self.keep=False
- self.cache_global=Options.cache_global
- self.nocache=Options.options.nocache
self.returned_tasks=[]
 self.task_sigs={}
 def total(self):
@@ -741,6 +722,6 @@
x.logger.memhandler.flush()
for x in tasks:
if x.hasrun!=Task.SUCCESS:
- self.end_msg(kw.get('errmsg','no'),color='YELLOW')
+ self.end_msg(kw.get('errmsg','no'),color='YELLOW',**kw)
 self.fatal(kw.get('fatalmsg',None)or'One of the tests has failed, see the config.log for more information')
- self.end_msg('ok')
+ self.end_msg('ok',**kw)
--- a/waflib/Tools/c_preproc.py
+++ b/waflib/Tools/c_preproc.py
@@ -15,7 +15,7 @@
standard_includes=[]
use_trigraphs=0
strict_quotes=0
-g_optrans={'not':'!','and':'&&','bitand':'&','and_eq':'&=','or':'||','bitor':'|','or_eq':'|=','xor':'^','xor_eq':'^=','compl':'~',}
+g_optrans={'not':'!','not_eq':'!','and':'&&','and_eq':'&=','or':'||','or_eq':'|=','xor':'^','xor_eq':'^=','bitand':'&','bitor':'|','compl':'~',}
 re_lines=re.compile('^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*)\r*$',re.IGNORECASE|re.MULTILINE)
 re_mac=re.compile("^[a-zA-Z_]\w*")
 re_fun=re.compile('^[a-zA-Z_][a-zA-Z0-9_]*[(]')
@@ -71,17 +71,17 @@
elif d=='*':c=a*b
elif d=='/':c=a/b
elif d=='^':c=a^b
- elif d=='|':c=a|b
- elif d=='||':c=int(a or b)
- elif d=='&':c=a&b
- elif d=='&&':c=int(a and b)
elif d=='==':c=int(a==b)
- elif d=='!=':c=int(a!=b)
+ elif d=='|'or d=='bitor':c=a|b
+ elif d=='||'or d=='or':c=int(a or b)
+ elif d=='&'or d=='bitand':c=a&b
+ elif d=='&&'or d=='and':c=int(a and b)
+ elif d=='!='or d=='not_eq':c=int(a!=b)
+ elif d=='^'or d=='xor':c=int(a^b)
elif d=='<=':c=int(a<=b)
elif d=='<':c=int(a<b)
elif d=='>':c=int(a>b)
elif d=='>=':c=int(a>=b)
- elif d=='^':c=int(a^b)
elif d=='<<':c=a<<b
elif d=='>>':c=a>>b
else:c=0
@@ -381,7 +381,8 @@
return'"',toks[0][1]
else:
if toks[0][1]=='<'and toks[-1][1]=='>':
- return stringize(toks).lstrip('<').rstrip('>')
+ ret='<',stringize(toks).lstrip('<').rstrip('>')
+ return ret
 raise PreprocError("could not parse include %s."%txt)
 def parse_char(txt):
 if not txt:raise PreprocError("attempted to parse a null char")
@@ -410,7 +411,9 @@
v=m(name)
if v:
if name==IDENT:
- try:v=g_optrans[v];name=OP
+ try:
+ g_optrans[v];
+ name=OP
except KeyError:
if v.lower()=="true":
v=1
@@ -472,6 +475,9 @@
nd[tup]=ret
return ret
def tryfind(self,filename):
+ if filename.endswith('.moc'):
+ self.names.append(filename)
+ return None
self.curfile=filename
found=self.cached_find_resource(self.currentnode_stack[-1],filename)
for n in self.nodepaths:
@@ -480,8 +486,7 @@
found=self.cached_find_resource(n,filename)
if found and not found in self.ban_includes:
self.nodes.append(found)
- if filename[-4:]!='.moc':
- self.addlines(found)
+ self.addlines(found)
else:
if not filename in self.names:
self.names.append(filename)
@@ -519,8 +524,7 @@
try:
self.parse_cache=bld.parse_cache
except AttributeError:
- bld.parse_cache={}
- self.parse_cache=bld.parse_cache
+ self.parse_cache=bld.parse_cache={}
 self.current_file=node
self.addlines(node)
if env['DEFINES']:
--- a/waflib/Tools/ccroot.py
+++ b/waflib/Tools/ccroot.py
@@ -75,12 +75,15 @@
if self.env.DEST_BINFMT=='pe':
name=name+'-'+nums[0]
elif self.env.DEST_OS=='openbsd':
- pattern='%s.%s.%s'%(pattern,nums[0],nums[1])
+ pattern='%s.%s'%(pattern,nums[0])
+ if len(nums)>=2:
+ pattern+='.%s'%nums[1]
tmp=folder+os.sep+pattern%name
target=self.generator.path.find_or_declare(tmp)
self.set_outputs(target)
class stlink_task(link_task):
 run_str='${AR} ${ARFLAGS} ${AR_TGT_F}${TGT} ${AR_SRC_F}${SRC}'
+ chmod=Utils.O644
def rm_tgt(cls):
old=cls.run
def wrap(self):
@@ -111,7 +114,7 @@
except AttributeError:
inst_to=self.link_task.__class__.inst_to
if inst_to:
- self.install_task=self.bld.install_files(inst_to,self.link_task.outputs[:],env=self.env,chmod=self.link_task.chmod)
+ self.install_task=self.bld.install_files(inst_to,self.link_task.outputs[:],env=self.env,chmod=self.link_task.chmod,task=self.link_task)
@taskgen_method
def use_rec(self,name,**kw):
if name in self.tmp_use_not or name in self.tmp_use_seen:
@@ -139,6 +142,8 @@
y.tmp_use_var='STLIB'
p=self.tmp_use_prec
for x in self.to_list(getattr(y,'use',[])):
+ if self.env["STLIB_"+x]:
+ continue
try:
p[x].append(name)
except KeyError:
@@ -190,11 +195,11 @@
y=self.bld.get_tgen_by_name(x)
var=y.tmp_use_var
if var and link_task:
- if var=='LIB'or y.tmp_use_stlib:
+ if var=='LIB'or y.tmp_use_stlib or x in names:
self.env.append_value(var,[y.target[y.target.rfind(os.sep)+1:]])
self.link_task.dep_nodes.extend(y.link_task.outputs)
tmp_path=y.link_task.outputs[0].parent.path_from(self.bld.bldnode)
- self.env.append_value(var+'PATH',[tmp_path])
+ self.env.append_unique(var+'PATH',[tmp_path])
else:
if y.tmp_use_objects:
self.add_objects_from_tgen(y)
@@ -205,11 +210,11 @@
for x in names:
try:
y=self.bld.get_tgen_by_name(x)
- except Exception:
+ except Errors.WafError:
if not self.env['STLIB_'+x]and not x in self.uselib:
self.uselib.append(x)
else:
- for k in self.to_list(getattr(y,'uselib',[])):
+ for k in self.to_list(getattr(y,'use',[])):
if not self.env['STLIB_'+k]and not k in self.uselib:
self.uselib.append(k)
@taskgen_method
@@ -238,16 +243,17 @@
def propagate_uselib_vars(self):
_vars=self.get_uselib_vars()
env=self.env
- for x in _vars:
- y=x.lower()
- env.append_unique(x,self.to_list(getattr(self,y,[])))
- for x in self.features:
- for var in _vars:
- compvar='%s_%s'%(var,x)
- env.append_value(var,env[compvar])
- for x in self.to_list(getattr(self,'uselib',[])):
- for v in _vars:
- env.append_value(v,env[v+'_'+x])
+ app=env.append_value
+ feature_uselib=self.features+self.to_list(getattr(self,'uselib',[]))
+ for var in _vars:
+ y=var.lower()
+ val=getattr(self,y,[])
+ if val:
+ app(var,self.to_list(val))
+ for x in feature_uselib:
+ val=env['%s_%s'%(var,x)]
+ if val:
+ app(var,val)
@feature('cshlib','cxxshlib','fcshlib') @after_method('apply_link')def apply_implib(self):
@@ -271,14 +277,19 @@
self.link_task.dep_nodes.append(node)
else:
self.link_task.inputs.append(node)
- try:
- inst_to=self.install_path
- except AttributeError:
- inst_to=self.link_task.__class__.inst_to
- if not inst_to:
- return
- self.implib_install_task=self.bld.install_as('${LIBDIR}/%s'%implib.name,implib,self.env)
-re_vnum=re.compile('^([1-9]\\d*|0)[.]([1-9]\\d*|0)[.]([1-9]\\d*|0)$')
+ if getattr(self,'install_task',None):
+ try:
+ inst_to=self.install_path_implib
+ except AttributeError:
+ try:
+ inst_to=self.install_path
+ except AttributeError:
+ inst_to='${IMPLIBDIR}'
+ self.install_task.dest='${BINDIR}'
+ if not self.env.IMPLIBDIR:
+ self.env.IMPLIBDIR=self.env.LIBDIR
+ self.implib_install_task=self.bld.install_files(inst_to,implib,env=self.env,chmod=self.link_task.chmod,task=self.link_task)
+re_vnum=re.compile('^([1-9]\\d*|0)([.]([1-9]\\d*|0)[.]([1-9]\\d*|0))?$')
 @feature('cshlib','cxxshlib','dshlib','fcshlib','vnum')
 @after_method('apply_link','propagate_uselib_vars')
 def apply_vnum(self):
@@ -300,7 +311,10 @@
v=self.env.SONAME_ST%name2
 self.env.append_value('LINKFLAGS',v.split())
 if self.env.DEST_OS!='openbsd':
- self.create_task('vnum',node,[node.parent.find_or_declare(name2),node.parent.find_or_declare(name3)])
+ outs=[node.parent.find_or_declare(name3)]
+ if name2!=name3:
+ outs.append(node.parent.find_or_declare(name2))
+ self.create_task('vnum',node,outs)
 if getattr(self,'install_task',None):
self.install_task.hasrun=Task.SKIP_ME
bld=self.bld
@@ -311,9 +325,12 @@
self.vnum_install_task=(t1,)
else:
t1=bld.install_as(path+os.sep+name3,node,env=self.env,chmod=self.link_task.chmod)
- t2=bld.symlink_as(path+os.sep+name2,name3)
t3=bld.symlink_as(path+os.sep+libname,name3)
- self.vnum_install_task=(t1,t2,t3)
+ if name2!=name3:
+ t2=bld.symlink_as(path+os.sep+name2,name3)
+ self.vnum_install_task=(t1,t2,t3)
+ else:
+ self.vnum_install_task=(t1,t3)
if'-dynamiclib'in self.env['LINKFLAGS']:
try:
inst_to=self.install_path
@@ -327,6 +344,8 @@
color='CYAN'
quient=True
ext_in=['.bin']
+ def keyword(self):
+ return'Symlinking'
def run(self):
for x in self.outputs:
path=x.abspath()
--- /dev/null
+++ b/waflib/Tools/clang.py
@@ -1,0 +1,20 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import os,sys
+from waflib.Tools import ccroot,ar,gcc
+from waflib.Configure import conf
+@conf
+def find_clang(conf):
+ cc=conf.find_program('clang',var='CC')
+ conf.get_cc_version(cc,clang=True)
+ conf.env.CC_NAME='clang'
+def configure(conf):
+ conf.find_clang()
+ conf.find_ar()
+ conf.gcc_common_flags()
+ conf.gcc_modifier_platform()
+ conf.cc_load_tools()
+ conf.cc_add_flags()
+ conf.link_add_flags()
--- /dev/null
+++ b/waflib/Tools/clangxx.py
@@ -1,0 +1,20 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+import os,sys
+from waflib.Tools import ccroot,ar,gxx
+from waflib.Configure import conf
+@conf
+def find_clangxx(conf):
+ cxx=conf.find_program('clang++',var='CXX')
+ conf.get_cc_version(cxx,clang=True)
+ conf.env.CXX_NAME='clang'
+def configure(conf):
+ conf.find_clangxx()
+ conf.find_ar()
+ conf.gxx_common_flags()
+ conf.gxx_modifier_platform()
+ conf.cxx_load_tools()
+ conf.cxx_add_flags()
+ conf.link_add_flags()
--- a/waflib/Tools/compiler_c.py
+++ b/waflib/Tools/compiler_c.py
@@ -2,17 +2,21 @@
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-import os,sys,imp,types
+import os,sys,imp,types,re
from waflib.Tools import ccroot
from waflib import Utils,Configure
from waflib.Logs import debug
-c_compiler={'win32':['msvc','gcc'],'cygwin':['gcc'],'darwin':['gcc'],'aix':['xlc','gcc'],'linux':['gcc','icc'],'sunos':['suncc','gcc'],'irix':['gcc','irixcc'],'hpux':['gcc'],'gnu':['gcc'],'java':['gcc','msvc','icc'],'default':['gcc'],}
+c_compiler={'win32':['msvc','gcc','clang'],'cygwin':['gcc'],'darwin':['clang','gcc'],'aix':['xlc','gcc','clang'],'linux':['gcc','clang','icc'],'sunos':['suncc','gcc'],'irix':['gcc','irixcc'],'hpux':['gcc'],'gnu':['gcc','clang'],'java':['gcc','msvc','clang','icc'],'default':['gcc','clang'],}
+def default_compilers():
+ build_platform=Utils.unversioned_sys_platform()
+ possible_compiler_list=c_compiler.get(build_platform,c_compiler['default'])
+ return' '.join(possible_compiler_list)
def configure(conf):
- try:test_for_compiler=conf.options.check_c_compiler
+ try:test_for_compiler=conf.options.check_c_compiler or default_compilers()
 except AttributeError:conf.fatal("Add options(opt): opt.load('compiler_c')")
- for compiler in test_for_compiler.split():
+ for compiler in re.split('[ ,]+',test_for_compiler):
 conf.env.stash()
- conf.start_msg('Checking for %r (c compiler)'%compiler)
+ conf.start_msg('Checking for %r (C compiler)'%compiler)
 try:
conf.load(compiler)
except conf.errors.ConfigurationError ,e:
@@ -26,14 +30,11 @@
break
conf.end_msg(False)
else:
- conf.fatal('could not configure a c compiler!')
+ conf.fatal('could not configure a C compiler!')
 def options(opt):
+ test_for_compiler=default_compilers()
 opt.load_special_tools('c_*.py',ban=['c_dumbpreproc.py'])
- global c_compiler
- build_platform=Utils.unversioned_sys_platform()
- possible_compiler_list=c_compiler[build_platform in c_compiler and build_platform or'default']
- test_for_compiler=' '.join(possible_compiler_list)
- cc_compiler_opts=opt.add_option_group("C Compiler Options")
- cc_compiler_opts.add_option('--check-c-compiler',default="%s"%test_for_compiler,help='On this platform (%s) the following C-Compiler will be checked by default: "%s"'%(build_platform,test_for_compiler),dest="check_c_compiler")
+ cc_compiler_opts=opt.add_option_group('Configuration options')
+ cc_compiler_opts.add_option('--check-c-compiler',default=None,help='list of C compilers to try [%s]'%test_for_compiler,dest="check_c_compiler")
 for x in test_for_compiler.split():
 opt.load('%s'%x)
--- a/waflib/Tools/compiler_cxx.py
+++ b/waflib/Tools/compiler_cxx.py
@@ -2,17 +2,21 @@
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-import os,sys,imp,types
+import os,sys,imp,types,re
from waflib.Tools import ccroot
from waflib import Utils,Configure
from waflib.Logs import debug
-cxx_compiler={'win32':['msvc','g++'],'cygwin':['g++'],'darwin':['g++'],'aix':['xlc++','g++'],'linux':['g++','icpc'],'sunos':['sunc++','g++'],'irix':['g++'],'hpux':['g++'],'gnu':['g++'],'java':['g++','msvc','icpc'],'default':['g++']}
+cxx_compiler={'win32':['msvc','g++','clang++'],'cygwin':['g++'],'darwin':['clang++','g++'],'aix':['xlc++','g++','clang++'],'linux':['g++','clang++','icpc'],'sunos':['sunc++','g++'],'irix':['g++'],'hpux':['g++'],'gnu':['g++','clang++'],'java':['g++','msvc','clang++','icpc'],'default':['g++','clang++']}
+def default_compilers():
+ build_platform=Utils.unversioned_sys_platform()
+ possible_compiler_list=cxx_compiler.get(build_platform,cxx_compiler['default'])
+ return' '.join(possible_compiler_list)
def configure(conf):
- try:test_for_compiler=conf.options.check_cxx_compiler
+ try:test_for_compiler=conf.options.check_cxx_compiler or default_compilers()
 except AttributeError:conf.fatal("Add options(opt): opt.load('compiler_cxx')")
- for compiler in test_for_compiler.split():
+ for compiler in re.split('[ ,]+',test_for_compiler):
 conf.env.stash()
- conf.start_msg('Checking for %r (c++ compiler)'%compiler)
+ conf.start_msg('Checking for %r (C++ compiler)'%compiler)
 try:
conf.load(compiler)
except conf.errors.ConfigurationError ,e:
@@ -26,14 +30,11 @@
break
conf.end_msg(False)
else:
- conf.fatal('could not configure a c++ compiler!')
+ conf.fatal('could not configure a C++ compiler!')
 def options(opt):
+ test_for_compiler=default_compilers()
 opt.load_special_tools('cxx_*.py')
- global cxx_compiler
- build_platform=Utils.unversioned_sys_platform()
- possible_compiler_list=cxx_compiler[build_platform in cxx_compiler and build_platform or'default']
- test_for_compiler=' '.join(possible_compiler_list)
- cxx_compiler_opts=opt.add_option_group('C++ Compiler Options')
- cxx_compiler_opts.add_option('--check-cxx-compiler',default="%s"%test_for_compiler,help='On this platform (%s) the following C++ Compiler will be checked by default: "%s"'%(build_platform,test_for_compiler),dest="check_cxx_compiler")
+ cxx_compiler_opts=opt.add_option_group('Configuration options')
+ cxx_compiler_opts.add_option('--check-cxx-compiler',default=None,help='list of C++ compilers to try [%s]'%test_for_compiler,dest="check_cxx_compiler")
 for x in test_for_compiler.split():
 opt.load('%s'%x)
--- a/waflib/Tools/compiler_d.py
+++ b/waflib/Tools/compiler_d.py
@@ -2,12 +2,19 @@
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-import os,sys,imp,types
+import os,sys,imp,types,re
from waflib import Utils,Configure,Options,Logs
+d_compiler={'default':['gdc','dmd','ldc2']}
+def default_compilers():
+ build_platform=Utils.unversioned_sys_platform()
+ possible_compiler_list=d_compiler.get(build_platform,d_compiler['default'])
+ return' '.join(possible_compiler_list)
def configure(conf):
- for compiler in conf.options.dcheck.split(','):
+ try:test_for_compiler=conf.options.check_d_compiler or default_compilers()
+ except AttributeError:conf.fatal("Add options(opt): opt.load('compiler_d')")
+ for compiler in re.split('[ ,]+',test_for_compiler):
 conf.env.stash()
- conf.start_msg('Checking for %r (d compiler)'%compiler)
+ conf.start_msg('Checking for %r (D compiler)'%compiler)
 try:
conf.load(compiler)
except conf.errors.ConfigurationError ,e:
@@ -21,9 +28,10 @@
break
conf.end_msg(False)
else:
- conf.fatal('no suitable d compiler was found')
+ conf.fatal('could not configure a D compiler!')
 def options(opt):
- d_compiler_opts=opt.add_option_group('D Compiler Options')
- d_compiler_opts.add_option('--check-d-compiler',default='gdc,dmd,ldc2',action='store',help='check for the compiler [Default:gdc,dmd,ldc2]',dest='dcheck')
- for d_compiler in['gdc','dmd','ldc2']:
- opt.load('%s'%d_compiler)
+ test_for_compiler=default_compilers()
+ d_compiler_opts=opt.add_option_group('Configuration options')
+ d_compiler_opts.add_option('--check-d-compiler',default=None,help='list of D compilers to try [%s]'%test_for_compiler,dest='check_d_compiler')
+ for x in test_for_compiler.split():
+ opt.load('%s'%x)
--- a/waflib/Tools/compiler_fc.py
+++ b/waflib/Tools/compiler_fc.py
@@ -2,21 +2,20 @@
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-import os,sys,imp,types
+import os,sys,imp,types,re
from waflib import Utils,Configure,Options,Logs,Errors
from waflib.Tools import fc
 fc_compiler={'win32':['gfortran','ifort'],'darwin':['gfortran','g95','ifort'],'linux':['gfortran','g95','ifort'],'java':['gfortran','g95','ifort'],'default':['gfortran'],'aix':['gfortran']}
-def __list_possible_compiler(platform):
- try:
- return fc_compiler[platform]
- except KeyError:
- return fc_compiler["default"]
+def default_compilers():
+ build_platform=Utils.unversioned_sys_platform()
+ possible_compiler_list=fc_compiler.get(build_platform,fc_compiler['default'])
+ return' '.join(possible_compiler_list)
def configure(conf):
- try:test_for_compiler=conf.options.check_fc
+ try:test_for_compiler=conf.options.check_fortran_compiler or default_compilers()
 except AttributeError:conf.fatal("Add options(opt): opt.load('compiler_fc')")
- for compiler in test_for_compiler.split():
+ for compiler in re.split('[ ,]+',test_for_compiler):
 conf.env.stash()
- conf.start_msg('Checking for %r (fortran compiler)'%compiler)
+ conf.start_msg('Checking for %r (Fortran compiler)'%compiler)
 try:
conf.load(compiler)
except conf.errors.ConfigurationError ,e:
@@ -30,14 +29,11 @@
break
conf.end_msg(False)
else:
- conf.fatal('could not configure a fortran compiler!')
+ conf.fatal('could not configure a Fortran compiler!')
 def options(opt):
+ test_for_compiler=default_compilers()
 opt.load_special_tools('fc_*.py')
- build_platform=Utils.unversioned_sys_platform()
- detected_platform=Options.platform
- possible_compiler_list=__list_possible_compiler(detected_platform)
- test_for_compiler=' '.join(possible_compiler_list)
- fortran_compiler_opts=opt.add_option_group("Fortran Compiler Options")
- fortran_compiler_opts.add_option('--check-fortran-compiler',default="%s"%test_for_compiler,help='On this platform (%s) the following Fortran Compiler will be checked by default: "%s"'%(detected_platform,test_for_compiler),dest="check_fc")
- for compiler in test_for_compiler.split():
- opt.load('%s'%compiler)
+ fortran_compiler_opts=opt.add_option_group('Configuration options')
+ fortran_compiler_opts.add_option('--check-fortran-compiler',default=None,help='list of Fortran compiler to try [%s]'%test_for_compiler,dest="check_fortran_compiler")
+ for x in test_for_compiler.split():
+ opt.load('%s'%x)
--- a/waflib/Tools/cxx.py
+++ b/waflib/Tools/cxx.py
@@ -11,7 +11,7 @@
if not'.c'in TaskGen.task_gen.mappings:
TaskGen.task_gen.mappings['.c']=TaskGen.task_gen.mappings['.cpp']
class cxx(Task.Task):
- run_str='${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CXX_SRC_F}${SRC} ${CXX_TGT_F}${TGT}'
+ run_str='${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CXX_SRC_F}${SRC} ${CXX_TGT_F}${TGT[0].abspath()}'
 vars=['CXXDEPS']
ext_in=['.h']
scan=c_preproc.scan
--- a/waflib/Tools/dmd.py
+++ b/waflib/Tools/dmd.py
@@ -8,9 +8,9 @@
@conf
def find_dmd(conf):
conf.find_program(['dmd','dmd2','ldc'],var='D')
- out=conf.cmd_and_log([conf.env.D,'--help'])
+ out=conf.cmd_and_log(conf.env.D+['--help'])
 if out.find("D Compiler v")==-1:
- out=conf.cmd_and_log([conf.env.D,'-version'])
+ out=conf.cmd_and_log(conf.env.D+['-version'])
 if out.find("based on DMD v1.")==-1:
 conf.fatal("detected compiler is not dmd/ldc")
 @conf
@@ -40,7 +40,7 @@
def configure(conf):
conf.find_dmd()
if sys.platform=='win32':
- out=conf.cmd_and_log([conf.env.D,'--help'])
+ out=conf.cmd_and_log(conf.env.D+['--help'])
 if out.find("D Compiler v2.")>-1:
 conf.fatal('dmd2 on Windows is not supported, use gdc or ldc2 instead')
 conf.load('ar')
--- a/waflib/Tools/fc.py
+++ b/waflib/Tools/fc.py
@@ -89,8 +89,6 @@
class fcshlib(fcprogram):
 inst_to='${LIBDIR}'
 class fcprogram_test(fcprogram):
- def can_retrieve_cache(self):
- return False
def runnable_status(self):
ret=super(fcprogram_test,self).runnable_status()
if ret==Task.SKIP_ME:
--- a/waflib/Tools/fc_config.py
+++ b/waflib/Tools/fc_config.py
@@ -2,10 +2,10 @@
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-import re,shutil,os,sys,string,shlex
+import re,os,sys,shlex
from waflib.Configure import conf
-from waflib.TaskGen import feature,after_method,before_method
-from waflib import Build,Utils
+from waflib.TaskGen import feature,before_method
+from waflib import Utils
FC_FRAGMENT=' program main\n end program main\n'
FC_FRAGMENT2=' PROGRAM MAIN\n END\n'
@conf
@@ -115,7 +115,7 @@
@conf
def check_fortran_verbose_flag(self,*k,**kw):
 self.start_msg('fortran link verbose flag')
- for x in['-v','--verbose','-verbose','-V']:
+ for x in('-v','--verbose','-verbose','-V'):
 try:
self.check_cc(features='fc fcprogram_test',fragment=FC_FRAGMENT2,compile_filename='test.f',linkflags=[x],mandatory=True)
except self.errors.ConfigurationError:
@@ -148,6 +148,25 @@
return final_flags
 SPACE_OPTS=re.compile('^-[LRuYz]$')
 NOSPACE_OPTS=re.compile('^-[RL]')
+def _parse_flink_token(lexer,token,tmp_flags):
+ if _match_ignore(token):
+ pass
+ elif token.startswith('-lkernel32')and sys.platform=='cygwin':
+ tmp_flags.append(token)
+ elif SPACE_OPTS.match(token):
+ t=lexer.get_token()
+ if t.startswith('P,'):
+ t=t[2:]
+ for opt in t.split(os.pathsep):
+ tmp_flags.append('-L%s'%opt)
+ elif NOSPACE_OPTS.match(token):
+ tmp_flags.append(token)
+ elif POSIX_LIB_FLAGS.match(token):
+ tmp_flags.append(token)
+ else:
+ pass
+ t=lexer.get_token()
+ return t
def _parse_flink_line(line,final_flags):
lexer=shlex.shlex(line,posix=True)
lexer.whitespace_split=True
@@ -154,26 +173,7 @@
t=lexer.get_token()
tmp_flags=[]
while t:
- def parse(token):
- if _match_ignore(token):
- pass
- elif token.startswith('-lkernel32')and sys.platform=='cygwin':
- tmp_flags.append(token)
- elif SPACE_OPTS.match(token):
- t=lexer.get_token()
- if t.startswith('P,'):
- t=t[2:]
- for opt in t.split(os.pathsep):
- tmp_flags.append('-L%s'%opt)
- elif NOSPACE_OPTS.match(token):
- tmp_flags.append(token)
- elif POSIX_LIB_FLAGS.match(token):
- tmp_flags.append(token)
- else:
- pass
- t=lexer.get_token()
- return t
- t=parse(t)
+ t=_parse_flink_token(lexer,t,tmp_flags)
final_flags.extend(tmp_flags)
return final_flags
@conf
@@ -240,9 +240,9 @@
bld(features='fc fcstlib',source='test.f',target='test')
bld(features='c fcprogram',source='main.c',target='app',use='test')
def mangling_schemes():
- for u in['_','']:
- for du in['','_']:
- for c in["lower","upper"]:
+ for u in('_',''):
+ for du in('','_'):
+ for c in("lower","upper"):
 yield(u,du,c)
def mangle_name(u,du,c,name):
 return getattr(name,c)()+u+(name.find('_')!=-1 and du or'')
@@ -274,7 +274,7 @@
self.env['fcshlib_PATTERN']=self.env['pyext_PATTERN']
@conf
def detect_openmp(self):
- for x in['-fopenmp','-openmp','-mp','-xopenmp','-omp','-qsmp=omp']:
+ for x in('-fopenmp','-openmp','-mp','-xopenmp','-omp','-qsmp=omp'):
 try:
self.check_fc(msg='Checking for OpenMP flag %s'%x,fragment='program main\n call omp_get_num_threads()\nend program main',fcflags=x,linkflags=x,uselib_store='OPENMP')
except self.errors.ConfigurationError:
--- a/waflib/Tools/flex.py
+++ b/waflib/Tools/flex.py
@@ -28,5 +28,5 @@
def configure(conf):
 conf.find_program('flex',var='FLEX')
 conf.env.FLEXFLAGS=['-t']
- if re.search(r"\\msys\\[0-9.]+\\bin\\flex.exe$",conf.env.FLEX):
+ if re.search(r"\\msys\\[0-9.]+\\bin\\flex.exe$",conf.env.FLEX[0]):
conf.env.FLEX_MSYS=True
--- a/waflib/Tools/g95.py
+++ b/waflib/Tools/g95.py
@@ -9,7 +9,6 @@
@conf
def find_g95(conf):
 fc=conf.find_program('g95',var='FC')
- fc=conf.cmd_to_list(fc)
conf.get_g95_version(fc)
conf.env.FC_NAME='G95'
@conf
--- a/waflib/Tools/gcc.py
+++ b/waflib/Tools/gcc.py
@@ -2,15 +2,15 @@
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+import os,sys
+from waflib import Configure,Options,Utils
from waflib.Tools import ccroot,ar
from waflib.Configure import conf
@conf
def find_gcc(conf):
cc=conf.find_program(['gcc','cc'],var='CC')
- cc=conf.cmd_to_list(cc)
conf.get_cc_version(cc,gcc=True)
conf.env.CC_NAME='gcc'
- conf.env.CC=cc
@conf
def gcc_common_flags(conf):
v=conf.env
--- a/waflib/Tools/gdc.py
+++ b/waflib/Tools/gdc.py
@@ -8,8 +8,8 @@
@conf
def find_gdc(conf):
 conf.find_program('gdc',var='D')
- out=conf.cmd_and_log([conf.env.D,'--version'])
- if out.find("gdc ")==-1:
+ out=conf.cmd_and_log(conf.env.D+['--version'])
+ if out.find("gdc")==-1:
 conf.fatal("detected compiler is not gdc")
 @conf
def common_flags_gdc(conf):
--- a/waflib/Tools/gfortran.py
+++ b/waflib/Tools/gfortran.py
@@ -9,7 +9,6 @@
@conf
def find_gfortran(conf):
fc=conf.find_program(['gfortran','g77'],var='FC')
- fc=conf.cmd_to_list(fc)
conf.get_gfortran_version(fc)
conf.env.FC_NAME='GFORTRAN'
@conf
--- a/waflib/Tools/glib2.py
+++ b/waflib/Tools/glib2.py
@@ -3,8 +3,9 @@
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
import os
-from waflib import Task,Utils,Options,Errors,Logs
-from waflib.TaskGen import taskgen_method,before_method,after_method,feature
+from waflib import Context,Task,Utils,Options,Errors,Logs
+from waflib.TaskGen import taskgen_method,before_method,after_method,feature,extension
+from waflib.Configure import conf
@taskgen_method
def add_marshal_file(self,filename,prefix):
if not hasattr(self,'marshal_list'):
@@ -98,14 +99,6 @@
if type(filename_list)!='list':
filename_list=[filename_list]
self.settings_enum_files=filename_list
-def r_change_ext(self,ext):
- name=self.name
- k=name.rfind('.')
- if k>=0:
- name=name[:k]+ext
- else:
- name=name+ext
- return self.parent.find_or_declare([name])
 @feature('glib2')
 def process_settings(self):
enums_tgt_node=[]
@@ -136,7 +129,7 @@
source_list=enums_tgt_node+[schema_node]
schema_task.set_inputs(source_list)
 schema_task.env['GLIB_COMPILE_SCHEMAS_OPTIONS']=[("--schema-file="+k.abspath())for k in source_list]
- target_node=r_change_ext(schema_node,'.xml.valid')
+ target_node=schema_node.change_ext('.xml.valid')
 schema_task.set_outputs(target_node)
schema_task.env['GLIB_VALIDATE_SCHEMA_OUTPUT']=target_node.abspath()
def compile_schemas_callback(bld):
@@ -155,10 +148,69 @@
class glib_validate_schema(Task.Task):
 run_str='rm -f ${GLIB_VALIDATE_SCHEMA_OUTPUT} && ${GLIB_COMPILE_SCHEMAS} --dry-run ${GLIB_COMPILE_SCHEMAS_OPTIONS} && touch ${GLIB_VALIDATE_SCHEMA_OUTPUT}'
 color='PINK'
-def configure(conf):
+@extension('.gresource.xml')
+def process_gresource_source(self,node):
+ if not self.env['GLIB_COMPILE_RESOURCES']:
+ raise Errors.WafError("Unable to process GResource file - glib-compile-resources was not found during configure")
+ if'gresource'in self.features:
+ return
+ h_node=node.change_ext('_xml.h')
+ c_node=node.change_ext('_xml.c')
+ self.create_task('glib_gresource_source',node,[h_node,c_node])
+ self.source.append(c_node)
+@feature('gresource')
+def process_gresource_bundle(self):
+ for i in self.to_list(self.source):
+ node=self.path.find_resource(i)
+ task=self.create_task('glib_gresource_bundle',node,node.change_ext(''))
+ inst_to=getattr(self,'install_path',None)
+ if inst_to:
+ self.bld.install_files(inst_to,task.outputs)
+class glib_gresource_base(Task.Task):
+ color='BLUE'
+ base_cmd='${GLIB_COMPILE_RESOURCES} --sourcedir=${SRC[0].parent.srcpath()} --sourcedir=${SRC[0].bld_dir()}'
+ def scan(self):
+ bld=self.generator.bld
+ kw={}
+ try:
+ if not kw.get('cwd',None):
+ kw['cwd']=bld.cwd
+ except AttributeError:
+ bld.cwd=kw['cwd']=bld.variant_dir
+ kw['quiet']=Context.BOTH
+ cmd=Utils.subst_vars('${GLIB_COMPILE_RESOURCES} --sourcedir=%s --sourcedir=%s --generate-dependencies %s'%(self.inputs[0].parent.srcpath(),self.inputs[0].bld_dir(),self.inputs[0].bldpath()),self.env)
+ output=bld.cmd_and_log(cmd,**kw)
+ nodes=[]
+ names=[]
+ for dep in output.splitlines():
+ if dep:
+ node=bld.bldnode.find_node(dep)
+ if node:
+ nodes.append(node)
+ else:
+ names.append(dep)
+ return(nodes,names)
+class glib_gresource_source(glib_gresource_base):
+ vars=['GLIB_COMPILE_RESOURCES']
+ fun_h=Task.compile_fun_shell(glib_gresource_base.base_cmd+' --target=${TGT[0].abspath()} --generate-header ${SRC}')
+ fun_c=Task.compile_fun_shell(glib_gresource_base.base_cmd+' --target=${TGT[1].abspath()} --generate-source ${SRC}')
+ ext_out=['.h']
+ def run(self):
+ return self.fun_h[0](self)or self.fun_c[0](self)
+class glib_gresource_bundle(glib_gresource_base):
+ run_str=glib_gresource_base.base_cmd+' --target=${TGT} ${SRC}'
+ shell=True
+@conf
+def find_glib_genmarshal(conf):
 conf.find_program('glib-genmarshal',var='GLIB_GENMARSHAL')
- conf.find_perl_program('glib-mkenums',var='GLIB_MKENUMS')
- conf.find_program('glib-compile-schemas',var='GLIB_COMPILE_SCHEMAS',mandatory=False)
+@conf
+def find_glib_mkenums(conf):
+ if not conf.env.PERL:
+ conf.find_program('perl',var='PERL')
+ conf.find_program('glib-mkenums',interpreter='PERL',var='GLIB_MKENUMS')
+@conf
+def find_glib_compile_schemas(conf):
+ conf.find_program('glib-compile-schemas',var='GLIB_COMPILE_SCHEMAS')
 def getstr(varname):
return getattr(Options.options,varname,getattr(conf.env,varname,''))
 gsettingsschemadir=getstr('GSETTINGSSCHEMADIR')
@@ -169,5 +221,14 @@
datadir=os.path.join(prefix,'share')
gsettingsschemadir=os.path.join(datadir,'glib-2.0','schemas')
conf.env['GSETTINGSSCHEMADIR']=gsettingsschemadir
+@conf
+def find_glib_compile_resources(conf):
+ conf.find_program('glib-compile-resources',var='GLIB_COMPILE_RESOURCES')
+def configure(conf):
+ conf.find_glib_genmarshal()
+ conf.find_glib_mkenums()
+ conf.find_glib_compile_schemas(mandatory=False)
+ conf.find_glib_compile_resources(mandatory=False)
def options(opt):
- opt.add_option('--gsettingsschemadir',help='GSettings schema location [Default: ${datadir}/glib-2.0/schemas]',default='',dest='GSETTINGSSCHEMADIR')
+ gr=opt.add_option_group('Installation directories')
+ gr.add_option('--gsettingsschemadir',help='GSettings schema location [DATADIR/glib-2.0/schemas]',default='',dest='GSETTINGSSCHEMADIR')
--- a/waflib/Tools/gnu_dirs.py
+++ b/waflib/Tools/gnu_dirs.py
@@ -2,29 +2,30 @@
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-import os
+import os,re
from waflib import Utils,Options,Context
-_options=[x.split(', ')for x in'''
-bindir, user executables, ${EXEC_PREFIX}/bin
-sbindir, system admin executables, ${EXEC_PREFIX}/sbin
-libexecdir, program executables, ${EXEC_PREFIX}/libexec
-sysconfdir, read-only single-machine data, ${PREFIX}/etc
-sharedstatedir, modifiable architecture-independent data, ${PREFIX}/com
-localstatedir, modifiable single-machine data, ${PREFIX}/var
-libdir, object code libraries, ${EXEC_PREFIX}/lib
-includedir, C header files, ${PREFIX}/include
-oldincludedir, C header files for non-gcc, /usr/include
-datarootdir, read-only arch.-independent data root, ${PREFIX}/share
-datadir, read-only architecture-independent data, ${DATAROOTDIR}
-infodir, info documentation, ${DATAROOTDIR}/info
+gnuopts='''
+bindir, user commands, ${EXEC_PREFIX}/bin
+sbindir, system binaries, ${EXEC_PREFIX}/sbin
+libexecdir, program-specific binaries, ${EXEC_PREFIX}/libexec
+sysconfdir, host-specific configuration, ${PREFIX}/etc
+sharedstatedir, architecture-independent variable data, ${PREFIX}/com
+localstatedir, variable data, ${PREFIX}/var
+libdir, object code libraries, ${EXEC_PREFIX}/lib%s
+includedir, header files, ${PREFIX}/include
+oldincludedir, header files for non-GCC compilers, /usr/include
+datarootdir, architecture-independent data root, ${PREFIX}/share
+datadir, architecture-independent data, ${DATAROOTDIR}
+infodir, GNU "info" documentation, ${DATAROOTDIR}/info
 localedir, locale-dependent data, ${DATAROOTDIR}/locale
-mandir, man documentation, ${DATAROOTDIR}/man
+mandir, manual pages, ${DATAROOTDIR}/man
 docdir, documentation root, ${DATAROOTDIR}/doc/${PACKAGE}
-htmldir, html documentation, ${DOCDIR}
-dvidir, dvi documentation, ${DOCDIR}
-pdfdir, pdf documentation, ${DOCDIR}
-psdir, ps documentation, ${DOCDIR}
-'''.split('\n')if x]
+htmldir, HTML documentation, ${DOCDIR}
+dvidir, DVI documentation, ${DOCDIR}
+pdfdir, PDF documentation, ${DOCDIR}
+psdir, PostScript documentation, ${DOCDIR}
+'''%Utils.lib64()
+_options=[x.split(', ')for x in gnuopts.splitlines()if x]
 def configure(conf):
def get_param(varname,default):
return getattr(Options.options,varname,'')or default
@@ -45,10 +46,10 @@
except TypeError:
complete=False
if not complete:
- lst=[name for name,_,_ in _options if not env[name.upper()]]
+ lst=[x for x,_,_ in _options if not env[x.upper()]]
 raise conf.errors.WafError('Variable substitution failure %r'%lst)
 def options(opt):
- inst_dir=opt.add_option_group('Installation directories','By default, "waf install" will put the files in\
+ inst_dir=opt.add_option_group('Installation prefix','By default, "waf install" will put the files in\
 "/usr/local/bin", "/usr/local/lib" etc. An installation prefix other\
than "/usr/local" can be given using "--prefix", for example "--prefix=$HOME"')
 for k in('--prefix','--destdir'):
@@ -56,10 +57,10 @@
if option:
opt.parser.remove_option(k)
inst_dir.add_option(option)
- inst_dir.add_option('--exec-prefix',help='installation prefix [Default: ${PREFIX}]',default='',dest='EXEC_PREFIX')
- dirs_options=opt.add_option_group('Pre-defined installation directories','')
+ inst_dir.add_option('--exec-prefix',help='installation prefix for binaries [PREFIX]',default='',dest='EXEC_PREFIX')
+ dirs_options=opt.add_option_group('Installation directories')
 for name,help,default in _options:
option_name='--'+name
str_default=default
- str_help='%s [Default: %s]'%(help,str_default)
+ str_help='%s [%s]'%(help,re.sub(r'\$\{([^}]+)\}',r'\1',str_default))
 dirs_options.add_option(option_name,help=str_help,default='',dest=name.upper())
--- a/waflib/Tools/gxx.py
+++ b/waflib/Tools/gxx.py
@@ -2,15 +2,15 @@
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+import os,sys
+from waflib import Configure,Options,Utils
from waflib.Tools import ccroot,ar
from waflib.Configure import conf
@conf
def find_gxx(conf):
cxx=conf.find_program(['g++','c++'],var='CXX')
- cxx=conf.cmd_to_list(cxx)
conf.get_cc_version(cxx,gcc=True)
conf.env.CXX_NAME='gcc'
- conf.env.CXX=cxx
@conf
def gxx_common_flags(conf):
v=conf.env
--- a/waflib/Tools/icc.py
+++ b/waflib/Tools/icc.py
@@ -9,17 +9,9 @@
def find_icc(conf):
if sys.platform=='cygwin':
 conf.fatal('The Intel compiler does not work on Cygwin')
- v=conf.env
- cc=None
- if v['CC']:cc=v['CC']
- elif'CC'in conf.environ:cc=conf.environ['CC']
- if not cc:cc=conf.find_program('icc',var='CC')
- if not cc:cc=conf.find_program('ICL',var='CC')
- if not cc:conf.fatal('Intel C Compiler (icc) was not found')
- cc=conf.cmd_to_list(cc)
+ cc=conf.find_program(['icc','ICL'],var='CC')
conf.get_cc_version(cc,icc=True)
- v['CC']=cc
- v['CC_NAME']='icc'
+ conf.env.CC_NAME='icc'
def configure(conf):
conf.find_icc()
conf.find_ar()
--- a/waflib/Tools/icpc.py
+++ b/waflib/Tools/icpc.py
@@ -9,16 +9,9 @@
def find_icpc(conf):
if sys.platform=='cygwin':
 conf.fatal('The Intel compiler does not work on Cygwin')
- v=conf.env
- cxx=None
- if v['CXX']:cxx=v['CXX']
- elif'CXX'in conf.environ:cxx=conf.environ['CXX']
- if not cxx:cxx=conf.find_program('icpc',var='CXX')
- if not cxx:conf.fatal('Intel C++ Compiler (icpc) was not found')
- cxx=conf.cmd_to_list(cxx)
+ cxx=conf.find_program('icpc',var='CXX')
 conf.get_cc_version(cxx,icc=True)
- v['CXX']=cxx
- v['CXX_NAME']='icc'
+ conf.env.CXX_NAME='icc'
def configure(conf):
conf.find_icpc()
conf.find_ar()
--- a/waflib/Tools/ifort.py
+++ b/waflib/Tools/ifort.py
@@ -9,7 +9,6 @@
@conf
def find_ifort(conf):
 fc=conf.find_program('ifort',var='FC')
- fc=conf.cmd_to_list(fc)
conf.get_ifort_version(fc)
conf.env.FC_NAME='IFORT'
@conf
@@ -29,8 +28,11 @@
ifort_modifier_func()
@conf
def get_ifort_version(conf,fc):
- version_re=re.compile(r"ifort\s*\(IFORT\)\s*(?P<major>\d*)\.(?P<minor>\d*)",re.I).search
- cmd=fc+['--version']
+ version_re=re.compile(r"Intel[\sa-zA-Z()0-9,-]*Version\s*(?P<major>\d*)\.(?P<minor>\d*)",re.I).search
+ if Utils.is_win32:
+ cmd=fc
+ else:
+ cmd=fc+['-logo']
out,err=fc_config.getoutput(conf,cmd,stdin=False)
if out:
match=version_re(out)
--- a/waflib/Tools/intltool.py
+++ b/waflib/Tools/intltool.py
@@ -3,30 +3,47 @@
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
import os,re
-from waflib import Configure,TaskGen,Task,Utils,Runner,Options,Build,Logs
+from waflib import Configure,Context,TaskGen,Task,Utils,Runner,Options,Build,Logs
import waflib.Tools.ccroot
-from waflib.TaskGen import feature,before_method
+from waflib.TaskGen import feature,before_method,taskgen_method
from waflib.Logs import error
+from waflib.Configure import conf
+_style_flags={'ba':'-b','desktop':'-d','keys':'-k','quoted':'--quoted-style','quotedxml':'--quotedxml-style','rfc822deb':'-r','schemas':'-s','xml':'-x',}
+@taskgen_method
+def ensure_localedir(self):
+ if not self.env.LOCALEDIR:
+ if self.env.DATAROOTDIR:
+ self.env.LOCALEDIR=os.path.join(self.env.DATAROOTDIR,'locale')
+ else:
+ self.env.LOCALEDIR=os.path.join(self.env.PREFIX,'share','locale')
 @before_method('process_source')
 @feature('intltool_in')
 def apply_intltool_in_f(self):
 try:self.meths.remove('process_source')
 except ValueError:pass
- if not self.env.LOCALEDIR:
- self.env.LOCALEDIR=self.env.PREFIX+'/share/locale'
+ self.ensure_localedir()
+ podir=getattr(self,'podir','.')
+ podirnode=self.path.find_dir(podir)
+ if not podirnode:
+ error("could not find the podir %r"%podir)
+ return
+ cache=getattr(self,'intlcache','.intlcache')
+ self.env.INTLCACHE=[os.path.join(str(self.path.get_bld()),podir,cache)]
+ self.env.INTLPODIR=podirnode.bldpath()
+ self.env.append_value('INTLFLAGS',getattr(self,'flags',self.env.INTLFLAGS_DEFAULT))
+ if'-c'in self.env.INTLFLAGS:
+ self.bld.fatal('Redundant -c flag in intltool task %r'%self)
+ style=getattr(self,'style',None)
+ if style:
+ try:
+ style_flag=_style_flags[style]
+ except KeyError:
+ self.bld.fatal('intltool_in style "%s" is not valid'%style)
+ self.env.append_unique('INTLFLAGS',[style_flag])
 for i in self.to_list(self.source):
node=self.path.find_resource(i)
- podir=getattr(self,'podir','po')
- podirnode=self.path.find_dir(podir)
- if not podirnode:
- error("could not find the podir %r"%podir)
- continue
- cache=getattr(self,'intlcache','.intlcache')
- self.env['INTLCACHE']=os.path.join(self.path.bldpath(),podir,cache)
- self.env['INTLPODIR']=podirnode.bldpath()
- self.env['INTLFLAGS']=getattr(self,'flags',['-q','-u','-c'])
 task=self.create_task('intltool',node,node.change_ext(''))
- inst=getattr(self,'install_path','${LOCALEDIR}')
+ inst=getattr(self,'install_path',None)
if inst:
self.bld.install_files(inst,task.outputs)
 @feature('intltool_po')
@@ -33,10 +50,9 @@
def apply_intltool_po(self):
 try:self.meths.remove('process_source')
 except ValueError:pass
- if not self.env.LOCALEDIR:
- self.env.LOCALEDIR=self.env.PREFIX+'/share/locale'
- appname=getattr(self,'appname','set_your_app_name')
- podir=getattr(self,'podir','')
+ self.ensure_localedir()
+ appname=getattr(self,'appname',getattr(Context.g_module,Context.APPNAME,'set_your_app_name'))
+ podir=getattr(self,'podir','.')
 inst=getattr(self,'install_path','${LOCALEDIR}')
 linguas=self.path.find_node(os.path.join(podir,'LINGUAS'))
if linguas:
@@ -62,16 +78,20 @@
 run_str='${MSGFMT} -o ${TGT} ${SRC}'
 color='BLUE'
class intltool(Task.Task):
- run_str='${INTLTOOL} ${INTLFLAGS} ${INTLCACHE} ${INTLPODIR} ${SRC} ${TGT}'
+ run_str='${INTLTOOL} ${INTLFLAGS} ${INTLCACHE_ST:INTLCACHE} ${INTLPODIR} ${SRC} ${TGT}'
 color='BLUE'
-def configure(conf):
+@conf
+def find_msgfmt(conf):
 conf.find_program('msgfmt',var='MSGFMT')
- conf.find_perl_program('intltool-merge',var='INTLTOOL')
- prefix=conf.env.PREFIX
- datadir=conf.env.DATADIR
- if not datadir:
- datadir=os.path.join(prefix,'share')
- conf.define('LOCALEDIR',os.path.join(datadir,'locale').replace('\\','\\\\'))
- conf.define('DATADIR',datadir.replace('\\','\\\\'))
+@conf
+def find_intltool_merge(conf):
+ if not conf.env.PERL:
+ conf.find_program('perl',var='PERL')
+ conf.env.INTLCACHE_ST='--cache=%s'
+ conf.env.INTLFLAGS_DEFAULT=['-q','-u']
+ conf.find_program('intltool-merge',interpreter='PERL',var='INTLTOOL')
+def configure(conf):
+ conf.find_msgfmt()
+ conf.find_intltool_merge()
if conf.env.CC or conf.env.CXX:
conf.check(header_name='locale.h')
--- a/waflib/Tools/irixcc.py
+++ b/waflib/Tools/irixcc.py
@@ -14,7 +14,6 @@
elif'CC'in conf.environ:cc=conf.environ['CC']
if not cc:cc=conf.find_program('cc',var='CC')
if not cc:conf.fatal('irixcc was not found')
- cc=conf.cmd_to_list(cc)
try:
conf.cmd_and_log(cc+['-version'])
except Exception:
--- a/waflib/Tools/javaw.py
+++ b/waflib/Tools/javaw.py
@@ -157,7 +157,6 @@
return super(jar_create,self).runnable_status()
class javac(Task.Task):
color='BLUE'
- nocache=True
vars=['CLASSPATH','JAVACFLAGS','JAVAC','OUTDIR']
def runnable_status(self):
for t in self.run_after:
@@ -253,7 +252,6 @@
self.env['JAVA_HOME']=[self.environ['JAVA_HOME']]
for x in'javac java jar javadoc'.split():
self.find_program(x,var=x.upper(),path_list=java_path)
- self.env[x.upper()]=self.cmd_to_list(self.env[x.upper()])
if'CLASSPATH'in self.environ:
v['CLASSPATH']=self.environ['CLASSPATH']
if not v['JAR']:self.fatal('jar is required for making java packages')
--- a/waflib/Tools/kde4.py
+++ b/waflib/Tools/kde4.py
@@ -19,7 +19,7 @@
run_str='${MSGFMT} ${SRC} -o ${TGT}'
def configure(self):
kdeconfig=self.find_program('kde4-config')
- prefix=self.cmd_and_log('%s --prefix'%kdeconfig).strip()
+ prefix=self.cmd_and_log(kdeconfig+['--prefix']).strip()
fname='%s/share/apps/cmake/modules/KDELibsDependencies.cmake'%prefix
try:os.stat(fname)
except OSError:
@@ -28,7 +28,7 @@
except OSError:self.fatal('could not open %s'%fname)
try:
txt=Utils.readf(fname)
- except(OSError,IOError):
+ except EnvironmentError:
self.fatal('could not read %s'%fname)
txt=txt.replace('\\\n','\n')
fu=re.compile('#(.*)\n')
--- a/waflib/Tools/ldc2.py
+++ b/waflib/Tools/ldc2.py
@@ -8,7 +8,7 @@
@conf
def find_ldc2(conf):
conf.find_program(['ldc2'],var='D')
- out=conf.cmd_and_log([conf.env.D,'-version'])
+ out=conf.cmd_and_log(conf.env.D+['-version'])
if out.find("based on DMD v2.")==-1: conf.fatal("detected compiler is not ldc2")@conf
--- a/waflib/Tools/msvc.py
+++ b/waflib/Tools/msvc.py
@@ -59,7 +59,11 @@
@conf
def get_msvc_version(conf,compiler,version,target,vcvars):
debug('msvc: get_msvc_version: %r %r %r',compiler,version,target)
- batfile=conf.bldnode.make_node('waf-print-msvc.bat')
+ try:
+ conf.msvc_cnt+=1
+ except AttributeError:
+ conf.msvc_cnt=1
+ batfile=conf.bldnode.make_node('waf-print-msvc-%d.bat'%conf.msvc_cnt)
batfile.write("""@echo off
set INCLUDE=
set LIB=
@@ -68,7 +72,7 @@
echo INCLUDE=%%INCLUDE%%
echo LIB=%%LIB%%;%%LIBPATH%%
"""%(vcvars,target))
- sout=conf.cmd_and_log(['cmd','/E:on','/V:on','/C',batfile.abspath()])
+ sout=conf.cmd_and_log(['cmd.exe','/E:on','/V:on','/C',batfile.abspath()])
lines=sout.splitlines()
if not lines[0]:
lines.pop(0)
@@ -87,7 +91,6 @@
env.update(PATH=path)
compiler_name,linker_name,lib_name=_get_prog_names(conf,compiler)
cxx=conf.find_program(compiler_name,path_list=MSVC_PATH)
- cxx=conf.cmd_to_list(cxx)
if'CL'in env:
del(env['CL'])
try:
@@ -175,7 +178,7 @@
def gather_msvc_detected_versions():
version_pattern=re.compile('^(\d\d?\.\d\d?)(Exp)?$')detected_versions=[]
- for vcver,vcvar in[('VCExpress','Exp'),('VisualStudio','')]:
+ for vcver,vcvar in(('VCExpress','Exp'),('VisualStudio','')):
try:
prefix='SOFTWARE\\Wow6432node\\Microsoft\\'+vcver
all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,prefix)
@@ -372,7 +375,7 @@
setattr(conf,compilervars_warning_attr,False)
patch_url='http://software.intel.com/en-us/forums/topic/328487'
compilervars_arch=os.path.join(path,'bin','compilervars_arch.bat')
- for vscomntool in['VS110COMNTOOLS','VS100COMNTOOLS']:
+ for vscomntool in('VS110COMNTOOLS','VS100COMNTOOLS'):
if vscomntool in os.environ:
vs_express_path=os.environ[vscomntool]+r'..\IDE\VSWinExpress.exe'
dev_env_path=os.environ[vscomntool]+r'..\IDE\devenv.exe'
@@ -528,7 +531,6 @@
if v['CXX']:cxx=v['CXX']
elif'CXX'in conf.environ:cxx=conf.environ['CXX']
cxx=conf.find_program(compiler_name,var='CXX',path_list=path)
- cxx=conf.cmd_to_list(cxx)
env=dict(conf.environ)
if path:env.update(PATH=';'.join(path))
if not conf.cmd_and_log(cxx+['/nologo','/help'],env=env):
@@ -618,10 +620,8 @@
if d[1:]=='debug':
pdbnode=self.link_task.outputs[0].change_ext('.pdb')
self.link_task.outputs.append(pdbnode)
- try:
- self.install_task.source.append(pdbnode)
- except AttributeError:
- pass
+ if getattr(self,'install_task',None):
+ self.pdb_install_task=self.bld.install_files(self.install_task.dest,pdbnode,env=self.env)
break
@feature('cprogram','cshlib','cxxprogram','cxxshlib')
@after_method('apply_link')
@@ -651,13 +651,11 @@
elif'cshlib'in self.generator.features or'cxxshlib'in self.generator.features:
mode='2'
debug('msvc: embedding manifest in mode %r'%mode)
- lst=[]
- lst.append(env['MT'])
+ lst=[]+mtool
lst.extend(Utils.to_list(env['MTFLAGS']))
lst.extend(['-manifest',manifest])
lst.append('-outputresource:%s;%s'%(outfile,mode))
- lst=[lst]
- return self.exec_command(*lst)
+ return self.exec_command(lst)
def quote_response_command(self,flag):
if flag.find(' ')>-1:
for x in('/LIBPATH:','/IMPLIB:','/OUT:','/I'):
@@ -725,6 +723,8 @@
derived_class.quote_response_command=quote_response_command
derived_class.exec_command_msvc=exec_command_msvc
derived_class.exec_mf=exec_mf
+ if hasattr(cls,'hcode'):
+ derived_class.hcode=cls.hcode
return derived_class
for k in'c cxx cprogram cxxprogram cshlib cxxshlib cstlib cxxstlib'.split():
wrap_class(k)
--- a/waflib/Tools/perl.py
+++ b/waflib/Tools/perl.py
@@ -36,7 +36,7 @@
self.end_msg("Perl not found",color="YELLOW")return False
self.env['PERL']=perl
- version=self.cmd_and_log([perl,"-e",'printf \"%vd\", $^V'])
+ version=self.cmd_and_log(self.env.PERL+["-e",'printf \"%vd\", $^V'])
if not version:
res=False
version="Unknown"
@@ -48,7 +48,7 @@
return res
@conf
def check_perl_module(self,module):
- cmd=[self.env['PERL'],'-e','use %s'%module]
+ cmd=self.env.PERL+['-e','use %s'%module]
self.start_msg('perl module %s'%module)
try:
r=self.cmd_and_log(cmd)
@@ -63,18 +63,22 @@
perl=env.PERL
if not perl:
self.fatal('find perl first')
- def read_out(cmd):
- return Utils.to_list(self.cmd_and_log(perl+cmd))
- env['LINKFLAGS_PERLEXT']=read_out(" -MConfig -e'print $Config{lddlflags}'")
- env['INCLUDES_PERLEXT']=read_out(" -MConfig -e'print \"$Config{archlib}/CORE\"'")
- env['CFLAGS_PERLEXT']=read_out(" -MConfig -e'print \"$Config{ccflags} $Config{cccdlflags}\"'")
- env['XSUBPP']=read_out(" -MConfig -e'print \"$Config{privlib}/ExtUtils/xsubpp$Config{exe_ext}\"'")
- env['EXTUTILS_TYPEMAP']=read_out(" -MConfig -e'print \"$Config{privlib}/ExtUtils/typemap\"'")
+ def cmd_perl_config(s):
+ return perl+['-MConfig','-e','print \"%s\"'%s]
+ def cfg_str(cfg):
+ return self.cmd_and_log(cmd_perl_config(cfg))
+ def cfg_lst(cfg):
+ return Utils.to_list(cfg_str(cfg))
+ env['LINKFLAGS_PERLEXT']=cfg_lst('$Config{lddlflags}')
+ env['INCLUDES_PERLEXT']=cfg_lst('$Config{archlib}/CORE')
+ env['CFLAGS_PERLEXT']=cfg_lst('$Config{ccflags} $Config{cccdlflags}')
+ env['XSUBPP']=cfg_lst('$Config{privlib}/ExtUtils/xsubpp$Config{exe_ext}')
+ env['EXTUTILS_TYPEMAP']=cfg_lst('$Config{privlib}/ExtUtils/typemap')
if not getattr(Options.options,'perlarchdir',None):
- env['ARCHDIR_PERL']=self.cmd_and_log(perl+" -MConfig -e'print $Config{sitearch}'")
+ env['ARCHDIR_PERL']=cfg_str('$Config{sitearch}')
else:
env['ARCHDIR_PERL']=getattr(Options.options,'perlarchdir')
- env['perlext_PATTERN']='%s.'+self.cmd_and_log(perl+" -MConfig -e'print $Config{dlext}'")
+ env['perlext_PATTERN']='%s.'+cfg_str('$Config{dlext}')
def options(opt):
opt.add_option('--with-perl-binary',type='string',dest='perlbinary',help='Specify alternate perl binary',default=None)
opt.add_option('--with-perl-archdir',type='string',dest='perlarchdir',help='Specify directory where to install arch specific files',default=None)
--- a/waflib/Tools/python.py
+++ b/waflib/Tools/python.py
@@ -3,7 +3,7 @@
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
import os,sys
-from waflib import Utils,Options,Errors,Logs
+from waflib import Utils,Options,Errors,Logs,Task,Node
from waflib.TaskGen import extension,before_method,after_method,feature
from waflib.Configure import conf
FRAG='''
@@ -26,64 +26,66 @@
'''
INST='''
import sys, py_compile
-py_compile.compile(sys.argv[1], sys.argv[2], sys.argv[3])
+py_compile.compile(sys.argv[1], sys.argv[2], sys.argv[3], True)
'''
DISTUTILS_IMP=['from distutils.sysconfig import get_config_var, get_python_lib']
+@before_method('process_source')
+@feature('py')
+def feature_py(self):
+ self.install_path=getattr(self,'install_path','${PYTHONDIR}')
+ install_from=getattr(self,'install_from',None)
+ if install_from and not isinstance(install_from,Node.Node):
+ install_from=self.path.find_dir(install_from)
+ self.install_from=install_from
+ ver=self.env.PYTHON_VERSION
+ if not ver:
+ self.bld.fatal('Installing python files requires PYTHON_VERSION, try conf.check_python_version')
+ if int(ver.replace('.',''))>31:
+ self.install_32=True
@extension('.py')
def process_py(self,node):
- try:
- if not self.bld.is_install:
- return
- except AttributeError:
- return
- try:
- if not self.install_path:
- return
- except AttributeError:
- self.install_path='${PYTHONDIR}'
- def inst_py(ctx):
- install_from=getattr(self,'install_from',None)
- if install_from:
- install_from=self.path.find_dir(install_from)
- install_pyfile(self,node,install_from)
- self.bld.add_post_fun(inst_py)
-def install_pyfile(self,node,install_from=None):
- from_node=install_from or node.parent
- tsk=self.bld.install_as(self.install_path+'/'+node.path_from(from_node),node,postpone=False)
- path=tsk.get_install_path()
- if self.bld.is_install<0:
- Logs.info("+ removing byte compiled python files")- for x in'co':
- try:
- os.remove(path+x)
- except OSError:
- pass
- if self.bld.is_install>0:
- try:
- st1=os.stat(path)
- except OSError:
- Logs.error('The python file is missing, this should not happen')
- for x in['c','o']:
- do_inst=self.env['PY'+x.upper()]
- try:
- st2=os.stat(path+x)
- except OSError:
- pass
- else:
- if st1.st_mtime<=st2.st_mtime:
- do_inst=False
- if do_inst:
- lst=(x=='o')and[self.env['PYFLAGS_OPT']]or[]
- (a,b,c)=(path,path+x,tsk.get_install_path(destdir=False)+x)
- argv=self.env['PYTHON']+lst+['-c',INST,a,b,c]
- Logs.info('+ byte compiling %r'%(path+x))
- env=self.env.env or None
- ret=Utils.subprocess.Popen(argv,env=env).wait()
- if ret:
- raise Errors.WafError('py%s compilation failed %r'%(x,path))
-@feature('py')
-def feature_py(self):
- pass
+ assert(node.get_bld_sig())
+ assert(getattr(self,'install_path')),'add features="py"'
+ if self.install_path:
+ if self.install_from:
+ self.bld.install_files(self.install_path,[node],cwd=self.install_from,relative_trick=True)
+ else:
+ self.bld.install_files(self.install_path,[node],relative_trick=True)
+ lst=[]
+ if self.env.PYC:
+ lst.append('pyc')
+ if self.env.PYO:
+ lst.append('pyo')
+ if self.install_path:
+ if self.install_from:
+ pyd=Utils.subst_vars("%s/%s"%(self.install_path,node.path_from(self.install_from)),self.env)
+ else:
+ pyd=Utils.subst_vars("%s/%s"%(self.install_path,node.path_from(self.path)),self.env)
+ else:
+ pyd=node.abspath()
+ for ext in lst:
+ if self.env.PYTAG:
+ name=node.name[:-3]
+ pyobj=node.parent.get_bld().make_node('__pycache__').make_node("%s.%s.%s"%(name,self.env.PYTAG,ext))
+ pyobj.parent.mkdir()
+ else:
+ pyobj=node.change_ext(".%s"%ext)+ tsk=self.create_task(ext,node,pyobj)
+ tsk.pyd=pyd
+ if self.install_path:
+ self.bld.install_files(os.path.dirname(pyd),pyobj,cwd=node.parent.get_bld(),relative_trick=True)
+class pyc(Task.Task):
+ color='PINK'
+ def run(self):
+ cmd=[Utils.subst_vars('${PYTHON}',self.env),'-c',INST,self.inputs[0].abspath(),self.outputs[0].abspath(),self.pyd]
+ ret=self.generator.bld.exec_command(cmd)
+ return ret
+class pyo(Task.Task):
+ color='PINK'
+ def run(self):
+ cmd=[Utils.subst_vars('${PYTHON}',self.env),Utils.subst_vars('${PYFLAGS_OPT}',self.env),'-c',INST,self.inputs[0].abspath(),self.outputs[0].abspath(),self.pyd]
+ ret=self.generator.bld.exec_command(cmd)
+ return ret
@feature('pyext')
@before_method('propagate_uselib_vars','apply_link')
@after_method('apply_bundle')
@@ -131,7 +133,7 @@
self.fatal('The distutils module is unusable: install "python-devel"?')
self.to_log(out)
return_values=[]
- for s in out.split('\n'):
+ for s in out.splitlines():
s=s.strip()
if not s:
continue
@@ -144,33 +146,82 @@
else:break
return return_values
@conf
-def check_python_headers(conf):
+def python_cross_compile(self,features='pyembed pyext'):
+ features=Utils.to_list(features)
+ if not('PYTHON_LDFLAGS'in self.environ or'PYTHON_PYEXT_LDFLAGS'in self.environ or'PYTHON_PYEMBED_LDFLAGS'in self.environ):
+ return False
+ for x in'PYTHON_VERSION PYTAG pyext_PATTERN'.split():
+ if not x in self.environ:
+ self.fatal('Please set %s in the os environment'%x)
+ else:
+ self.env[x]=self.environ[x]
+ xx=self.env.CXX_NAME and'cxx'or'c'
+ if'pyext'in features:
+ flags=self.environ.get('PYTHON_PYEXT_LDFLAGS',self.environ.get('PYTHON_LDFLAGS',None))
+ if flags is None:
+ self.fatal('No flags provided through PYTHON_PYEXT_LDFLAGS as required')
+ else:
+ self.parse_flags(flags,'PYEXT')
+ self.check(header_name='Python.h',define_name='HAVE_PYEXT',msg='Testing pyext configuration',features='%s %sshlib pyext'%(xx,xx),fragment=FRAG,errmsg='Could not build python extensions')
+ if'pyembed'in features:
+ flags=self.environ.get('PYTHON_PYEMBED_LDFLAGS',self.environ.get('PYTHON_LDFLAGS',None))
+ if flags is None:
+ self.fatal('No flags provided through PYTHON_PYEMBED_LDFLAGS as required')
+ else:
+ self.parse_flags(flags,'PYEMBED')
+ self.check(header_name='Python.h',define_name='HAVE_PYEMBED',msg='Testing pyembed configuration',fragment=FRAG,errmsg='Could not build a python embedded interpreter',features='%s %sprogram pyembed'%(xx,xx))
+ return True
+@conf
+def check_python_headers(conf,features='pyembed pyext'):
+ features=Utils.to_list(features)
+ assert('pyembed'in features)or('pyext'in features),"check_python_headers features must include 'pyembed' and/or 'pyext'"
env=conf.env
if not env['CC_NAME']and not env['CXX_NAME']:
conf.fatal('load a compiler first (gcc, g++, ..)')
+ if conf.python_cross_compile(features):
+ return
if not env['PYTHON_VERSION']:
conf.check_python_version()
- pybin=conf.env.PYTHON
+ pybin=env.PYTHON
if not pybin:
conf.fatal('Could not find the python executable')
- v='prefix SO LDFLAGS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED MACOSX_DEPLOYMENT_TARGET LDSHARED CFLAGS'.split()
+ v='prefix SO LDFLAGS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED MACOSX_DEPLOYMENT_TARGET LDSHARED CFLAGS LDVERSION'.split()
try:
lst=conf.get_python_variables(["get_config_var('%s') or ''"%x for x in v])
except RuntimeError:
conf.fatal("Python development headers not found (-v for details).")
vals=['%s = %r'%(x,y)for(x,y)in zip(v,lst)]
- conf.to_log("Configuration returned from %r:\n%r\n"%(pybin,'\n'.join(vals)))
+ conf.to_log("Configuration returned from %r:\n%s\n"%(pybin,'\n'.join(vals)))
dct=dict(zip(v,lst))
x='MACOSX_DEPLOYMENT_TARGET'
if dct[x]:
- conf.env[x]=conf.environ[x]=dct[x]
+ env[x]=conf.environ[x]=dct[x]
env['pyext_PATTERN']='%s'+dct['SO']
+ num='.'.join(env['PYTHON_VERSION'].split('.')[:2])
+ conf.find_program([''.join(pybin)+'-config','python%s-config'%num,'python-config-%s'%num,'python%sm-config'%num],var='PYTHON_CONFIG',msg="python-config",mandatory=False)
+ if env.PYTHON_CONFIG:
+ all_flags=[['--cflags','--libs','--ldflags']]
+ if sys.hexversion<0x2070000:
+ all_flags=[[k]for k in all_flags[0]]
+ xx=env.CXX_NAME and'cxx'or'c'
+ if'pyembed'in features:
+ for flags in all_flags:
+ conf.check_cfg(msg='Asking python-config for pyembed %r flags'%' '.join(flags),path=env.PYTHON_CONFIG,package='',uselib_store='PYEMBED',args=flags)
+ conf.check(header_name='Python.h',define_name='HAVE_PYEMBED',msg='Getting pyembed flags from python-config',fragment=FRAG,errmsg='Could not build a python embedded interpreter',features='%s %sprogram pyembed'%(xx,xx))
+ if'pyext'in features:
+ for flags in all_flags:
+ conf.check_cfg(msg='Asking python-config for pyext %r flags'%' '.join(flags),path=env.PYTHON_CONFIG,package='',uselib_store='PYEXT',args=flags)
+ conf.check(header_name='Python.h',define_name='HAVE_PYEXT',msg='Getting pyext flags from python-config',features='%s %sshlib pyext'%(xx,xx),fragment=FRAG,errmsg='Could not build python extensions')
+ conf.define('HAVE_PYTHON_H',1)
+ return
all_flags=dct['LDFLAGS']+' '+dct['CFLAGS']
conf.parse_flags(all_flags,'PYEMBED')
all_flags=dct['LDFLAGS']+' '+dct['LDSHARED']+' '+dct['CFLAGS']
conf.parse_flags(all_flags,'PYEXT')
result=None
- for name in('python'+env['PYTHON_VERSION'],'python'+env['PYTHON_VERSION']+'m','python'+env['PYTHON_VERSION'].replace('.','')):
+ if not dct["LDVERSION"]:
+ dct["LDVERSION"]=env['PYTHON_VERSION']
+ for name in('python'+dct['LDVERSION'],'python'+env['PYTHON_VERSION']+'m','python'+env['PYTHON_VERSION'].replace('.','')):
if not result and env['LIBPATH_PYEMBED']:
path=env['LIBPATH_PYEMBED']
conf.to_log("\n\n# Trying default LIBPATH_PYEMBED: %r\n"%path)
@@ -194,25 +245,12 @@
env.append_value('LIB_PYEMBED',[name])
else:
conf.to_log("\n\n### LIB NOT FOUND\n")
- if(Utils.is_win32 or sys.platform.startswith('os2')or dct['Py_ENABLE_SHARED']):
+ if Utils.is_win32 or dct['Py_ENABLE_SHARED']:
env['LIBPATH_PYEXT']=env['LIBPATH_PYEMBED']
env['LIB_PYEXT']=env['LIB_PYEMBED']
- num='.'.join(env['PYTHON_VERSION'].split('.')[:2])
- conf.find_program([''.join(pybin)+'-config','python%s-config'%num,'python-config-%s'%num,'python%sm-config'%num],var='PYTHON_CONFIG',mandatory=False)
- includes=[]
- if conf.env.PYTHON_CONFIG:
- for incstr in conf.cmd_and_log([conf.env.PYTHON_CONFIG,'--includes']).strip().split():
- if(incstr.startswith('-I')or incstr.startswith('/I')):
- incstr=incstr[2:]
- if incstr not in includes:
- includes.append(incstr)
- conf.to_log("Include path for Python extensions (found via python-config --includes): %r\n"%(includes,))- env['INCLUDES_PYEXT']=includes
- env['INCLUDES_PYEMBED']=includes
- else:
- conf.to_log("Include path for Python extensions ""(found via distutils module): %r\n"%(dct['INCLUDEPY'],))- env['INCLUDES_PYEXT']=[dct['INCLUDEPY']]
- env['INCLUDES_PYEMBED']=[dct['INCLUDEPY']]
+ conf.to_log("Include path for Python extensions (found via distutils module): %r\n"%(dct['INCLUDEPY'],))+ env['INCLUDES_PYEXT']=[dct['INCLUDEPY']]
+ env['INCLUDES_PYEMBED']=[dct['INCLUDEPY']]
if env['CC_NAME']=='gcc':
env.append_value('CFLAGS_PYEMBED',['-fno-strict-aliasing'])
env.append_value('CFLAGS_PYEXT',['-fno-strict-aliasing'])
@@ -226,17 +264,7 @@
env.append_value('CFLAGS_PYEXT',dist_compiler.compile_options)
env.append_value('CXXFLAGS_PYEXT',dist_compiler.compile_options)
env.append_value('LINKFLAGS_PYEXT',dist_compiler.ldflags_shared)
- try:
- conf.check(header_name='Python.h',define_name='HAVE_PYTHON_H',uselib='PYEMBED',fragment=FRAG,errmsg=':-(')
- except conf.errors.ConfigurationError:
- xx=conf.env.CXX_NAME and'cxx'or'c'
- flags=['--cflags','--libs','--ldflags']
- for f in flags:
- conf.check_cfg(msg='Asking python-config for pyembed %s flags'%f,path=conf.env.PYTHON_CONFIG,package='',uselib_store='PYEMBED',args=[f])
- conf.check(header_name='Python.h',define_name='HAVE_PYTHON_H',msg='Getting pyembed flags from python-config',fragment=FRAG,errmsg='Could not build a python embedded interpreter',features='%s %sprogram pyembed'%(xx,xx))
- for f in flags:
- conf.check_cfg(msg='Asking python-config for pyext %s flags'%f,path=conf.env.PYTHON_CONFIG,package='',uselib_store='PYEXT',args=[f])
- conf.check(header_name='Python.h',define_name='HAVE_PYTHON_H',msg='Getting pyext flags from python-config',features='%s %sshlib pyext'%(xx,xx),fragment=FRAG,errmsg='Could not build python extensions')
+ conf.check(header_name='Python.h',define_name='HAVE_PYTHON_H',uselib='PYEMBED',fragment=FRAG,errmsg='Distutils not installed? Broken python installation? Get python-config now!')
@conf
def check_python_version(conf,minver=None):
assert minver is None or isinstance(minver,tuple)
@@ -252,23 +280,27 @@
if result:
pyver='.'.join([str(x)for x in pyver_tuple[:2]])
conf.env['PYTHON_VERSION']=pyver
- if'PYTHONDIR'in conf.environ:
+ if'PYTHONDIR'in conf.env:
+ pydir=conf.env['PYTHONDIR']
+ elif'PYTHONDIR'in conf.environ:
pydir=conf.environ['PYTHONDIR']
else:
if Utils.is_win32:
- (python_LIBDEST,pydir)=conf.get_python_variables(["get_config_var('LIBDEST') or ''","get_python_lib(standard_lib=0, prefix=%r) or ''"%conf.env['PREFIX']])
+ (python_LIBDEST,pydir)=conf.get_python_variables(["get_config_var('LIBDEST') or ''","get_python_lib(standard_lib=0) or ''"])
else:
python_LIBDEST=None
- (pydir,)=conf.get_python_variables(["get_python_lib(standard_lib=0, prefix=%r) or ''"%conf.env['PREFIX']])
+ (pydir,)=conf.get_python_variables(["get_python_lib(standard_lib=0) or ''"])
if python_LIBDEST is None:
if conf.env['LIBDIR']:
python_LIBDEST=os.path.join(conf.env['LIBDIR'],"python"+pyver)
else:
python_LIBDEST=os.path.join(conf.env['PREFIX'],"lib","python"+pyver)
- if'PYTHONARCHDIR'in conf.environ:
+ if'PYTHONARCHDIR'in conf.env:
+ pyarchdir=conf.env['PYTHONARCHDIR']
+ elif'PYTHONARCHDIR'in conf.environ:
pyarchdir=conf.environ['PYTHONARCHDIR']
else:
- (pyarchdir,)=conf.get_python_variables(["get_python_lib(plat_specific=1, standard_lib=0, prefix=%r) or ''"%conf.env['PREFIX']])
+ (pyarchdir,)=conf.get_python_variables(["get_python_lib(plat_specific=1, standard_lib=0) or ''"])
if not pyarchdir:
pyarchdir=pydir
if hasattr(conf,'define'):
@@ -288,13 +320,13 @@
import %s as current_module
version = getattr(current_module, '__version__', None)
if version is not None:
- print(str(version))
+ print(str(version))
else:
- print('unknown version')
+ print('unknown version')
'''
@conf
def check_python_module(conf,module_name,condition=''):
- msg='Python module %s'%module_name
+ msg="Checking for python module '%s'"%module_name
if condition:
msg='%s (%s)'%(msg,condition)
conf.start_msg(msg)
@@ -324,20 +356,25 @@
else:
conf.end_msg(ret)
def configure(conf):
- try:
- conf.find_program('python',var='PYTHON')
- except conf.errors.ConfigurationError:
- Logs.warn("could not find a python executable, setting to sys.executable '%s'"%sys.executable)
- conf.env.PYTHON=sys.executable
- if conf.env.PYTHON!=sys.executable:
- Logs.warn("python executable %r differs from system %r"%(conf.env.PYTHON,sys.executable))
- conf.env.PYTHON=conf.cmd_to_list(conf.env.PYTHON)
v=conf.env
- v['PYCMD']='"import sys, py_compile;py_compile.compile(sys.argv[1], sys.argv[2])"'
+ v['PYTHON']=Options.options.python or os.environ.get('PYTHON',sys.executable)
+ if Options.options.pythondir:
+ v['PYTHONDIR']=Options.options.pythondir
+ if Options.options.pythonarchdir:
+ v['PYTHONARCHDIR']=Options.options.pythonarchdir
+ conf.find_program('python',var='PYTHON')
v['PYFLAGS']=''
v['PYFLAGS_OPT']='-O'
v['PYC']=getattr(Options.options,'pyc',1)
v['PYO']=getattr(Options.options,'pyo',1)
+ try:
+ v.PYTAG=conf.cmd_and_log(conf.env.PYTHON+['-c',"import imp;print(imp.get_tag())"]).strip()
+ except Errors.WafError:
+ pass
def options(opt):
- opt.add_option('--nopyc',action='store_false',default=1,help='Do not install bytecode compiled .pyc files (configuration) [Default:install]',dest='pyc')
- opt.add_option('--nopyo',action='store_false',default=1,help='Do not install optimised compiled .pyo files (configuration) [Default:install]',dest='pyo')
+ pyopt=opt.add_option_group("Python Options")
+ pyopt.add_option('--nopyc',dest='pyc',action='store_false',default=1,help='Do not install bytecode compiled .pyc files (configuration) [Default:install]')
+ pyopt.add_option('--nopyo',dest='pyo',action='store_false',default=1,help='Do not install optimised compiled .pyo files (configuration) [Default:install]')
+ pyopt.add_option('--python',dest="python",help='python binary to be used [Default: %s]'%sys.executable)
+ pyopt.add_option('--pythondir',dest='pythondir',help='Installation path for python modules (py, platform-independent .py and .pyc files)')
+ pyopt.add_option('--pythonarchdir',dest='pythonarchdir',help='Installation path for python extension (pyext, platform-dependent .so or .dylib files)')
--- a/waflib/Tools/qt4.py
+++ b/waflib/Tools/qt4.py
@@ -11,8 +11,8 @@
else:
has_xml=True
import os,sys
-from waflib.Tools import c_preproc,cxx
-from waflib import Task,Utils,Options,Errors
+from waflib.Tools import cxx
+from waflib import Task,Utils,Options,Errors,Context
from waflib.TaskGen import feature,after_method,extension
from waflib.Configure import conf
from waflib import Logs
@@ -25,17 +25,6 @@
def __init__(self,*k,**kw):
Task.Task.__init__(self,*k,**kw)
self.moc_done=0
- def scan(self):
- (nodes,names)=c_preproc.scan(self)
- lst=[]
- for x in nodes:
- if x.name.endswith('.moc'):
- s=x.path_from(self.inputs[0].parent.get_bld())
- if s not in names:
- names.append(s)
- else:
- lst.append(x)
- return(lst,names)
def runnable_status(self):
if self.moc_done:
return Task.Task.runnable_status(self)
@@ -56,10 +45,20 @@
tsk=moc_cache[h_node]=Task.classes['moc'](env=self.env,generator=self.generator)
tsk.set_inputs(h_node)
tsk.set_outputs(m_node)
+ if self.generator:
+ self.generator.tasks.append(tsk)
gen=self.generator.bld.producer
gen.outstanding.insert(0,tsk)
gen.total+=1
return tsk
+ def moc_h_ext(self):
+ try:
+ ext=Options.options.qt_header_ext.split()
+ except AttributeError:
+ pass
+ if not ext:
+ ext=MOC_H
+ return ext
def add_moc_tasks(self):
node=self.inputs[0]
bld=self.generator.bld
@@ -69,27 +68,19 @@
pass
else:
delattr(self,'cache_sig')
+ include_nodes=[node.parent]+self.generator.includes_nodes
moctasks=[]
- mocfiles=[]
- try:
- tmp_lst=bld.raw_deps[self.uid()]
- bld.raw_deps[self.uid()]=[]
- except KeyError:
- tmp_lst=[]
- for d in tmp_lst:
+ mocfiles=set([])
+ for d in bld.raw_deps.get(self.uid(),[]):
if not d.endswith('.moc'):continue
if d in mocfiles:
- Logs.error("paranoia owns")continue
- mocfiles.append(d)
+ mocfiles.add(d)
h_node=None
- try:ext=Options.options.qt_header_ext.split()
- except AttributeError:pass
- if not ext:ext=MOC_H
base2=d[:-4]
- for x in[node.parent]+self.generator.includes_nodes:
- for e in ext:
+ for x in include_nodes:
+ for e in self.moc_h_ext():
h_node=x.find_node(base2+e)
if h_node:
break
@@ -99,28 +90,19 @@
else:
for k in EXT_QT4:
if base2.endswith(k):
- for x in[node.parent]+self.generator.includes_nodes:
+ for x in include_nodes:
h_node=x.find_node(base2)
if h_node:
break
- if h_node:
- m_node=h_node.change_ext(k+'.moc')
- break
+ if h_node:
+ m_node=h_node.change_ext(k+'.moc')
+ break
if not h_node:
- raise Errors.WafError('no header found for %r which is a moc file'%d)
- bld.node_deps[(self.inputs[0].parent.abspath(),m_node.name)]=h_node
+ raise Errors.WafError('No source found for %r which is a moc file'%d)
task=self.create_moc_task(h_node,m_node)
moctasks.append(task)
- tmp_lst=bld.raw_deps[self.uid()]=mocfiles
- lst=bld.node_deps.get(self.uid(),())
- for d in lst:
- name=d.name
- if name.endswith('.moc'):
- task=self.create_moc_task(bld.node_deps[(self.inputs[0].parent.abspath(),name)],d)
- moctasks.append(task)
self.run_after.update(set(moctasks))
self.moc_done=1
- run=Task.classes['cxx'].__dict__['run']
class trans_update(Task.Task):
run_str='${QT_LUPDATE} ${SRC} -ts ${TGT}'
color='BLUE'
@@ -140,7 +122,7 @@
@extension(*EXT_RCC)
def create_rcc_task(self,node):
rcnode=node.change_ext('_rc.cpp')
- rcctask=self.create_task('rcc',node,rcnode)
+ self.create_task('rcc',node,rcnode)
cpptask=self.create_task('cxx',rcnode,rcnode.change_ext('.o'))
try:
self.compiled_tasks.append(cpptask)
@@ -179,7 +161,7 @@
for flag in self.to_list(self.env['CXXFLAGS']):
if len(flag)<2:continue
f=flag[0:2]
- if f in['-D','-I','/D','/I']:
+ if f in('-D','-I','/D','/I'):
if(f[0]=='/'):
lst.append('-'+flag[1:])
else:
@@ -190,10 +172,11 @@
return self.create_compiled_task('qxx',node)
class rcc(Task.Task):
color='BLUE'
- run_str='${QT_RCC} -name ${SRC[0].name} ${SRC[0].abspath()} ${RCC_ST} -o ${TGT}'
+ run_str='${QT_RCC} -name ${tsk.rcname()} ${SRC[0].abspath()} ${RCC_ST} -o ${TGT}'
ext_out=['.h']
+ def rcname(self):
+ return os.path.splitext(self.inputs[0].name)[0]
def scan(self):
- node=self.inputs[0]
if not has_xml:
Logs.error('no xml support was found, the rcc dependencies will be incomplete!')
return([],[])
@@ -216,6 +199,10 @@
class moc(Task.Task):
color='BLUE'
run_str='${QT_MOC} ${MOC_FLAGS} ${MOCCPPPATH_ST:INCPATHS} ${MOCDEFINES_ST:DEFINES} ${SRC} ${MOC_ST} ${TGT}'
+ def keyword(self):
+ return"Creating"
+ def __str__(self):
+ return self.outputs[0].path_from(self.generator.bld.launch_node())
class ui4(Task.Task):
color='BLUE'
run_str='${QT_UIC} ${SRC} -o ${TGT}'
@@ -267,7 +254,7 @@
paths.append(qtbin)
cand=None
prev_ver=['4','0','0']
- for qmk in['qmake-qt4','qmake4','qmake']:
+ for qmk in('qmake-qt4','qmake4','qmake'):try:
qmake=self.find_program(qmk,path_list=paths)
except self.errors.ConfigurationError:
@@ -274,7 +261,7 @@
pass
else:
try:
- version=self.cmd_and_log([qmake,'-query','QT_VERSION']).strip()
+ version=self.cmd_and_log(qmake+['-query','QT_VERSION']).strip()
except self.errors.WafError:
pass
else:
@@ -287,7 +274,7 @@
self.env.QMAKE=cand
else:
self.fatal('Could not find qmake for qt4')
- qtbin=self.cmd_and_log([self.env.QMAKE,'-query','QT_INSTALL_BINS']).strip()+os.sep
+ qtbin=self.cmd_and_log(self.env.QMAKE+['-query','QT_INSTALL_BINS']).strip()+os.sep
def find_bin(lst,var):
if var in env:
return
@@ -301,18 +288,17 @@
break
find_bin(['uic-qt3','uic3'],'QT_UIC3')
find_bin(['uic-qt4','uic'],'QT_UIC')
- if not env['QT_UIC']:
+ if not env.QT_UIC:
self.fatal('cannot find the uic compiler for qt4')
- try:
- uicver=self.cmd_and_log(env['QT_UIC']+" -version 2>&1").strip()
- except self.errors.ConfigurationError:
- self.fatal('this uic compiler is for qt3, add uic for qt4 to your path')
+ self.start_msg('Checking for uic version')
+ uicver=self.cmd_and_log(env.QT_UIC+["-version"],output=Context.BOTH)
+ uicver=''.join(uicver).strip()
uicver=uicver.replace('Qt User Interface Compiler ','').replace('User Interface Compiler for Qt','')
- self.msg('Checking for uic version','%s'%uicver)
+ self.end_msg(uicver)
if uicver.find(' 3.')!=-1:
self.fatal('this uic compiler is for qt3, add uic for qt4 to your path')
find_bin(['moc-qt4','moc'],'QT_MOC')
- find_bin(['rcc'],'QT_RCC')
+ find_bin(['rcc-qt4','rcc'],'QT_RCC')
find_bin(['lrelease-qt4','lrelease'],'QT_LRELEASE')
find_bin(['lupdate-qt4','lupdate'],'QT_LUPDATE')
env['UIC3_ST']='%s -o %s'
@@ -327,12 +313,12 @@
qtlibs=getattr(Options.options,'qtlibs',None)or os.environ.get("QT4_LIBDIR",None)
if not qtlibs:
try:
- qtlibs=self.cmd_and_log([self.env.QMAKE,'-query','QT_INSTALL_LIBS']).strip()
+ qtlibs=self.cmd_and_log(self.env.QMAKE+['-query','QT_INSTALL_LIBS']).strip()
except Errors.WafError:
- qtdir=self.cmd_and_log([self.env.QMAKE,'-query','QT_INSTALL_PREFIX']).strip()+os.sep
+ qtdir=self.cmd_and_log(self.env.QMAKE+['-query','QT_INSTALL_PREFIX']).strip()+os.sep
qtlibs=os.path.join(qtdir,'lib')
self.msg('Found the Qt4 libraries in',qtlibs)
- qtincludes=os.environ.get("QT4_INCLUDES",None)or self.cmd_and_log([self.env.QMAKE,'-query','QT_INSTALL_HEADERS']).strip()
+ qtincludes=os.environ.get("QT4_INCLUDES",None)or self.cmd_and_log(self.env.QMAKE+['-query','QT_INSTALL_HEADERS']).strip()
env=self.env
if not'PKG_CONFIG_PATH'in os.environ:
os.environ['PKG_CONFIG_PATH']='%s:%s/pkgconfig:/usr/lib/qt4/lib/pkgconfig:/opt/qt4/lib/pkgconfig:/usr/lib/qt4/lib:/opt/qt4/lib'%(qtlibs,qtlibs)
--- /dev/null
+++ b/waflib/Tools/qt5.py
@@ -1,0 +1,488 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
+
+try:
+ from xml.sax import make_parser
+ from xml.sax.handler import ContentHandler
+except ImportError:
+ has_xml=False
+ ContentHandler=object
+else:
+ has_xml=True
+import os,sys
+from waflib.Tools import cxx
+from waflib import Task,Utils,Options,Errors,Context
+from waflib.TaskGen import feature,after_method,extension
+from waflib.Configure import conf
+from waflib import Logs
+MOC_H=['.h','.hpp','.hxx','.hh']
+EXT_RCC=['.qrc']
+EXT_UI=['.ui']
+EXT_QT5=['.cpp','.cc','.cxx','.C']
+QT5_LIBS='''
+qtmain
+Qt5Bluetooth
+Qt5CLucene
+Qt5Concurrent
+Qt5Core
+Qt5DBus
+Qt5Declarative
+Qt5DesignerComponents
+Qt5Designer
+Qt5Gui
+Qt5Help
+Qt5MultimediaQuick_p
+Qt5Multimedia
+Qt5MultimediaWidgets
+Qt5Network
+Qt5Nfc
+Qt5OpenGL
+Qt5Positioning
+Qt5PrintSupport
+Qt5Qml
+Qt5QuickParticles
+Qt5Quick
+Qt5QuickTest
+Qt5Script
+Qt5ScriptTools
+Qt5Sensors
+Qt5SerialPort
+Qt5Sql
+Qt5Svg
+Qt5Test
+Qt5WebKit
+Qt5WebKitWidgets
+Qt5Widgets
+Qt5WinExtras
+Qt5X11Extras
+Qt5XmlPatterns
+Qt5Xml'''
+class qxx(Task.classes['cxx']):
+ def __init__(self,*k,**kw):
+ Task.Task.__init__(self,*k,**kw)
+ self.moc_done=0
+ def runnable_status(self):
+ if self.moc_done:
+ return Task.Task.runnable_status(self)
+ else:
+ for t in self.run_after:
+ if not t.hasrun:
+ return Task.ASK_LATER
+ self.add_moc_tasks()
+ return Task.Task.runnable_status(self)
+ def create_moc_task(self,h_node,m_node):
+ try:
+ moc_cache=self.generator.bld.moc_cache
+ except AttributeError:
+ moc_cache=self.generator.bld.moc_cache={}
+ try:
+ return moc_cache[h_node]
+ except KeyError:
+ tsk=moc_cache[h_node]=Task.classes['moc'](env=self.env,generator=self.generator)
+ tsk.set_inputs(h_node)
+ tsk.set_outputs(m_node)
+ if self.generator:
+ self.generator.tasks.append(tsk)
+ gen=self.generator.bld.producer
+ gen.outstanding.insert(0,tsk)
+ gen.total+=1
+ return tsk
+ else:
+ delattr(self,'cache_sig')
+ def moc_h_ext(self):
+ try:
+ ext=Options.options.qt_header_ext.split()
+ except AttributeError:
+ pass
+ if not ext:
+ ext=MOC_H
+ return ext
+ def add_moc_tasks(self):
+ node=self.inputs[0]
+ bld=self.generator.bld
+ try:
+ self.signature()
+ except KeyError:
+ pass
+ else:
+ delattr(self,'cache_sig')
+ include_nodes=[node.parent]+self.generator.includes_nodes
+ moctasks=[]
+ mocfiles=set([])
+ for d in bld.raw_deps.get(self.uid(),[]):
+ if not d.endswith('.moc'):
+ continue
+ if d in mocfiles:
+ continue
+ mocfiles.add(d)
+ h_node=None
+ base2=d[:-4]
+ for x in include_nodes:
+ for e in self.moc_h_ext():
+ h_node=x.find_node(base2+e)
+ if h_node:
+ break
+ if h_node:
+ m_node=h_node.change_ext('.moc')
+ break
+ else:
+ for k in EXT_QT5:
+ if base2.endswith(k):
+ for x in include_nodes:
+ h_node=x.find_node(base2)
+ if h_node:
+ break
+ if h_node:
+ m_node=h_node.change_ext(k+'.moc')
+ break
+ if not h_node:
+ raise Errors.WafError('No source found for %r which is a moc file'%d)
+ task=self.create_moc_task(h_node,m_node)
+ moctasks.append(task)
+ self.run_after.update(set(moctasks))
+ self.moc_done=1
+class trans_update(Task.Task):
+ run_str='${QT_LUPDATE} ${SRC} -ts ${TGT}'
+ color='BLUE'
+Task.update_outputs(trans_update)
+class XMLHandler(ContentHandler):
+ def __init__(self):
+ self.buf=[]
+ self.files=[]
+ def startElement(self,name,attrs):
+ if name=='file':
+ self.buf=[]
+ def endElement(self,name):
+ if name=='file':
+ self.files.append(str(''.join(self.buf)))
+ def characters(self,cars):
+ self.buf.append(cars)
+@extension(*EXT_RCC)
+def create_rcc_task(self,node):
+ rcnode=node.change_ext('_rc.cpp')
+ self.create_task('rcc',node,rcnode)
+ cpptask=self.create_task('cxx',rcnode,rcnode.change_ext('.o'))
+ try:
+ self.compiled_tasks.append(cpptask)
+ except AttributeError:
+ self.compiled_tasks=[cpptask]
+ return cpptask
+@extension(*EXT_UI)
+def create_uic_task(self,node):
+ uictask=self.create_task('ui5',node)
+ uictask.outputs=[self.path.find_or_declare(self.env['ui_PATTERN']%node.name[:-3])]
+@extension('.ts')
+def add_lang(self,node):
+ self.lang=self.to_list(getattr(self,'lang',[]))+[node]
+@feature('qt5')
+@after_method('apply_link')
+def apply_qt5(self):
+ if getattr(self,'lang',None):
+ qmtasks=[]
+ for x in self.to_list(self.lang):
+ if isinstance(x,str):
+ x=self.path.find_resource(x+'.ts')
+ qmtasks.append(self.create_task('ts2qm',x,x.change_ext('.qm')))
+ if getattr(self,'update',None)and Options.options.trans_qt5:
+ cxxnodes=[a.inputs[0]for a in self.compiled_tasks]+[a.inputs[0]for a in self.tasks if getattr(a,'inputs',None)and a.inputs[0].name.endswith('.ui')]
+ for x in qmtasks:
+ self.create_task('trans_update',cxxnodes,x.inputs)
+ if getattr(self,'langname',None):
+ qmnodes=[x.outputs[0]for x in qmtasks]
+ rcnode=self.langname
+ if isinstance(rcnode,str):
+ rcnode=self.path.find_or_declare(rcnode+'.qrc')
+ t=self.create_task('qm2rcc',qmnodes,rcnode)
+ k=create_rcc_task(self,t.outputs[0])
+ self.link_task.inputs.append(k.outputs[0])
+ lst=[]
+ for flag in self.to_list(self.env['CXXFLAGS']):
+ if len(flag)<2:continue
+ f=flag[0:2]
+ if f in('-D','-I','/D','/I'):
+ if(f[0]=='/'):
+ lst.append('-'+flag[1:])
+ else:
+ lst.append(flag)
+ self.env.append_value('MOC_FLAGS',lst)
+@extension(*EXT_QT5)
+def cxx_hook(self,node):
+ return self.create_compiled_task('qxx',node)
+class rcc(Task.Task):
+ color='BLUE'
+ run_str='${QT_RCC} -name ${tsk.rcname()} ${SRC[0].abspath()} ${RCC_ST} -o ${TGT}'
+ ext_out=['.h']
+ def rcname(self):
+ return os.path.splitext(self.inputs[0].name)[0]
+ def scan(self):
+ if not has_xml:
+ Logs.error('no xml support was found, the rcc dependencies will be incomplete!')
+ return([],[])
+ parser=make_parser()
+ curHandler=XMLHandler()
+ parser.setContentHandler(curHandler)
+ fi=open(self.inputs[0].abspath(),'r')
+ try:
+ parser.parse(fi)
+ finally:
+ fi.close()
+ nodes=[]
+ names=[]
+ root=self.inputs[0].parent
+ for x in curHandler.files:
+ nd=root.find_resource(x)
+ if nd:nodes.append(nd)
+ else:names.append(x)
+ return(nodes,names)
+class moc(Task.Task):
+ color='BLUE'
+ run_str='${QT_MOC} ${MOC_FLAGS} ${MOCCPPPATH_ST:INCPATHS} ${MOCDEFINES_ST:DEFINES} ${SRC} ${MOC_ST} ${TGT}'
+class ui5(Task.Task):
+ color='BLUE'
+ run_str='${QT_UIC} ${SRC} -o ${TGT}'
+ ext_out=['.h']
+class ts2qm(Task.Task):
+ color='BLUE'
+ run_str='${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}'
+class qm2rcc(Task.Task):
+ color='BLUE'
+ after='ts2qm'
+ def run(self):
+ txt='\n'.join(['<file>%s</file>'%k.path_from(self.outputs[0].parent)for k in self.inputs])
+ code='<!DOCTYPE RCC><RCC version="1.0">\n<qresource>\n%s\n</qresource>\n</RCC>'%txt
+ self.outputs[0].write(code)
+def configure(self):
+ self.find_qt5_binaries()
+ self.set_qt5_libs_to_check()
+ self.set_qt5_defines()
+ self.find_qt5_libraries()
+ self.add_qt5_rpath()
+ self.simplify_qt5_libs()
+@conf
+def find_qt5_binaries(self):
+ env=self.env
+ opt=Options.options
+ qtdir=getattr(opt,'qtdir','')
+ qtbin=getattr(opt,'qtbin','')
+ paths=[]
+ if qtdir:
+ qtbin=os.path.join(qtdir,'bin')
+ if not qtdir:
+ qtdir=os.environ.get('QT5_ROOT','')
+ qtbin=os.environ.get('QT5_BIN',None)or os.path.join(qtdir,'bin')
+ if qtbin:
+ paths=[qtbin]
+ if not qtdir:
+ paths=os.environ.get('PATH','').split(os.pathsep)
+ paths.append('/usr/share/qt5/bin/')
+ try:
+ lst=Utils.listdir('/usr/local/Trolltech/')
+ except OSError:
+ pass
+ else:
+ if lst:
+ lst.sort()
+ lst.reverse()
+ qtdir='/usr/local/Trolltech/%s/'%lst[0]
+ qtbin=os.path.join(qtdir,'bin')
+ paths.append(qtbin)
+ cand=None
+ prev_ver=['5','0','0']
+ for qmk in('qmake-qt5','qmake5','qmake'):
+ try:
+ qmake=self.find_program(qmk,path_list=paths)
+ except self.errors.ConfigurationError:
+ pass
+ else:
+ try:
+ version=self.cmd_and_log(qmake+['-query','QT_VERSION']).strip()
+ except self.errors.WafError:
+ pass
+ else:
+ if version:
+ new_ver=version.split('.')
+ if new_ver>prev_ver:
+ cand=qmake
+ prev_ver=new_ver
+ if not cand:
+ try:
+ self.find_program('qtchooser')
+ except self.errors.ConfigurationError:
+ pass
+ else:
+ cmd=self.env.QTCHOOSER+['-qt=5','-run-tool=qmake']
+ try:
+ version=self.cmd_and_log(cmd+['-query','QT_VERSION'])
+ except self.errors.WafError:
+ pass
+ else:
+ cand=cmd
+ if cand:
+ self.env.QMAKE=cand
+ else:
+ self.fatal('Could not find qmake for qt5')
+ self.env.QT_INSTALL_BINS=qtbin=self.cmd_and_log(self.env.QMAKE+['-query','QT_INSTALL_BINS']).strip()+os.sep
+ paths.insert(0,qtbin)
+ def find_bin(lst,var):
+ if var in env:
+ return
+ for f in lst:
+ try:
+ ret=self.find_program(f,path_list=paths)
+ except self.errors.ConfigurationError:
+ pass
+ else:
+ env[var]=ret
+ break
+ find_bin(['uic-qt5','uic'],'QT_UIC')
+ if not env.QT_UIC:
+ self.fatal('cannot find the uic compiler for qt5')
+ self.start_msg('Checking for uic version')
+ uicver=self.cmd_and_log(env.QT_UIC+['-version'],output=Context.BOTH)
+ uicver=''.join(uicver).strip()
+ uicver=uicver.replace('Qt User Interface Compiler ','').replace('User Interface Compiler for Qt','')
+ self.end_msg(uicver)
+ if uicver.find(' 3.')!=-1 or uicver.find(' 4.')!=-1:
+ self.fatal('this uic compiler is for qt3 or qt4, add uic for qt5 to your path')
+ find_bin(['moc-qt5','moc'],'QT_MOC')
+ find_bin(['rcc-qt5','rcc'],'QT_RCC')
+ find_bin(['lrelease-qt5','lrelease'],'QT_LRELEASE')
+ find_bin(['lupdate-qt5','lupdate'],'QT_LUPDATE')
+ env['UIC_ST']='%s -o %s'
+ env['MOC_ST']='-o'
+ env['ui_PATTERN']='ui_%s.h'
+ env['QT_LRELEASE_FLAGS']=['-silent']
+ env.MOCCPPPATH_ST='-I%s'
+ env.MOCDEFINES_ST='-D%s'
+@conf
+def find_qt5_libraries(self):
+ qtlibs=getattr(Options.options,'qtlibs',None)or os.environ.get("QT5_LIBDIR",None)
+ if not qtlibs:
+ try:
+ qtlibs=self.cmd_and_log(self.env.QMAKE+['-query','QT_INSTALL_LIBS']).strip()
+ except Errors.WafError:
+ qtdir=self.cmd_and_log(self.env.QMAKE+['-query','QT_INSTALL_PREFIX']).strip()+os.sep
+ qtlibs=os.path.join(qtdir,'lib')
+ self.msg('Found the Qt5 libraries in',qtlibs)
+ qtincludes=os.environ.get("QT5_INCLUDES",None)or self.cmd_and_log(self.env.QMAKE+['-query','QT_INSTALL_HEADERS']).strip()
+ env=self.env
+ if not'PKG_CONFIG_PATH'in os.environ:
+ os.environ['PKG_CONFIG_PATH']='%s:%s/pkgconfig:/usr/lib/qt5/lib/pkgconfig:/opt/qt5/lib/pkgconfig:/usr/lib/qt5/lib:/opt/qt5/lib'%(qtlibs,qtlibs)
+ try:
+ if os.environ.get("QT5_XCOMPILE",None):+ raise self.errors.ConfigurationError()
+ self.check_cfg(atleast_pkgconfig_version='0.1')
+ except self.errors.ConfigurationError:
+ for i in self.qt5_vars:
+ uselib=i.upper()
+ if Utils.unversioned_sys_platform()=="darwin":
+ frameworkName=i+".framework"
+ qtDynamicLib=os.path.join(qtlibs,frameworkName,i)
+ if os.path.exists(qtDynamicLib):
+ env.append_unique('FRAMEWORK_'+uselib,i)
+ self.msg('Checking for %s'%i,qtDynamicLib,'GREEN')
+ else:
+ self.msg('Checking for %s'%i,False,'YELLOW')
+ env.append_unique('INCLUDES_'+uselib,os.path.join(qtlibs,frameworkName,'Headers'))
+ elif env.DEST_OS!="win32":
+ qtDynamicLib=os.path.join(qtlibs,"lib"+i+".so")
+ qtStaticLib=os.path.join(qtlibs,"lib"+i+".a")
+ if os.path.exists(qtDynamicLib):
+ env.append_unique('LIB_'+uselib,i)
+ self.msg('Checking for %s'%i,qtDynamicLib,'GREEN')
+ elif os.path.exists(qtStaticLib):
+ env.append_unique('LIB_'+uselib,i)
+ self.msg('Checking for %s'%i,qtStaticLib,'GREEN')
+ else:
+ self.msg('Checking for %s'%i,False,'YELLOW')
+ env.append_unique('LIBPATH_'+uselib,qtlibs)
+ env.append_unique('INCLUDES_'+uselib,qtincludes)
+ env.append_unique('INCLUDES_'+uselib,os.path.join(qtincludes,i))
+ else:
+ for k in("lib%s.a","lib%s5.a","%s.lib","%s5.lib"):
+ lib=os.path.join(qtlibs,k%i)
+ if os.path.exists(lib):
+ env.append_unique('LIB_'+uselib,i+k[k.find("%s")+2:k.find('.')])
+ self.msg('Checking for %s'%i,lib,'GREEN')
+ break
+ else:
+ self.msg('Checking for %s'%i,False,'YELLOW')
+ env.append_unique('LIBPATH_'+uselib,qtlibs)
+ env.append_unique('INCLUDES_'+uselib,qtincludes)
+ env.append_unique('INCLUDES_'+uselib,os.path.join(qtincludes,i))
+ uselib=i.upper()+"_debug"
+ for k in("lib%sd.a","lib%sd5.a","%sd.lib","%sd5.lib"):
+ lib=os.path.join(qtlibs,k%i)
+ if os.path.exists(lib):
+ env.append_unique('LIB_'+uselib,i+k[k.find("%s")+2:k.find('.')])
+ self.msg('Checking for %s'%i,lib,'GREEN')
+ break
+ else:
+ self.msg('Checking for %s'%i,False,'YELLOW')
+ env.append_unique('LIBPATH_'+uselib,qtlibs)
+ env.append_unique('INCLUDES_'+uselib,qtincludes)
+ env.append_unique('INCLUDES_'+uselib,os.path.join(qtincludes,i))
+ else:
+ for i in self.qt5_vars_debug+self.qt5_vars:
+ self.check_cfg(package=i,args='--cflags --libs',mandatory=False)
+@conf
+def simplify_qt5_libs(self):
+ env=self.env
+ def process_lib(vars_,coreval):
+ for d in vars_:
+ var=d.upper()
+ if var=='QTCORE':
+ continue
+ value=env['LIBPATH_'+var]
+ if value:
+ core=env[coreval]
+ accu=[]
+ for lib in value:
+ if lib in core:
+ continue
+ accu.append(lib)
+ env['LIBPATH_'+var]=accu
+ process_lib(self.qt5_vars,'LIBPATH_QTCORE')
+ process_lib(self.qt5_vars_debug,'LIBPATH_QTCORE_DEBUG')
+@conf
+def add_qt5_rpath(self):
+ env=self.env
+ if getattr(Options.options,'want_rpath',False):
+ def process_rpath(vars_,coreval):
+ for d in vars_:
+ var=d.upper()
+ value=env['LIBPATH_'+var]
+ if value:
+ core=env[coreval]
+ accu=[]
+ for lib in value:
+ if var!='QTCORE':
+ if lib in core:
+ continue
+ accu.append('-Wl,--rpath='+lib)
+ env['RPATH_'+var]=accu
+ process_rpath(self.qt5_vars,'LIBPATH_QTCORE')
+ process_rpath(self.qt5_vars_debug,'LIBPATH_QTCORE_DEBUG')
+@conf
+def set_qt5_libs_to_check(self):
+ if not hasattr(self,'qt5_vars'):
+ self.qt5_vars=QT5_LIBS
+ self.qt5_vars=Utils.to_list(self.qt5_vars)
+ if not hasattr(self,'qt5_vars_debug'):
+ self.qt5_vars_debug=[a+'_debug'for a in self.qt5_vars]
+ self.qt5_vars_debug=Utils.to_list(self.qt5_vars_debug)
+@conf
+def set_qt5_defines(self):
+ if sys.platform!='win32':
+ return
+ for x in self.qt5_vars:
+ y=x[2:].upper()
+ self.env.append_unique('DEFINES_%s'%x.upper(),'QT_%s_LIB'%y)
+ self.env.append_unique('DEFINES_%s_DEBUG'%x.upper(),'QT_%s_LIB'%y)
+def options(opt):
+ opt.add_option('--want-rpath',action='store_true',default=False,dest='want_rpath',help='enable the rpath for qt libraries')
+ opt.add_option('--header-ext',type='string',default='',help='header extension for moc files',dest='qt_header_ext')
+ for i in'qtdir qtbin qtlibs'.split():
+ opt.add_option('--'+i,type='string',default='',dest=i)
+ opt.add_option('--translate',action="store_true",help="collect translation strings",dest="trans_qt5",default=False)
--- a/waflib/Tools/ruby.py
+++ b/waflib/Tools/ruby.py
@@ -27,7 +27,7 @@
self.find_program('ruby',var='RUBY')
ruby=self.env.RUBY
try:
- version=self.cmd_and_log([ruby,'-e','puts defined?(VERSION) ? VERSION : RUBY_VERSION']).strip()
+ version=self.cmd_and_log(ruby+['-e','puts defined?(VERSION) ? VERSION : RUBY_VERSION']).strip()
except Exception:
self.fatal('could not determine ruby version')
self.env.RUBY_VERSION=version
@@ -51,9 +51,9 @@
self.fatal('load a c/c++ compiler first')
version=tuple(map(int,self.env.RUBY_VERSION.split(".")))
def read_out(cmd):
- return Utils.to_list(self.cmd_and_log([self.env.RUBY,'-rrbconfig','-e',cmd]))
+ return Utils.to_list(self.cmd_and_log(self.env.RUBY+['-rrbconfig','-e',cmd]))
def read_config(key):
- return read_out('puts Config::CONFIG[%r]'%key)
+ return read_out('puts RbConfig::CONFIG[%r]'%key)
ruby=self.env['RUBY']
archdir=read_config('archdir')
cpppath=archdir
@@ -87,7 +87,7 @@
def check_ruby_module(self,module_name):
self.start_msg('Ruby module %s'%module_name)
try:
- self.cmd_and_log([self.env['RUBY'],'-e','require \'%s\';puts 1'%module_name])
+ self.cmd_and_log(self.env.RUBY+['-e','require \'%s\';puts 1'%module_name])
except Exception:
self.end_msg(False)
self.fatal('Could not find the ruby module %r'%module_name)
--- a/waflib/Tools/suncc.py
+++ b/waflib/Tools/suncc.py
@@ -2,7 +2,6 @@
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-import os
from waflib import Utils
from waflib.Tools import ccroot,ar
from waflib.Configure import conf
@@ -9,18 +8,12 @@
@conf
def find_scc(conf):
v=conf.env
- cc=None
- if v['CC']:cc=v['CC']
- elif'CC'in conf.environ:cc=conf.environ['CC']
- if not cc:cc=conf.find_program('cc',var='CC')
- if not cc:conf.fatal('Could not find a Sun C compiler')
- cc=conf.cmd_to_list(cc)
+ cc=conf.find_program('cc',var='CC')
try:
conf.cmd_and_log(cc+['-flags'])
except Exception:
conf.fatal('%r is not a Sun compiler'%cc)
- v['CC']=cc
- v['CC_NAME']='sun'
+ v.CC_NAME='sun'
conf.get_suncc_version(cc)
@conf
def scc_common_flags(conf):
--- a/waflib/Tools/suncxx.py
+++ b/waflib/Tools/suncxx.py
@@ -2,7 +2,6 @@
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-import os
from waflib import Utils
from waflib.Tools import ccroot,ar
from waflib.Configure import conf
@@ -9,19 +8,12 @@
@conf
def find_sxx(conf):
v=conf.env
- cc=None
- if v['CXX']:cc=v['CXX']
- elif'CXX'in conf.environ:cc=conf.environ['CXX']
- if not cc:cc=conf.find_program('CC',var='CXX')
- if not cc:cc=conf.find_program('c++',var='CXX')
- if not cc:conf.fatal('Could not find a Sun C++ compiler')
- cc=conf.cmd_to_list(cc)
+ cc=conf.find_program(['CC','c++'],var='CXX')
try:
conf.cmd_and_log(cc+['-flags'])
except Exception:
conf.fatal('%r is not a Sun compiler'%cc)
- v['CXX']=cc
- v['CXX_NAME']='sun'
+ v.CXX_NAME='sun'
conf.get_suncc_version(cc)
@conf
def sxx_common_flags(conf):
--- a/waflib/Tools/tex.py
+++ b/waflib/Tools/tex.py
@@ -3,7 +3,7 @@
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
import os,re
-from waflib import Utils,Task,Errors,Logs
+from waflib import Utils,Task,Errors,Logs,Node
from waflib.TaskGen import feature,before_method
re_bibunit=re.compile(r'\\(?P<type>putbib)\[(?P<file>[^\[\]]*)\]',re.M)
def bibunitscan(self):
@@ -14,7 +14,7 @@
for match in re_bibunit.finditer(code):
path=match.group('file')
if path:
- for k in['','.bib']:
+ for k in('','.bib'): Logs.debug('tex: trying %s%s'%(path,k))fi=node.parent.find_resource(path+k)
if fi:
@@ -23,10 +23,11 @@
Logs.debug('tex: could not find %s'%path)
Logs.debug("tex: found the following bibunit files: %s"%nodes)
return nodes
-exts_deps_tex=['','.ltx','.tex','.bib','.pdf','.png','.eps','.ps']
+exts_deps_tex=['','.ltx','.tex','.bib','.pdf','.png','.eps','.ps','.sty']
exts_tex=['.ltx','.tex']
-re_tex=re.compile(r'\\(?P<type>include|bibliography|putbib|includegraphics|input|import|bringin|lstinputlisting)(\[[^\[\]]*\])?{(?P<file>[^{}]*)}',re.M)
+re_tex=re.compile(r'\\(?P<type>usepackage|RequirePackage|include|bibliography([^\[\]{}]*)|putbib|includegraphics|input|import|bringin|lstinputlisting)(\[[^\[\]]*\])?{(?P<file>[^{}]*)}',re.M)
g_bibtex_re=re.compile('bibdata',re.M)
+g_glossaries_re=re.compile('\\@newglossary',re.M)
class tex(Task.Task):
bibtex_fun,_=Task.compile_fun('${BIBTEX} ${BIBTEXFLAGS} ${SRCFILE}',shell=False)
bibtex_fun.__doc__="""
@@ -36,8 +37,13 @@
makeindex_fun.__doc__="""
Execute the program **makeindex**
"""
+ makeglossaries_fun,_=Task.compile_fun('${MAKEGLOSSARIES} ${SRCFILE}',shell=False)
+ makeglossaries_fun.__doc__="""
+ Execute the program **makeglossaries**
+ """
def exec_command(self,cmd,**kw):
bld=self.generator.bld
+ Logs.info('runner: %r'%cmd)
try:
if not kw.get('cwd',None):kw['cwd']=bld.cwd
@@ -71,13 +77,23 @@
code=node.read()
global re_tex
for match in re_tex.finditer(code):
+ multibib=match.group('type')
+ if multibib and multibib.startswith('bibliography'):
+ multibib=multibib[len('bibliography'):]
+ if multibib.startswith('style'):
+ continue
+ else:
+ multibib=None
for path in match.group('file').split(','):
if path:
add_name=True
found=None
for k in exts_deps_tex:
- Logs.debug('tex: trying %s%s'%(path,k))
- found=node.parent.find_resource(path+k)
+ for up in self.texinputs_nodes:
+ Logs.debug('tex: trying %s%s'%(path,k))
+ found=up.find_resource(path+k)
+ if found:
+ break
for tsk in self.generator.tasks:
if not found or found in tsk.outputs:
break
@@ -88,6 +104,11 @@
if found.name.endswith(ext):
parse_node(found)
break
+ if found and multibib and found.name.endswith('.bib'):
+ try:
+ self.multibibs.append(found)
+ except AttributeError:
+ self.multibibs=[found]
if add_name:
names.append(path)
parse_node(node)
@@ -102,16 +123,22 @@
for aux_node in self.aux_nodes:
try:
ct=aux_node.read()
- except(OSError,IOError):
+ except EnvironmentError:
Logs.error('Error reading %s: %r'%aux_node.abspath())
continue
if g_bibtex_re.findall(ct):
- Logs.warn('calling bibtex')
+ Logs.info('calling bibtex')
self.env.env={}
self.env.env.update(os.environ)
- self.env.env.update({'BIBINPUTS':self.TEXINPUTS,'BSTINPUTS':self.TEXINPUTS})
+ self.env.env.update({'BIBINPUTS':self.texinputs(),'BSTINPUTS':self.texinputs()})
self.env.SRCFILE=aux_node.name[:-4]
self.check_status('error when calling bibtex',self.bibtex_fun())
+ for node in getattr(self,'multibibs',[]):
+ self.env.env={}
+ self.env.env.update(os.environ)
+ self.env.env.update({'BIBINPUTS':self.texinputs(),'BSTINPUTS':self.texinputs()})
+ self.env.SRCFILE=node.name[:-4]
+ self.check_status('error when calling bibtex',self.bibtex_fun())
def bibunits(self):
try:
bibunits=bibunitscan(self)
@@ -119,21 +146,22 @@
Logs.error('error bibunitscan')
else:
if bibunits:
- fn=['bu'+str(i)for i in xrange(1,len(bibunits)+1)]
+ fn=['bu'+str(i)for i in range(1,len(bibunits)+1)]
if fn:
- Logs.warn('calling bibtex on bibunits')
+ Logs.info('calling bibtex on bibunits')
for f in fn:
- self.env.env={'BIBINPUTS':self.TEXINPUTS,'BSTINPUTS':self.TEXINPUTS}
+ self.env.env={'BIBINPUTS':self.texinputs(),'BSTINPUTS':self.texinputs()}
self.env.SRCFILE=f
self.check_status('error when calling bibtex',self.bibtex_fun())
def makeindex(self):
+ self.idx_node=self.inputs[0].change_ext('.idx')
try:
idx_path=self.idx_node.abspath()
os.stat(idx_path)
except OSError:
- Logs.warn('index file %s absent, not calling makeindex'%idx_path)
+ Logs.info('index file %s absent, not calling makeindex'%idx_path)
else:
- Logs.warn('calling makeindex')
+ Logs.info('calling makeindex')
self.env.SRCFILE=self.idx_node.name
self.env.env={}
self.check_status('error when calling makeindex %s'%idx_path,self.makeindex_fun())
@@ -141,6 +169,25 @@
p=self.inputs[0].parent.get_bld()
if os.path.exists(os.path.join(p.abspath(),'btaux.aux')):
self.aux_nodes+=p.ant_glob('*[0-9].aux')
+ def makeglossaries(self):
+ src_file=self.inputs[0].abspath()
+ base_file=os.path.basename(src_file)
+ base,_=os.path.splitext(base_file)
+ for aux_node in self.aux_nodes:
+ try:
+ ct=aux_node.read()
+ except EnvironmentError:
+ Logs.error('Error reading %s: %r'%aux_node.abspath())
+ continue
+ if g_glossaries_re.findall(ct):
+ if not self.env.MAKEGLOSSARIES:
+ raise Errors.WafError("The program 'makeglossaries' is missing!")+ Logs.warn('calling makeglossaries')+ self.env.SRCFILE=base
+ self.check_status('error when calling makeglossaries %s'%base,self.makeglossaries_fun())+ return
+ def texinputs(self):
+ return os.pathsep.join([k.abspath()for k in self.texinputs_nodes])+os.pathsep
def run(self):
env=self.env
if not env['PROMPT_LATEX']:
@@ -147,41 +194,40 @@
env.append_value('LATEXFLAGS','-interaction=batchmode')
env.append_value('PDFLATEXFLAGS','-interaction=batchmode')
env.append_value('XELATEXFLAGS','-interaction=batchmode')
- fun=self.texfun
- node=self.inputs[0]
- srcfile=node.abspath()
- texinputs=self.env.TEXINPUTS or''
- self.TEXINPUTS=node.parent.get_bld().abspath()+os.pathsep+node.parent.get_src().abspath()+os.pathsep+texinputs+os.pathsep
self.cwd=self.inputs[0].parent.get_bld().abspath()
- Logs.warn('first pass on %s'%self.__class__.__name__)
- self.env.env={}
- self.env.env.update(os.environ)
- self.env.env.update({'TEXINPUTS':self.TEXINPUTS})
- self.env.SRCFILE=srcfile
- self.check_status('error when calling latex',fun())
- self.aux_nodes=self.scan_aux(node.change_ext('.aux'))
- self.idx_node=node.change_ext('.idx')
+ Logs.info('first pass on %s'%self.__class__.__name__)
+ cur_hash=self.hash_aux_nodes()
+ self.call_latex()
+ self.hash_aux_nodes()
self.bibtopic()
self.bibfile()
self.bibunits()
self.makeindex()
- hash=''
+ self.makeglossaries()
for i in range(10):
- prev_hash=hash
- try:
- hashes=[Utils.h_file(x.abspath())for x in self.aux_nodes]
- hash=Utils.h_list(hashes)
- except(OSError,IOError):
- Logs.error('could not read aux.h')
- pass
- if hash and hash==prev_hash:
+ prev_hash=cur_hash
+ cur_hash=self.hash_aux_nodes()
+ if not cur_hash:
+ Logs.error('No aux.h to process')
+ if cur_hash and cur_hash==prev_hash:
break
- Logs.warn('calling %s'%self.__class__.__name__)
- self.env.env={}
- self.env.env.update(os.environ)
- self.env.env.update({'TEXINPUTS':self.TEXINPUTS})
- self.env.SRCFILE=srcfile
- self.check_status('error when calling %s'%self.__class__.__name__,fun())
+ Logs.info('calling %s'%self.__class__.__name__)
+ self.call_latex()
+ def hash_aux_nodes(self):
+ try:
+ nodes=self.aux_nodes
+ except AttributeError:
+ try:
+ self.aux_nodes=self.scan_aux(self.inputs[0].change_ext('.aux'))
+ except IOError:
+ return None
+ return Utils.h_list([Utils.h_file(x.abspath())for x in self.aux_nodes])
+ def call_latex(self):
+ self.env.env={}
+ self.env.env.update(os.environ)
+ self.env.env.update({'TEXINPUTS':self.texinputs()})
+ self.env.SRCFILE=self.inputs[0].abspath()
+ self.check_status('error when calling latex',self.texfun())
class latex(tex):
texfun,vars=Task.compile_fun('${LATEX} ${LATEXFLAGS} ${SRCFILE}',shell=False)
class pdflatex(tex):
@@ -203,7 +249,7 @@
@feature('tex')
@before_method('process_source')
def apply_tex(self):
- if not getattr(self,'type',None)in['latex','pdflatex','xelatex']:
+ if not getattr(self,'type',None)in('latex','pdflatex','xelatex'):
self.type='pdflatex'
tree=self.bld
outs=Utils.to_list(getattr(self,'outs',[]))
@@ -211,12 +257,15 @@
deps_lst=[]
if getattr(self,'deps',None):
deps=self.to_list(self.deps)
- for filename in deps:
- n=self.path.find_resource(filename)
- if not n:
- self.bld.fatal('Could not find %r for %r'%(filename,self))
- if not n in deps_lst:
- deps_lst.append(n)
+ for dep in deps:
+ if isinstance(dep,str):
+ n=self.path.find_resource(dep)
+ if not n:
+ self.bld.fatal('Could not find %r for %r'%(dep,self))
+ if not n in deps_lst:
+ deps_lst.append(n)
+ elif isinstance(dep,Node.Node):
+ deps_lst.append(dep)
for node in self.to_nodes(self.source):
if self.type=='latex':
task=self.create_task('latex',node,node.change_ext('.dvi'))
@@ -226,23 +275,35 @@
task=self.create_task('xelatex',node,node.change_ext('.pdf'))
task.env=self.env
if deps_lst:
- try:
- lst=tree.node_deps[task.uid()]
- for n in deps_lst:
- if not n in lst:
- lst.append(n)
- except KeyError:
- tree.node_deps[task.uid()]=deps_lst
- v=dict(os.environ)
- p=node.parent.abspath()+os.pathsep+self.path.abspath()+os.pathsep+self.path.get_bld().abspath()+os.pathsep+v.get('TEXINPUTS','')+os.pathsep
- v['TEXINPUTS']=p
+ for n in deps_lst:
+ if not n in task.dep_nodes:
+ task.dep_nodes.append(n)
+ if hasattr(self,'texinputs_nodes'):
+ task.texinputs_nodes=self.texinputs_nodes
+ else:
+ task.texinputs_nodes=[node.parent,node.parent.get_bld(),self.path,self.path.get_bld()]
+ lst=os.environ.get('TEXINPUTS','')
+ if self.env.TEXINPUTS:
+ lst+=os.pathsep+self.env.TEXINPUTS
+ if lst:
+ lst=lst.split(os.pathsep)
+ for x in lst:
+ if x:
+ if os.path.isabs(x):
+ p=self.bld.root.find_node(x)
+ if p:
+ task.texinputs_nodes.append(p)
+ else:
+ Logs.error('Invalid TEXINPUTS folder %s'%x)
+ else:
+ Logs.error('Cannot resolve relative paths in TEXINPUTS %s'%x)
if self.type=='latex':
if'ps'in outs:
tsk=self.create_task('dvips',task.outputs,node.change_ext('.ps'))
- tsk.env.env=dict(v)
+ tsk.env.env=dict(os.environ)
if'pdf'in outs:
tsk=self.create_task('dvipdf',task.outputs,node.change_ext('.pdf'))
- tsk.env.env=dict(v)
+ tsk.env.env=dict(os.environ)
elif self.type=='pdflatex':
if'ps'in outs:
self.create_task('pdf2ps',task.outputs,node.change_ext('.ps'))
@@ -249,7 +310,7 @@
self.source=[]
def configure(self):
v=self.env
- for p in'tex latex pdflatex xelatex bibtex dvips dvipdf ps2pdf makeindex pdf2ps'.split():
+ for p in'tex latex pdflatex xelatex bibtex dvips dvipdf ps2pdf makeindex pdf2ps makeglossaries'.split():
try:
self.find_program(p,var=p.upper())
except self.errors.ConfigurationError:
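Side note on the reworked tex tool above: deps may now mix plain file names and waflib Node objects, and a makeglossaries pass runs whenever the generated .aux files reference glossary entries. A minimal wscript sketch under those assumptions (the document and bibliography names are hypothetical):

    def configure(conf):
        conf.load('tex')

    def build(bld):
        # 'deps' accepts both strings and Node objects (see apply_tex above)
        bld(features='tex', type='pdflatex', source='report.tex',
            deps=['intro.tex', bld.path.find_resource('refs.bib')])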
--- a/waflib/Tools/vala.py
+++ b/waflib/Tools/vala.py
@@ -10,7 +10,7 @@
vars=["VALAC","VALAC_VERSION","VALAFLAGS"]
ext_out=['.h']
def run(self):
- cmd=[self.env['VALAC']]+self.env['VALAFLAGS']
+ cmd=self.env.VALAC+self.env.VALAFLAGS
cmd.extend([a.abspath()for a in self.inputs])
ret=self.exec_command(cmd,cwd=self.outputs[0].parent.abspath())
if ret:
@@ -159,7 +159,7 @@
def find_valac(self,valac_name,min_version):
valac=self.find_program(valac_name,var='VALAC')
try:
- output=self.cmd_and_log(valac+' --version')
+ output=self.cmd_and_log(valac+['--version'])
except Exception:
valac_version=None
else:
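For context, find_program returns an argument list rather than a string in waf 1.8, which is why the valac invocation above switches to list concatenation. A small configure-time sketch of the same pattern (the tool name is only an example):

    def configure(conf):
        valac = conf.find_program('valac', var='VALAC')   # now a list, e.g. ['/usr/bin/valac']
        out = conf.cmd_and_log(valac + ['--version'])
        conf.msg('valac version', out.strip())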
--- a/waflib/Tools/waf_unit_test.py
+++ b/waflib/Tools/waf_unit_test.py
@@ -2,8 +2,8 @@
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-import os,sys
-from waflib.TaskGen import feature,after_method
+import os
+from waflib.TaskGen import feature,after_method,taskgen_method
from waflib import Utils,Task,Logs,Options
testlock=Utils.threading.Lock()
@feature('test')
@@ -11,6 +11,14 @@
def make_test(self):
if getattr(self,'link_task',None):
self.create_task('utest',self.link_task.outputs)
+@taskgen_method
+def add_test_results(self,tup):
+ Logs.debug("ut: %r",tup)+ self.utest_result=tup
+ try:
+ self.bld.utest_results.append(tup)
+ except AttributeError:
+ self.bld.utest_results=[tup]
class utest(Task.Task):
color='PINK'
after=['vnum','inst']
@@ -23,11 +31,9 @@
if getattr(Options.options,'all_tests',False):
return Task.RUN_ME
return ret
- def run(self):
- filename=self.inputs[0].abspath()
- self.ut_exec=getattr(self.generator,'ut_exec',[filename])
- if getattr(self.generator,'ut_fun',None):
- self.generator.ut_fun(self)
+ def add_path(self,dct,path,var):
+ dct[var]=os.pathsep.join(Utils.to_list(path)+[os.environ.get(var,'')])
+ def get_test_env(self):
try:
fu=getattr(self.generator.bld,'all_test_paths')
except AttributeError:
@@ -39,32 +45,30 @@
s=tg.link_task.outputs[0].parent.abspath()
if s not in lst:
lst.append(s)
- def add_path(dct,path,var):
- dct[var]=os.pathsep.join(Utils.to_list(path)+[os.environ.get(var,'')])
if Utils.is_win32:
- add_path(fu,lst,'PATH')
+ self.add_path(fu,lst,'PATH')
elif Utils.unversioned_sys_platform()=='darwin':
- add_path(fu,lst,'DYLD_LIBRARY_PATH')
- add_path(fu,lst,'LD_LIBRARY_PATH')
+ self.add_path(fu,lst,'DYLD_LIBRARY_PATH')
+ self.add_path(fu,lst,'LD_LIBRARY_PATH')
else:
- add_path(fu,lst,'LD_LIBRARY_PATH')
+ self.add_path(fu,lst,'LD_LIBRARY_PATH')
self.generator.bld.all_test_paths=fu
+ return fu
+ def run(self):
+ filename=self.inputs[0].abspath()
+ self.ut_exec=getattr(self.generator,'ut_exec',[filename])
+ if getattr(self.generator,'ut_fun',None):
+ self.generator.ut_fun(self)
cwd=getattr(self.generator,'ut_cwd','')or self.inputs[0].parent.abspath()
- testcmd=getattr(Options.options,'testcmd',False)
+ testcmd=getattr(self.generator,'ut_cmd',False)or getattr(Options.options,'testcmd',False)
if testcmd:
self.ut_exec=(testcmd%self.ut_exec[0]).split(' ')
- proc=Utils.subprocess.Popen(self.ut_exec,cwd=cwd,env=fu,stderr=Utils.subprocess.PIPE,stdout=Utils.subprocess.PIPE)
+ proc=Utils.subprocess.Popen(self.ut_exec,cwd=cwd,env=self.get_test_env(),stderr=Utils.subprocess.PIPE,stdout=Utils.subprocess.PIPE)
(stdout,stderr)=proc.communicate()
tup=(filename,proc.returncode,stdout,stderr)
- self.generator.utest_result=tup
testlock.acquire()
try:
- bld=self.generator.bld
- Logs.debug("ut: %r",tup)- try:
- bld.utest_results.append(tup)
- except AttributeError:
- bld.utest_results=[tup]
+ return self.generator.add_test_results(tup)
finally:
testlock.release()
def summary(bld):
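For context, the utest task above now also honors a per-task-generator ut_cmd attribute in addition to the global --testcmd option; the %s placeholder receives the path of the test binary. A hypothetical wscript fragment:

    def build(bld):
        bld.program(features='test', source='test_math.c', target='test_math',
                    # the test binary is run through this wrapper command
                    ut_cmd='valgrind --error-exitcode=1 %s')
        from waflib.Tools import waf_unit_test
        bld.add_post_fun(waf_unit_test.summary)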
--- a/waflib/Tools/xlc.py
+++ b/waflib/Tools/xlc.py
@@ -7,10 +7,8 @@
@conf
def find_xlc(conf):
cc=conf.find_program(['xlc_r','xlc'],var='CC')
- cc=conf.cmd_to_list(cc)
conf.get_xlc_version(cc)
conf.env.CC_NAME='xlc'
- conf.env.CC=cc
@conf
def xlc_common_flags(conf):
v=conf.env
--- a/waflib/Tools/xlcxx.py
+++ b/waflib/Tools/xlcxx.py
@@ -7,10 +7,8 @@
@conf
def find_xlcxx(conf):
cxx=conf.find_program(['xlc++_r','xlc++'],var='CXX')
- cxx=conf.cmd_to_list(cxx)
conf.get_xlc_version(cxx)
conf.env.CXX_NAME='xlc++'
- conf.env.CXX=cxx
@conf
def xlcxx_common_flags(conf):
v=conf.env
--- a/waflib/Utils.py
+++ b/waflib/Utils.py
@@ -2,15 +2,10 @@
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-import os,sys,errno,traceback,inspect,re,shutil,datetime,gc
+import os,sys,errno,traceback,inspect,re,shutil,datetime,gc,platform
import subprocess
+from collections import deque,defaultdict
try:
- from collections import deque
-except ImportError:
- class deque(list):
- def popleft(self):
- return self.pop(0)
-try:
import _winreg as winreg
except ImportError:
try:
@@ -32,6 +27,8 @@
try:
import threading
except ImportError:
+ if not'JOBS'in os.environ:
+ os.environ['JOBS']='1'
class threading(object):
pass
class Lock(object):
@@ -56,23 +53,34 @@
rot_chr=['\\','|','/','-']
rot_idx=0
try:
- from collections import defaultdict
+ from collections import OrderedDict as ordered_iter_dict
except ImportError:
- class defaultdict(dict):
- def __init__(self,default_factory):
- super(defaultdict,self).__init__()
- self.default_factory=default_factory
- def __getitem__(self,key):
+ class ordered_iter_dict(dict):
+ def __init__(self,*k,**kw):
+ self.lst=[]
+ dict.__init__(self,*k,**kw)
+ def clear(self):
+ dict.clear(self)
+ self.lst=[]
+ def __setitem__(self,key,value):
+ dict.__setitem__(self,key,value)
try:
- return super(defaultdict,self).__getitem__(key)
- except KeyError:
- value=self.default_factory()
- self[key]=value
- return value
-is_win32=sys.platform in('win32','cli')
-indicator='\x1b[K%s%s%s\r'
-if is_win32 and'NOCOLOR'in os.environ:
- indicator='%s%s%s\r'
+ self.lst.remove(key)
+ except ValueError:
+ pass
+ self.lst.append(key)
+ def __delitem__(self,key):
+ dict.__delitem__(self,key)
+ try:
+ self.lst.remove(key)
+ except ValueError:
+ pass
+ def __iter__(self):
+ for x in self.lst:
+ yield x
+ def keys(self):
+ return self.lst
+is_win32=sys.platform in('win32','cli','os2')
def readf(fname,m='r',encoding='ISO8859-1'):
if sys.hexversion>0x3000000 and not'b'in m:
m+='b'
@@ -81,7 +89,10 @@
txt=f.read()
finally:
f.close()
- txt=txt.decode(encoding)
+ if encoding:
+ txt=txt.decode(encoding)
+ else:
+ txt=txt.decode()
else:
f=open(fname,m)
try:
@@ -108,67 +119,70 @@
finally:
f.close()
return m.digest()
-if hasattr(os,'O_NOINHERIT')and sys.hexversion<0x3040000:
- def readf_win32(f,m='r',encoding='ISO8859-1'):
- flags=os.O_NOINHERIT|os.O_RDONLY
- if'b'in m:
- flags|=os.O_BINARY
- if'+'in m:
- flags|=os.O_RDWR
+def readf_win32(f,m='r',encoding='ISO8859-1'):
+ flags=os.O_NOINHERIT|os.O_RDONLY
+ if'b'in m:
+ flags|=os.O_BINARY
+ if'+'in m:
+ flags|=os.O_RDWR
+ try:
+ fd=os.open(f,flags)
+ except OSError:
+ raise IOError('Cannot read from %r'%f)
+ if sys.hexversion>0x3000000 and not'b'in m:
+ m+='b'
+ f=os.fdopen(fd,m)
try:
- fd=os.open(f,flags)
- except OSError:
- raise IOError('Cannot read from %r'%f)
- if sys.hexversion>0x3000000 and not'b'in m:
- m+='b'
- f=os.fdopen(fd,m)
- try:
- txt=f.read()
- finally:
- f.close()
+ txt=f.read()
+ finally:
+ f.close()
+ if encoding:
txt=txt.decode(encoding)
else:
- f=os.fdopen(fd,m)
- try:
- txt=f.read()
- finally:
- f.close()
- return txt
- def writef_win32(f,data,m='w',encoding='ISO8859-1'):
- if sys.hexversion>0x3000000 and not'b'in m:
- data=data.encode(encoding)
- m+='b'
- flags=os.O_CREAT|os.O_TRUNC|os.O_WRONLY|os.O_NOINHERIT
- if'b'in m:
- flags|=os.O_BINARY
- if'+'in m:
- flags|=os.O_RDWR
- try:
- fd=os.open(f,flags)
- except OSError:
- raise IOError('Cannot write to %r'%f)
+ txt=txt.decode()
+ else:
f=os.fdopen(fd,m)
try:
- f.write(data)
+ txt=f.read()
finally:
f.close()
- def h_file_win32(fname):
- try:
- fd=os.open(fname,os.O_BINARY|os.O_RDONLY|os.O_NOINHERIT)
- except OSError:
- raise IOError('Cannot read from %r'%fname)
- f=os.fdopen(fd,'rb')
- m=md5()
- try:
- while fname:
- fname=f.read(200000)
- m.update(fname)
- finally:
- f.close()
- return m.digest()
- readf_old=readf
- writef_old=writef
- h_file_old=h_file
+ return txt
+def writef_win32(f,data,m='w',encoding='ISO8859-1'):
+ if sys.hexversion>0x3000000 and not'b'in m:
+ data=data.encode(encoding)
+ m+='b'
+ flags=os.O_CREAT|os.O_TRUNC|os.O_WRONLY|os.O_NOINHERIT
+ if'b'in m:
+ flags|=os.O_BINARY
+ if'+'in m:
+ flags|=os.O_RDWR
+ try:
+ fd=os.open(f,flags)
+ except OSError:
+ raise IOError('Cannot write to %r'%f)
+ f=os.fdopen(fd,m)
+ try:
+ f.write(data)
+ finally:
+ f.close()
+def h_file_win32(fname):
+ try:
+ fd=os.open(fname,os.O_BINARY|os.O_RDONLY|os.O_NOINHERIT)
+ except OSError:
+ raise IOError('Cannot read from %r'%fname)
+ f=os.fdopen(fd,'rb')
+ m=md5()
+ try:
+ while fname:
+ fname=f.read(200000)
+ m.update(fname)
+ finally:
+ f.close()
+ return m.digest()
+readf_unix=readf
+writef_unix=writef
+h_file_unix=h_file
+if hasattr(os,'O_NOINHERIT')and sys.hexversion<0x3040000:
readf=readf_win32
writef=writef_win32
h_file=h_file_win32
@@ -190,27 +204,27 @@
:param s: string to convert
:type s: string
"""
+def listdir_win32(s):
+ if not s:
+ try:
+ import ctypes
+ except ImportError:
+ return[x+':\\'for x in list('ABCDEFGHIJKLMNOPQRSTUVWXYZ')]
+ else:
+ dlen=4
+ maxdrives=26
+ buf=ctypes.create_string_buffer(maxdrives*dlen)
+ ndrives=ctypes.windll.kernel32.GetLogicalDriveStringsA(maxdrives*dlen,ctypes.byref(buf))
+ return[str(buf.raw[4*i:4*i+2].decode('ascii'))for i in range(int(ndrives/dlen))]
+ if len(s)==2 and s[1]==":":
+ s+=os.sep
+ if not os.path.isdir(s):
+ e=OSError('%s is not a directory'%s)
+ e.errno=errno.ENOENT
+ raise e
+ return os.listdir(s)
listdir=os.listdir
if is_win32:
- def listdir_win32(s):
- if not s:
- try:
- import ctypes
- except ImportError:
- return[x+':\\'for x in list('ABCDEFGHIJKLMNOPQRSTUVWXYZ')]
- else:
- dlen=4
- maxdrives=26
- buf=ctypes.create_string_buffer(maxdrives*dlen)
- ndrives=ctypes.windll.kernel32.GetLogicalDriveStringsA(maxdrives*dlen,ctypes.byref(buf))
- return[str(buf.raw[4*i:4*i+2].decode('ascii'))for i in range(int(ndrives/dlen))]
- if len(s)==2 and s[1]==":":
- s+=os.sep
- if not os.path.isdir(s):
- e=OSError('%s is not a directory'%s)
- e.errno=errno.ENOENT
- raise e
- return os.listdir(s)
listdir=listdir_win32
def num2ver(ver):
if isinstance(ver,str):
@@ -231,18 +245,7 @@
return sth.split()
else:
return sth
-re_nl=re.compile('\r*\n',re.M)
-def str_to_dict(txt):
- tbl={}
- lines=re_nl.split(txt)
- for x in lines:
- x=x.strip()
- if not x or x.startswith('#')or x.find('=')<0:
- continue
- tmp=x.split('=')
- tbl[tmp[0].strip()]='='.join(tmp[1:]).strip()
- return tbl
-def split_path(path):
+def split_path_unix(path):
return path.split('/')
def split_path_cygwin(path):
if path.startswith('//'):
@@ -261,6 +264,8 @@
split_path=split_path_cygwin
elif is_win32:
split_path=split_path_win32
+else:
+ split_path=split_path_unix
split_path.__doc__="""
Split a path by / or \\. This function is not like os.path.split
@@ -275,12 +280,29 @@
except OSError ,e:
if not os.path.isdir(path):
raise Errors.WafError('Cannot create the folder %r'%path,ex=e)
+def check_exe(name,env=None):
+ if not name:
+ raise ValueError('Cannot execute an empty string!')
+ def is_exe(fpath):
+ return os.path.isfile(fpath)and os.access(fpath,os.X_OK)
+ fpath,fname=os.path.split(name)
+ if fpath and is_exe(name):
+ return os.path.abspath(name)
+ else:
+ env=env or os.environ
+ for path in env["PATH"].split(os.pathsep):
+ path=path.strip('"')+ exe_file=os.path.join(path,name)
+ if is_exe(exe_file):
+ return os.path.abspath(exe_file)
+ return None
def def_attrs(cls,**kw):
for k,v in kw.items():
if not hasattr(cls,k):
setattr(cls,k,v)
def quote_define_name(s):
- fu=re.compile("[^a-zA-Z0-9]").sub("_",s)+ fu=re.sub('[^a-zA-Z0-9]','_',s)+ fu=re.sub('_+','_',fu)fu=fu.upper()
return fu
def h_list(lst):
@@ -336,7 +358,8 @@
else:s=s.lower()
if s=='powerpc':
return'darwin'
- if s=='win32'or s.endswith('os2')and s!='sunos2':return s
+ if s=='win32'or s=='os2':
+ return s
return re.split('\d+$',s)[0]
def nada(*k,**kw):
pass
@@ -345,10 +368,10 @@
self.start_time=datetime.datetime.utcnow()
def __str__(self):
delta=datetime.datetime.utcnow()-self.start_time
- days=int(delta.days)
- hours=delta.seconds//3600
- minutes=(delta.seconds-hours*3600)//60
- seconds=delta.seconds-hours*3600-minutes*60+float(delta.microseconds)/1000/1000
+ days=delta.days
+ hours,rem=divmod(delta.seconds,3600)
+ minutes,seconds=divmod(rem,60)
+ seconds+=delta.microseconds*1e-6
result=''
if days:
result+='%dd'%days
@@ -410,3 +433,11 @@
else:
if os.path.isfile(result):
return result
+def lib64():
+ if os.sep=='/':
+ is_64=platform.architecture()[0]=='64bit'
+ if os.path.exists('/usr/lib64')and not os.path.exists('/usr/lib32'):
+ return'64'if is_64 else''
+ else:
+ return''if is_64 else'32'
+ return''
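For context, the two Utils helpers added above can be exercised directly once waflib is importable; the results below depend on the host system and are only illustrative:

    from waflib import Utils
    # check_exe() resolves a command through PATH (or validates an absolute path); None if absent
    gcc = Utils.check_exe('gcc')
    # lib64() suggests a multilib suffix: '64', '32' or ''
    libdir = '/usr/lib' + Utils.lib64()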
--- a/waflib/ansiterm.py
+++ b/waflib/ansiterm.py
@@ -2,19 +2,49 @@
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
-import sys,os
+import os,re,sys
+from waflib.Utils import threading
+wlock=threading.Lock()
try:
- if not(sys.stderr.isatty()and sys.stdout.isatty()):
- raise ValueError('not a tty')
- from ctypes import Structure,windll,c_short,c_ushort,c_ulong,c_int,byref,POINTER,c_long,c_char
+ from ctypes import Structure,windll,c_short,c_ushort,c_ulong,c_int,byref,c_wchar,POINTER,c_long
+except ImportError:
+ class AnsiTerm(object):
+ def __init__(self,stream):
+ self.stream=stream
+ try:
+ self.errors=self.stream.errors
+ except AttributeError:
+ pass
+ self.encoding=self.stream.encoding
+ def write(self,txt):
+ try:
+ wlock.acquire()
+ self.stream.write(txt)
+ self.stream.flush()
+ finally:
+ wlock.release()
+ def fileno(self):
+ return self.stream.fileno()
+ def flush(self):
+ self.stream.flush()
+ def isatty(self):
+ return self.stream.isatty()
+else:
class COORD(Structure):
_fields_=[("X",c_short),("Y",c_short)]class SMALL_RECT(Structure):
_fields_=[("Left",c_short),("Top",c_short),("Right",c_short),("Bottom",c_short)]class CONSOLE_SCREEN_BUFFER_INFO(Structure):
- _fields_=[("Size",COORD),("CursorPosition",COORD),("Attributes",c_short),("Window",SMALL_RECT),("MaximumWindowSize",COORD)]+ _fields_=[("Size",COORD),("CursorPosition",COORD),("Attributes",c_ushort),("Window",SMALL_RECT),("MaximumWindowSize",COORD)]class CONSOLE_CURSOR_INFO(Structure):
_fields_=[('dwSize',c_ulong),('bVisible',c_int)]+ try:
+ _type=unicode
+ except NameError:
+ _type=str
+ to_int=lambda number,default:number and int(number)or default
+ STD_OUTPUT_HANDLE=-11
+ STD_ERROR_HANDLE=-12
windll.kernel32.GetStdHandle.argtypes=[c_ulong]
windll.kernel32.GetStdHandle.restype=c_ulong
windll.kernel32.GetConsoleScreenBufferInfo.argtypes=[c_ulong,POINTER(CONSOLE_SCREEN_BUFFER_INFO)]
@@ -21,8 +51,8 @@
windll.kernel32.GetConsoleScreenBufferInfo.restype=c_long
windll.kernel32.SetConsoleTextAttribute.argtypes=[c_ulong,c_ushort]
windll.kernel32.SetConsoleTextAttribute.restype=c_long
- windll.kernel32.FillConsoleOutputCharacterA.argtypes=[c_ulong,c_char,c_ulong,POINTER(COORD),POINTER(c_ulong)]
- windll.kernel32.FillConsoleOutputCharacterA.restype=c_long
+ windll.kernel32.FillConsoleOutputCharacterW.argtypes=[c_ulong,c_wchar,c_ulong,POINTER(COORD),POINTER(c_ulong)]
+ windll.kernel32.FillConsoleOutputCharacterW.restype=c_long
windll.kernel32.FillConsoleOutputAttribute.argtypes=[c_ulong,c_ushort,c_ulong,POINTER(COORD),POINTER(c_ulong)]
windll.kernel32.FillConsoleOutputAttribute.restype=c_long
windll.kernel32.SetConsoleCursorPosition.argtypes=[c_ulong,POINTER(COORD)]
@@ -29,38 +59,26 @@
windll.kernel32.SetConsoleCursorPosition.restype=c_long
windll.kernel32.SetConsoleCursorInfo.argtypes=[c_ulong,POINTER(CONSOLE_CURSOR_INFO)]
windll.kernel32.SetConsoleCursorInfo.restype=c_long
- sbinfo=CONSOLE_SCREEN_BUFFER_INFO()
- csinfo=CONSOLE_CURSOR_INFO()
- hconsole=windll.kernel32.GetStdHandle(-11)
- windll.kernel32.GetConsoleScreenBufferInfo(hconsole,byref(sbinfo))
- if sbinfo.Size.X<9 or sbinfo.Size.Y<9:raise ValueError('small console')
- windll.kernel32.GetConsoleCursorInfo(hconsole,byref(csinfo))
-except Exception:
- pass
-else:
- import re,threading
- is_vista=getattr(sys,"getwindowsversion",None)and sys.getwindowsversion()[0]>=6
- try:
- _type=unicode
- except NameError:
- _type=str
- to_int=lambda number,default:number and int(number)or default
- wlock=threading.Lock()
- STD_OUTPUT_HANDLE=-11
- STD_ERROR_HANDLE=-12
class AnsiTerm(object):
- def __init__(self):
- self.encoding=sys.stdout.encoding
- self.hconsole=windll.kernel32.GetStdHandle(STD_OUTPUT_HANDLE)
+ def __init__(self,s):
+ self.stream=s
+ try:
+ self.errors=s.errors
+ except AttributeError:
+ pass
+ self.encoding=s.encoding
self.cursor_history=[]
- self.orig_sbinfo=CONSOLE_SCREEN_BUFFER_INFO()
- self.orig_csinfo=CONSOLE_CURSOR_INFO()
- windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole,byref(self.orig_sbinfo))
- windll.kernel32.GetConsoleCursorInfo(hconsole,byref(self.orig_csinfo))
+ handle=(s.fileno()==2)and STD_ERROR_HANDLE or STD_OUTPUT_HANDLE
+ self.hconsole=windll.kernel32.GetStdHandle(handle)
+ self._sbinfo=CONSOLE_SCREEN_BUFFER_INFO()
+ self._csinfo=CONSOLE_CURSOR_INFO()
+ windll.kernel32.GetConsoleCursorInfo(self.hconsole,byref(self._csinfo))
+ self._orig_sbinfo=CONSOLE_SCREEN_BUFFER_INFO()
+ r=windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole,byref(self._orig_sbinfo))
+ self._isatty=r==1
def screen_buffer_info(self):
- sbinfo=CONSOLE_SCREEN_BUFFER_INFO()
- windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole,byref(sbinfo))
- return sbinfo
+ windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole,byref(self._sbinfo))
+ return self._sbinfo
def clear_line(self,param):
mode=param and int(param)or 0
sbinfo=self.screen_buffer_info()
@@ -74,7 +92,7 @@
line_start=sbinfo.CursorPosition
line_length=sbinfo.Size.X-sbinfo.CursorPosition.X
chars_written=c_ulong()
- windll.kernel32.FillConsoleOutputCharacterA(self.hconsole,c_char(' '),line_length,line_start,byref(chars_written))
+ windll.kernel32.FillConsoleOutputCharacterW(self.hconsole,c_wchar(' '),line_length,line_start,byref(chars_written))
windll.kernel32.FillConsoleOutputAttribute(self.hconsole,sbinfo.Attributes,line_length,line_start,byref(chars_written))
def clear_screen(self,param):
mode=to_int(param,0)
@@ -90,7 +108,7 @@
clear_start=sbinfo.CursorPosition
clear_length=((sbinfo.Size.X-sbinfo.CursorPosition.X)+sbinfo.Size.X*(sbinfo.Size.Y-sbinfo.CursorPosition.Y))
chars_written=c_ulong()
- windll.kernel32.FillConsoleOutputCharacterA(self.hconsole,c_char(' '),clear_length,clear_start,byref(chars_written))
+ windll.kernel32.FillConsoleOutputCharacterW(self.hconsole,c_wchar(' '),clear_length,clear_start,byref(chars_written))
windll.kernel32.FillConsoleOutputAttribute(self.hconsole,sbinfo.Attributes,clear_length,clear_start,byref(chars_written))
def push_cursor(self,param):
sbinfo=self.screen_buffer_info()
@@ -133,20 +151,16 @@
return((c&1)<<2)|(c&2)|((c&4)>>2)
def set_color(self,param):
cols=param.split(';')
- sbinfo=CONSOLE_SCREEN_BUFFER_INFO()
- windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole,byref(sbinfo))
+ sbinfo=self.screen_buffer_info()
attr=sbinfo.Attributes
for c in cols:
- if is_vista:
- c=int(c)
- else:
- c=to_int(c,0)
+ c=to_int(c,0)
if 29<c<38:
attr=(attr&0xfff0)|self.rgb2bgr(c-30)
elif 39<c<48:
attr=(attr&0xff0f)|(self.rgb2bgr(c-40)<<4)
elif c==0:
- attr=self.orig_sbinfo.Attributes
+ attr=self._orig_sbinfo.Attributes
elif c==1:
attr|=0x08
elif c==4:
@@ -155,23 +169,26 @@
attr=(attr&0xff88)|((attr&0x70)>>4)|((attr&0x07)<<4)
windll.kernel32.SetConsoleTextAttribute(self.hconsole,attr)
def show_cursor(self,param):
- csinfo.bVisible=1
- windll.kernel32.SetConsoleCursorInfo(self.hconsole,byref(csinfo))
+ self._csinfo.bVisible=1
+ windll.kernel32.SetConsoleCursorInfo(self.hconsole,byref(self._csinfo))
def hide_cursor(self,param):
- csinfo.bVisible=0
- windll.kernel32.SetConsoleCursorInfo(self.hconsole,byref(csinfo))
+ self._csinfo.bVisible=0
+ windll.kernel32.SetConsoleCursorInfo(self.hconsole,byref(self._csinfo))
ansi_command_table={'A':move_up,'B':move_down,'C':move_right,'D':move_left,'E':next_line,'F':prev_line,'G':set_column,'H':set_cursor,'f':set_cursor,'J':clear_screen,'K':clear_line,'h':show_cursor,'l':hide_cursor,'m':set_color,'s':push_cursor,'u':pop_cursor,}
ansi_tokens=re.compile('(?:\x1b\[([0-9?;]*)([a-zA-Z])|([^\x1b]+))')
def write(self,text):
try:
wlock.acquire()
- for param,cmd,txt in self.ansi_tokens.findall(text):
- if cmd:
- cmd_func=self.ansi_command_table.get(cmd)
- if cmd_func:
- cmd_func(self,param)
- else:
- self.writeconsole(txt)
+ if self._isatty:
+ for param,cmd,txt in self.ansi_tokens.findall(text):
+ if cmd:
+ cmd_func=self.ansi_command_table.get(cmd)
+ if cmd_func:
+ cmd_func(self,param)
+ else:
+ self.writeconsole(txt)
+ else:
+ self.stream.write(text)
finally:
wlock.release()
def writeconsole(self,txt):
@@ -179,13 +196,43 @@
writeconsole=windll.kernel32.WriteConsoleA
if isinstance(txt,_type):
writeconsole=windll.kernel32.WriteConsoleW
- TINY_STEP=3000
- for x in range(0,len(txt),TINY_STEP):
- tiny=txt[x:x+TINY_STEP]
- writeconsole(self.hconsole,tiny,len(tiny),byref(chars_written),None)
+ done=0
+ todo=len(txt)
+ chunk=32<<10
+ while todo!=0:
+ doing=min(chunk,todo)
+ buf=txt[done:done+doing]
+ r=writeconsole(self.hconsole,buf,doing,byref(chars_written),None)
+ if r==0:
+ chunk>>=1
+ continue
+ done+=doing
+ todo-=doing
+ def fileno(self):
+ return self.stream.fileno()
def flush(self):
pass
def isatty(self):
- return True
- sys.stderr=sys.stdout=AnsiTerm()
- os.environ['TERM']='vt100'
+ return self._isatty
+ if sys.stdout.isatty()or sys.stderr.isatty():
+ handle=sys.stdout.isatty()and STD_OUTPUT_HANDLE or STD_ERROR_HANDLE
+ console=windll.kernel32.GetStdHandle(handle)
+ sbinfo=CONSOLE_SCREEN_BUFFER_INFO()
+ def get_term_cols():
+ windll.kernel32.GetConsoleScreenBufferInfo(console,byref(sbinfo))
+ return sbinfo.Size.X-1
+try:
+ import struct,fcntl,termios
+except ImportError:
+ pass
+else:
+ if(sys.stdout.isatty()or sys.stderr.isatty())and os.environ.get('TERM','')not in('dumb','emacs'):
+ FD=sys.stdout.isatty()and sys.stdout.fileno()or sys.stderr.fileno()
+ def fun():
+ return struct.unpack("HHHH",fcntl.ioctl(FD,termios.TIOCGWINSZ,struct.pack("HHHH",0,0,0,0)))[1]+ try:
+ fun()
+ except Exception ,e:
+ pass
+ else:
+ get_term_cols=fun
--- a/waflib/extras/compat15.py
+++ b/waflib/extras/compat15.py
@@ -26,8 +26,36 @@
Build.BuildContext.new_task_gen=Build.BuildContext.__call__
Build.BuildContext.is_install=0
Node.Node.relpath_gen=Node.Node.path_from
+Utils.pproc=Utils.subprocess
+Utils.get_term_cols=Logs.get_term_cols
+def cmd_output(cmd,**kw):
+ silent=False
+ if'silent'in kw:
+ silent=kw['silent']
+ del(kw['silent'])
+ if'e'in kw:
+ tmp=kw['e']
+ del(kw['e'])
+ kw['env']=tmp
+ kw['shell']=isinstance(cmd,str)
+ kw['stdout']=Utils.subprocess.PIPE
+ if silent:
+ kw['stderr']=Utils.subprocess.PIPE
+ try:
+ p=Utils.subprocess.Popen(cmd,**kw)
+ output=p.communicate()[0]
+ except OSError ,e:
+ raise ValueError(str(e))
+ if p.returncode:
+ if not silent:
+ msg="command execution failed: %s -> %r"%(cmd,str(output))
+ raise ValueError(msg)
+ output=''
+ return output
+Utils.cmd_output=cmd_output
def name_to_obj(self,s,env=None):
- Logs.warn('compat: change "name_to_obj(name, env)" by "get_tgen_by_name(name)"')+ if Logs.verbose:
+ Logs.warn('compat: change "name_to_obj(name, env)" by "get_tgen_by_name(name)"')return self.get_tgen_by_name(s)
Build.BuildContext.name_to_obj=name_to_obj
def env_of_name(self,name):
@@ -49,7 +77,8 @@
self.prepare_env(env)
self.all_envs[name]=env
else:
- if fromenv:Logs.warn("The environment %s may have been configured already"%name)
+ if fromenv:
+ Logs.warn("The environment %s may have been configured already"%name)
return env
Configure.ConfigurationContext.retrieve=retrieve
Configure.ConfigurationContext.sub_config=Configure.ConfigurationContext.recurse
@@ -56,6 +85,7 @@
Configure.ConfigurationContext.check_tool=Configure.ConfigurationContext.load
Configure.conftest=Configure.conf
Configure.ConfigurationError=Errors.ConfigurationError
+Utils.WafError=Errors.WafError
Options.OptionsContext.sub_options=Options.OptionsContext.recurse
Options.OptionsContext.tool_options=Context.Context.load
Options.Handler=Options.OptionsContext
@@ -76,24 +106,32 @@
def load_tool(*k,**kw):
ret=eld(*k,**kw)
if'set_options'in ret.__dict__:
- Logs.warn('compat: rename "set_options" to options')+ if Logs.verbose:
+ Logs.warn('compat: rename "set_options" to options')ret.options=ret.set_options
if'detect'in ret.__dict__:
- Logs.warn('compat: rename "detect" to "configure"')+ if Logs.verbose:
+ Logs.warn('compat: rename "detect" to "configure"')ret.configure=ret.detect
return ret
Context.load_tool=load_tool
+def get_curdir(self):
+ return self.path.abspath()
+Context.Context.curdir=property(get_curdir,Utils.nada)
rev=Context.load_module
-def load_module(path):
- ret=rev(path)
+def load_module(path,encoding=None):
+ ret=rev(path,encoding)
if'set_options'in ret.__dict__:
- Logs.warn('compat: rename "set_options" to "options" (%r)'%path)+ if Logs.verbose:
+ Logs.warn('compat: rename "set_options" to "options" (%r)'%path)ret.options=ret.set_options
if'srcdir'in ret.__dict__:
- Logs.warn('compat: rename "srcdir" to "top" (%r)'%path)+ if Logs.verbose:
+ Logs.warn('compat: rename "srcdir" to "top" (%r)'%path)ret.top=ret.srcdir
if'blddir'in ret.__dict__:
- Logs.warn('compat: rename "blddir" to "out" (%r)'%path)+ if Logs.verbose:
+ Logs.warn('compat: rename "blddir" to "out" (%r)'%path)ret.out=ret.blddir
return ret
Context.load_module=load_module
@@ -101,15 +139,18 @@
def post(self):
self.features=self.to_list(self.features)
if'cc'in self.features:
- Logs.warn('compat: the feature cc does not exist anymore (use "c")')
+ if Logs.verbose:
+ Logs.warn('compat: the feature cc does not exist anymore (use "c")')
self.features.remove('cc')
self.features.append('c')
if'cstaticlib'in self.features:
- Logs.warn('compat: the feature cstaticlib does not exist anymore (use "cstlib" or "cxxstlib")')
+ if Logs.verbose:
+ Logs.warn('compat: the feature cstaticlib does not exist anymore (use "cstlib" or "cxxstlib")')
self.features.remove('cstaticlib')
self.features.append(('cxx'in self.features)and'cxxstlib'or'cstlib')
if getattr(self,'ccflags',None):
- Logs.warn('compat: "ccflags" was renamed to "cflags"')
+ if Logs.verbose:
+ Logs.warn('compat: "ccflags" was renamed to "cflags"')
self.cflags=self.ccflags
return old_post(self)
TaskGen.task_gen.post=post
@@ -128,9 +169,11 @@
names=self.to_list(getattr(self,'uselib_local',[]))
get=self.bld.get_tgen_by_name
seen=set([])
+ seen_uselib=set([])
tmp=Utils.deque(names)
if tmp:
- Logs.warn('compat: "uselib_local" is deprecated, replace by "use"')+ if Logs.verbose:
+ Logs.warn('compat: "uselib_local" is deprecated, replace by "use"')while tmp:
lib_name=tmp.popleft()
if lib_name in seen:
@@ -157,9 +200,11 @@
if not tmp_path in env['LIBPATH']:
env.prepend_value('LIBPATH',[tmp_path])
for v in self.to_list(getattr(y,'uselib',[])):
- if not env['STLIB_'+v]:
- if not v in self.uselib:
- self.uselib.insert(0,v)
+ if v not in seen_uselib:
+ seen_uselib.add(v)
+ if not env['STLIB_'+v]:
+ if not v in self.uselib:
+ self.uselib.insert(0,v)
if getattr(y,'export_includes',None):
self.includes.extend(y.to_incnodes(y.export_includes))
@TaskGen.feature('cprogram','cxxprogram','cstlib','cxxstlib','cshlib','cxxshlib','dprogram','dstlib','dshlib')
@@ -218,3 +263,17 @@
Logs.warn('compat: change "export_incdirs" by "export_includes"')
self.export_includes=val
TaskGen.task_gen.export_incdirs=property(None,set_incdirs)
+def install_dir(self,path):
+ if not path:
+ return[]
+ destpath=Utils.subst_vars(path,self.env)
+ if self.is_install>0:
+ Logs.info('* creating %s'%destpath)
+ Utils.check_dir(destpath)
+ elif self.is_install<0:
+ Logs.info('* removing %s'%destpath)
+ try:
+ os.remove(destpath)
+ except OSError:
+ pass
+Build.BuildContext.install_dir=install_dir
--
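A last note on compat15: once the extra is loaded it restores a few 1.5-era helpers such as Utils.cmd_output, and it now emits its migration warnings only when Logs.verbose is set. A hypothetical usage sketch:

    from waflib import Utils
    # returns the command output; raises ValueError on failure unless silent=True
    out = Utils.cmd_output(['gcc', '--version'], silent=True)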