mirror of
https://github.com/google/pebble.git
synced 2025-06-21 08:40:36 +00:00
Import of the watch repository from Pebble
This commit is contained in:
commit
3b92768480
10334 changed files with 2564465 additions and 0 deletions
3
third_party/waf/waf-light/waflib/Tools/__init__.py
vendored
Normal file
3
third_party/waf/waf-light/waflib/Tools/__init__.py
vendored
Normal file
|
@ -0,0 +1,3 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2005-2010 (ita)
|
22
third_party/waf/waf-light/waflib/Tools/ar.py
vendored
Normal file
22
third_party/waf/waf-light/waflib/Tools/ar.py
vendored
Normal file
|
@ -0,0 +1,22 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2006-2010 (ita)
|
||||
# Ralf Habacker, 2006 (rh)
|
||||
|
||||
"""
|
||||
The **ar** program creates static libraries. This tool is almost always loaded
|
||||
from others (C, C++, D, etc) for static library support.
|
||||
"""
|
||||
|
||||
from waflib.Configure import conf
|
||||
|
||||
@conf
|
||||
def find_ar(conf):
|
||||
"""Configuration helper used by C/C++ tools to enable the support for static libraries"""
|
||||
conf.load('ar')
|
||||
|
||||
def configure(conf):
|
||||
"""Find the ar program and set the default flags in ``conf.env.ARFLAGS``"""
|
||||
conf.find_program('ar', var='AR')
|
||||
conf.env.ARFLAGS = 'rcs'
|
||||
|
75
third_party/waf/waf-light/waflib/Tools/asm.py
vendored
Normal file
75
third_party/waf/waf-light/waflib/Tools/asm.py
vendored
Normal file
|
@ -0,0 +1,75 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2008-2010 (ita)
|
||||
|
||||
"""
|
||||
Assembly support, used by tools such as gas and nasm
|
||||
|
||||
To declare targets using assembly::
|
||||
|
||||
def configure(conf):
|
||||
conf.load('gcc gas')
|
||||
|
||||
def build(bld):
|
||||
bld(
|
||||
features='c cstlib asm',
|
||||
source = 'test.S',
|
||||
target = 'asmtest')
|
||||
|
||||
bld(
|
||||
features='asm asmprogram',
|
||||
source = 'test.S',
|
||||
target = 'asmtest')
|
||||
|
||||
Support for pure asm programs and libraries should also work::
|
||||
|
||||
def configure(conf):
|
||||
conf.load('nasm')
|
||||
conf.find_program('ld', 'ASLINK')
|
||||
|
||||
def build(bld):
|
||||
bld(
|
||||
features='asm asmprogram',
|
||||
source = 'test.S',
|
||||
target = 'asmtest')
|
||||
"""
|
||||
|
||||
import os, sys
|
||||
from waflib import Task, Utils
|
||||
import waflib.Task
|
||||
from waflib.Tools.ccroot import link_task, stlink_task
|
||||
from waflib.TaskGen import extension, feature
|
||||
|
||||
class asm(Task.Task):
|
||||
"""
|
||||
Compile asm files by gas/nasm/yasm/...
|
||||
"""
|
||||
color = 'BLUE'
|
||||
run_str = '${AS} ${ASFLAGS} ${ASMPATH_ST:INCPATHS} ${AS_SRC_F}${SRC} ${AS_TGT_F}${TGT}'
|
||||
|
||||
@extension('.s', '.S', '.asm', '.ASM', '.spp', '.SPP')
|
||||
def asm_hook(self, node):
|
||||
"""
|
||||
Bind the asm extension to the asm task
|
||||
|
||||
:param node: input file
|
||||
:type node: :py:class:`waflib.Node.Node`
|
||||
"""
|
||||
return self.create_compiled_task('asm', node)
|
||||
|
||||
class asmprogram(link_task):
|
||||
"Link object files into a c program"
|
||||
run_str = '${ASLINK} ${ASLINKFLAGS} ${ASLNK_TGT_F}${TGT} ${ASLNK_SRC_F}${SRC}'
|
||||
ext_out = ['.bin']
|
||||
inst_to = '${BINDIR}'
|
||||
|
||||
class asmshlib(asmprogram):
|
||||
"Link object files into a c shared library"
|
||||
inst_to = '${LIBDIR}'
|
||||
|
||||
class asmstlib(stlink_task):
|
||||
"Link object files into a c static library"
|
||||
pass # do not remove
|
||||
|
||||
def configure(conf):
|
||||
conf.env['ASMPATH_ST'] = '-I%s'
|
49
third_party/waf/waf-light/waflib/Tools/bison.py
vendored
Normal file
49
third_party/waf/waf-light/waflib/Tools/bison.py
vendored
Normal file
|
@ -0,0 +1,49 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# John O'Meara, 2006
|
||||
# Thomas Nagy 2009-2010 (ita)
|
||||
|
||||
"""
|
||||
The **bison** program is a code generator which creates C or C++ files.
|
||||
The generated files are compiled into object files.
|
||||
"""
|
||||
|
||||
from waflib import Task
|
||||
from waflib.TaskGen import extension
|
||||
|
||||
class bison(Task.Task):
|
||||
"""Compile bison files"""
|
||||
color = 'BLUE'
|
||||
run_str = '${BISON} ${BISONFLAGS} ${SRC[0].abspath()} -o ${TGT[0].name}'
|
||||
ext_out = ['.h'] # just to make sure
|
||||
|
||||
@extension('.y', '.yc', '.yy')
|
||||
def big_bison(self, node):
|
||||
"""
|
||||
Create a bison task, which must be executed from the directory of the output file.
|
||||
"""
|
||||
has_h = '-d' in self.env['BISONFLAGS']
|
||||
|
||||
outs = []
|
||||
if node.name.endswith('.yc'):
|
||||
outs.append(node.change_ext('.tab.cc'))
|
||||
if has_h:
|
||||
outs.append(node.change_ext('.tab.hh'))
|
||||
else:
|
||||
outs.append(node.change_ext('.tab.c'))
|
||||
if has_h:
|
||||
outs.append(node.change_ext('.tab.h'))
|
||||
|
||||
tsk = self.create_task('bison', node, outs)
|
||||
tsk.cwd = node.parent.get_bld().abspath()
|
||||
|
||||
# and the c/cxx file must be compiled too
|
||||
self.source.append(outs[0])
|
||||
|
||||
def configure(conf):
|
||||
"""
|
||||
Detect the *bison* program
|
||||
"""
|
||||
conf.find_program('bison', var='BISON')
|
||||
conf.env.BISONFLAGS = ['-d']
|
||||
|
37
third_party/waf/waf-light/waflib/Tools/c.py
vendored
Normal file
37
third_party/waf/waf-light/waflib/Tools/c.py
vendored
Normal file
|
@ -0,0 +1,37 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2006-2010 (ita)
|
||||
|
||||
"Base for c programs/libraries"
|
||||
|
||||
from waflib import TaskGen, Task, Utils
|
||||
from waflib.Tools import c_preproc
|
||||
from waflib.Tools.ccroot import link_task, stlink_task
|
||||
|
||||
@TaskGen.extension('.c')
|
||||
def c_hook(self, node):
|
||||
"Bind the c file extension to the creation of a :py:class:`waflib.Tools.c.c` instance"
|
||||
return self.create_compiled_task('c', node)
|
||||
|
||||
class c(Task.Task):
|
||||
"Compile C files into object files"
|
||||
run_str = '${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CC_SRC_F}${SRC} ${CC_TGT_F}${TGT}'
|
||||
vars = ['CCDEPS'] # unused variable to depend on, just in case
|
||||
ext_in = ['.h'] # set the build order easily by using ext_out=['.h']
|
||||
scan = c_preproc.scan
|
||||
|
||||
class cprogram(link_task):
|
||||
"Link object files into a c program"
|
||||
run_str = '${LINK_CC} ${LINKFLAGS} ${CCLNK_SRC_F}${SRC} ${CCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB}'
|
||||
ext_out = ['.bin']
|
||||
vars = ['LINKDEPS']
|
||||
inst_to = '${BINDIR}'
|
||||
|
||||
class cshlib(cprogram):
|
||||
"Link object files into a c shared library"
|
||||
inst_to = '${LIBDIR}'
|
||||
|
||||
class cstlib(stlink_task):
|
||||
"Link object files into a c static library"
|
||||
pass # do not remove
|
||||
|
128
third_party/waf/waf-light/waflib/Tools/c_aliases.py
vendored
Normal file
128
third_party/waf/waf-light/waflib/Tools/c_aliases.py
vendored
Normal file
|
@ -0,0 +1,128 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2005-2010 (ita)
|
||||
|
||||
"base for all c/c++ programs and libraries"
|
||||
|
||||
import os, sys, re
|
||||
from waflib import Utils, Build
|
||||
from waflib.Configure import conf
|
||||
|
||||
def get_extensions(lst):
|
||||
"""
|
||||
:param lst: files to process
|
||||
:list lst: list of string or :py:class:`waflib.Node.Node`
|
||||
:return: list of file extensions
|
||||
:rtype: list of string
|
||||
"""
|
||||
ret = []
|
||||
for x in Utils.to_list(lst):
|
||||
try:
|
||||
if not isinstance(x, str):
|
||||
x = x.name
|
||||
ret.append(x[x.rfind('.') + 1:])
|
||||
except Exception:
|
||||
pass
|
||||
return ret
|
||||
|
||||
def sniff_features(**kw):
|
||||
"""
|
||||
Look at the source files and return the features for a task generator (mainly cc and cxx)::
|
||||
|
||||
snif_features(source=['foo.c', 'foo.cxx'], type='shlib')
|
||||
# returns ['cxx', 'c', 'cxxshlib', 'cshlib']
|
||||
|
||||
:param source: source files to process
|
||||
:type source: list of string or :py:class:`waflib.Node.Node`
|
||||
:param type: object type in *program*, *shlib* or *stlib*
|
||||
:type type: string
|
||||
:return: the list of features for a task generator processing the source files
|
||||
:rtype: list of string
|
||||
"""
|
||||
exts = get_extensions(kw['source'])
|
||||
type = kw['_type']
|
||||
feats = []
|
||||
|
||||
# watch the order, cxx will have the precedence
|
||||
if 'cxx' in exts or 'cpp' in exts or 'c++' in exts or 'cc' in exts or 'C' in exts:
|
||||
feats.append('cxx')
|
||||
|
||||
if 'c' in exts or 'vala' in exts:
|
||||
feats.append('c')
|
||||
|
||||
if 'd' in exts:
|
||||
feats.append('d')
|
||||
|
||||
if 'java' in exts:
|
||||
feats.append('java')
|
||||
|
||||
if 'java' in exts:
|
||||
return 'java'
|
||||
|
||||
if type in ['program', 'shlib', 'stlib']:
|
||||
for x in feats:
|
||||
if x in ['cxx', 'd', 'c']:
|
||||
feats.append(x + type)
|
||||
|
||||
return feats
|
||||
|
||||
def set_features(kw, _type):
|
||||
kw['_type'] = _type
|
||||
kw['features'] = Utils.to_list(kw.get('features', [])) + Utils.to_list(sniff_features(**kw))
|
||||
|
||||
@conf
|
||||
def program(bld, *k, **kw):
|
||||
"""
|
||||
Alias for creating programs by looking at the file extensions::
|
||||
|
||||
def build(bld):
|
||||
bld.program(source='foo.c', target='app')
|
||||
# equivalent to:
|
||||
# bld(features='c cprogram', source='foo.c', target='app')
|
||||
|
||||
"""
|
||||
set_features(kw, 'program')
|
||||
return bld(*k, **kw)
|
||||
|
||||
@conf
|
||||
def shlib(bld, *k, **kw):
|
||||
"""
|
||||
Alias for creating shared libraries by looking at the file extensions::
|
||||
|
||||
def build(bld):
|
||||
bld.shlib(source='foo.c', target='app')
|
||||
# equivalent to:
|
||||
# bld(features='c cshlib', source='foo.c', target='app')
|
||||
|
||||
"""
|
||||
set_features(kw, 'shlib')
|
||||
return bld(*k, **kw)
|
||||
|
||||
@conf
|
||||
def stlib(bld, *k, **kw):
|
||||
"""
|
||||
Alias for creating static libraries by looking at the file extensions::
|
||||
|
||||
def build(bld):
|
||||
bld.stlib(source='foo.cpp', target='app')
|
||||
# equivalent to:
|
||||
# bld(features='cxx cxxstlib', source='foo.cpp', target='app')
|
||||
|
||||
"""
|
||||
set_features(kw, 'stlib')
|
||||
return bld(*k, **kw)
|
||||
|
||||
@conf
|
||||
def objects(bld, *k, **kw):
|
||||
"""
|
||||
Alias for creating object files by looking at the file extensions::
|
||||
|
||||
def build(bld):
|
||||
bld.objects(source='foo.c', target='app')
|
||||
# equivalent to:
|
||||
# bld(features='c', source='foo.c', target='app')
|
||||
|
||||
"""
|
||||
set_features(kw, 'objects')
|
||||
return bld(*k, **kw)
|
||||
|
1233
third_party/waf/waf-light/waflib/Tools/c_config.py
vendored
Executable file
1233
third_party/waf/waf-light/waflib/Tools/c_config.py
vendored
Executable file
File diff suppressed because it is too large
Load diff
188
third_party/waf/waf-light/waflib/Tools/c_osx.py
vendored
Normal file
188
third_party/waf/waf-light/waflib/Tools/c_osx.py
vendored
Normal file
|
@ -0,0 +1,188 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy 2008-2010
|
||||
|
||||
"""
|
||||
MacOSX related tools
|
||||
"""
|
||||
|
||||
import os, shutil, sys, platform
|
||||
from waflib import TaskGen, Task, Build, Options, Utils, Errors
|
||||
from waflib.TaskGen import taskgen_method, feature, after_method, before_method
|
||||
|
||||
app_info = '''
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist SYSTEM "file://localhost/System/Library/DTDs/PropertyList.dtd">
|
||||
<plist version="0.9">
|
||||
<dict>
|
||||
<key>CFBundlePackageType</key>
|
||||
<string>APPL</string>
|
||||
<key>CFBundleGetInfoString</key>
|
||||
<string>Created by Waf</string>
|
||||
<key>CFBundleSignature</key>
|
||||
<string>????</string>
|
||||
<key>NOTE</key>
|
||||
<string>THIS IS A GENERATED FILE, DO NOT MODIFY</string>
|
||||
<key>CFBundleExecutable</key>
|
||||
<string>%s</string>
|
||||
</dict>
|
||||
</plist>
|
||||
'''
|
||||
"""
|
||||
plist template
|
||||
"""
|
||||
|
||||
@feature('c', 'cxx')
|
||||
def set_macosx_deployment_target(self):
|
||||
"""
|
||||
see WAF issue 285 and also and also http://trac.macports.org/ticket/17059
|
||||
"""
|
||||
if self.env['MACOSX_DEPLOYMENT_TARGET']:
|
||||
os.environ['MACOSX_DEPLOYMENT_TARGET'] = self.env['MACOSX_DEPLOYMENT_TARGET']
|
||||
elif 'MACOSX_DEPLOYMENT_TARGET' not in os.environ:
|
||||
if Utils.unversioned_sys_platform() == 'darwin':
|
||||
os.environ['MACOSX_DEPLOYMENT_TARGET'] = '.'.join(platform.mac_ver()[0].split('.')[:2])
|
||||
|
||||
@taskgen_method
|
||||
def create_bundle_dirs(self, name, out):
|
||||
"""
|
||||
Create bundle folders, used by :py:func:`create_task_macplist` and :py:func:`create_task_macapp`
|
||||
"""
|
||||
bld = self.bld
|
||||
dir = out.parent.find_or_declare(name)
|
||||
dir.mkdir()
|
||||
macos = dir.find_or_declare(['Contents', 'MacOS'])
|
||||
macos.mkdir()
|
||||
return dir
|
||||
|
||||
def bundle_name_for_output(out):
|
||||
name = out.name
|
||||
k = name.rfind('.')
|
||||
if k >= 0:
|
||||
name = name[:k] + '.app'
|
||||
else:
|
||||
name = name + '.app'
|
||||
return name
|
||||
|
||||
@feature('cprogram', 'cxxprogram')
|
||||
@after_method('apply_link')
|
||||
def create_task_macapp(self):
|
||||
"""
|
||||
To compile an executable into a Mac application (a .app), set its *mac_app* attribute::
|
||||
|
||||
def build(bld):
|
||||
bld.shlib(source='a.c', target='foo', mac_app = True)
|
||||
|
||||
To force *all* executables to be transformed into Mac applications::
|
||||
|
||||
def build(bld):
|
||||
bld.env.MACAPP = True
|
||||
bld.shlib(source='a.c', target='foo')
|
||||
"""
|
||||
if self.env['MACAPP'] or getattr(self, 'mac_app', False):
|
||||
out = self.link_task.outputs[0]
|
||||
|
||||
name = bundle_name_for_output(out)
|
||||
dir = self.create_bundle_dirs(name, out)
|
||||
|
||||
n1 = dir.find_or_declare(['Contents', 'MacOS', out.name])
|
||||
|
||||
self.apptask = self.create_task('macapp', self.link_task.outputs, n1)
|
||||
inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Contents/MacOS/' % name
|
||||
self.bld.install_files(inst_to, n1, chmod=Utils.O755)
|
||||
|
||||
if getattr(self, 'mac_resources', None):
|
||||
res_dir = n1.parent.parent.make_node('Resources')
|
||||
inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Resources' % name
|
||||
for x in self.to_list(self.mac_resources):
|
||||
node = self.path.find_node(x)
|
||||
if not node:
|
||||
raise Errors.WafError('Missing mac_resource %r in %r' % (x, self))
|
||||
|
||||
parent = node.parent
|
||||
if os.path.isdir(node.abspath()):
|
||||
nodes = node.ant_glob('**')
|
||||
else:
|
||||
nodes = [node]
|
||||
for node in nodes:
|
||||
rel = node.path_from(parent)
|
||||
tsk = self.create_task('macapp', node, res_dir.make_node(rel))
|
||||
self.bld.install_as(inst_to + '/%s' % rel, node)
|
||||
|
||||
if getattr(self.bld, 'is_install', None):
|
||||
# disable the normal binary installation
|
||||
self.install_task.hasrun = Task.SKIP_ME
|
||||
|
||||
@feature('cprogram', 'cxxprogram')
|
||||
@after_method('apply_link')
|
||||
def create_task_macplist(self):
|
||||
"""
|
||||
Create a :py:class:`waflib.Tools.c_osx.macplist` instance.
|
||||
"""
|
||||
if self.env['MACAPP'] or getattr(self, 'mac_app', False):
|
||||
out = self.link_task.outputs[0]
|
||||
|
||||
name = bundle_name_for_output(out)
|
||||
|
||||
dir = self.create_bundle_dirs(name, out)
|
||||
n1 = dir.find_or_declare(['Contents', 'Info.plist'])
|
||||
self.plisttask = plisttask = self.create_task('macplist', [], n1)
|
||||
|
||||
if getattr(self, 'mac_plist', False):
|
||||
node = self.path.find_resource(self.mac_plist)
|
||||
if node:
|
||||
plisttask.inputs.append(node)
|
||||
else:
|
||||
plisttask.code = self.mac_plist
|
||||
else:
|
||||
plisttask.code = app_info % self.link_task.outputs[0].name
|
||||
|
||||
inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Contents/' % name
|
||||
self.bld.install_files(inst_to, n1)
|
||||
|
||||
@feature('cshlib', 'cxxshlib')
|
||||
@before_method('apply_link', 'propagate_uselib_vars')
|
||||
def apply_bundle(self):
|
||||
"""
|
||||
To make a bundled shared library (a ``.bundle``), set the *mac_bundle* attribute::
|
||||
|
||||
def build(bld):
|
||||
bld.shlib(source='a.c', target='foo', mac_bundle = True)
|
||||
|
||||
To force *all* executables to be transformed into bundles::
|
||||
|
||||
def build(bld):
|
||||
bld.env.MACBUNDLE = True
|
||||
bld.shlib(source='a.c', target='foo')
|
||||
"""
|
||||
if self.env['MACBUNDLE'] or getattr(self, 'mac_bundle', False):
|
||||
self.env['LINKFLAGS_cshlib'] = self.env['LINKFLAGS_cxxshlib'] = [] # disable the '-dynamiclib' flag
|
||||
self.env['cshlib_PATTERN'] = self.env['cxxshlib_PATTERN'] = self.env['macbundle_PATTERN']
|
||||
use = self.use = self.to_list(getattr(self, 'use', []))
|
||||
if not 'MACBUNDLE' in use:
|
||||
use.append('MACBUNDLE')
|
||||
|
||||
app_dirs = ['Contents', 'Contents/MacOS', 'Contents/Resources']
|
||||
|
||||
class macapp(Task.Task):
|
||||
"""
|
||||
Create mac applications
|
||||
"""
|
||||
color = 'PINK'
|
||||
def run(self):
|
||||
self.outputs[0].parent.mkdir()
|
||||
shutil.copy2(self.inputs[0].srcpath(), self.outputs[0].abspath())
|
||||
|
||||
class macplist(Task.Task):
|
||||
"""
|
||||
Create plist files
|
||||
"""
|
||||
color = 'PINK'
|
||||
ext_in = ['.bin']
|
||||
def run(self):
|
||||
if getattr(self, 'code', None):
|
||||
txt = self.code
|
||||
else:
|
||||
txt = self.inputs[0].read()
|
||||
self.outputs[0].write(txt)
|
||||
|
1029
third_party/waf/waf-light/waflib/Tools/c_preproc.py
vendored
Normal file
1029
third_party/waf/waf-light/waflib/Tools/c_preproc.py
vendored
Normal file
File diff suppressed because it is too large
Load diff
226
third_party/waf/waf-light/waflib/Tools/c_tests.py
vendored
Normal file
226
third_party/waf/waf-light/waflib/Tools/c_tests.py
vendored
Normal file
|
@ -0,0 +1,226 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2010 (ita)
|
||||
|
||||
"""
|
||||
Various configuration tests.
|
||||
"""
|
||||
|
||||
from waflib import Task
|
||||
from waflib.Configure import conf
|
||||
from waflib.TaskGen import feature, before_method, after_method
|
||||
import sys
|
||||
|
||||
LIB_CODE = '''
|
||||
#ifdef _MSC_VER
|
||||
#define testEXPORT __declspec(dllexport)
|
||||
#else
|
||||
#define testEXPORT
|
||||
#endif
|
||||
testEXPORT int lib_func(void) { return 9; }
|
||||
'''
|
||||
|
||||
MAIN_CODE = '''
|
||||
#ifdef _MSC_VER
|
||||
#define testEXPORT __declspec(dllimport)
|
||||
#else
|
||||
#define testEXPORT
|
||||
#endif
|
||||
testEXPORT int lib_func(void);
|
||||
int main(int argc, char **argv) {
|
||||
(void)argc; (void)argv;
|
||||
return !(lib_func() == 9);
|
||||
}
|
||||
'''
|
||||
|
||||
@feature('link_lib_test')
|
||||
@before_method('process_source')
|
||||
def link_lib_test_fun(self):
|
||||
"""
|
||||
The configuration test :py:func:`waflib.Tools.ccroot.run_c_code` declares a unique task generator,
|
||||
so we need to create other task generators from here to check if the linker is able to link libraries.
|
||||
"""
|
||||
def write_test_file(task):
|
||||
task.outputs[0].write(task.generator.code)
|
||||
|
||||
rpath = []
|
||||
if getattr(self, 'add_rpath', False):
|
||||
rpath = [self.bld.path.get_bld().abspath()]
|
||||
|
||||
mode = self.mode
|
||||
m = '%s %s' % (mode, mode)
|
||||
ex = self.test_exec and 'test_exec' or ''
|
||||
bld = self.bld
|
||||
bld(rule=write_test_file, target='test.' + mode, code=LIB_CODE)
|
||||
bld(rule=write_test_file, target='main.' + mode, code=MAIN_CODE)
|
||||
bld(features='%sshlib' % m, source='test.' + mode, target='test')
|
||||
bld(features='%sprogram %s' % (m, ex), source='main.' + mode, target='app', use='test', rpath=rpath)
|
||||
|
||||
@conf
|
||||
def check_library(self, mode=None, test_exec=True):
|
||||
"""
|
||||
Check if libraries can be linked with the current linker. Uses :py:func:`waflib.Tools.c_tests.link_lib_test_fun`.
|
||||
|
||||
:param mode: c or cxx or d
|
||||
:type mode: string
|
||||
"""
|
||||
if not mode:
|
||||
mode = 'c'
|
||||
if self.env.CXX:
|
||||
mode = 'cxx'
|
||||
self.check(
|
||||
compile_filename = [],
|
||||
features = 'link_lib_test',
|
||||
msg = 'Checking for libraries',
|
||||
mode = mode,
|
||||
test_exec = test_exec,
|
||||
)
|
||||
|
||||
########################################################################################
|
||||
|
||||
INLINE_CODE = '''
|
||||
typedef int foo_t;
|
||||
static %s foo_t static_foo () {return 0; }
|
||||
%s foo_t foo () {
|
||||
return 0;
|
||||
}
|
||||
'''
|
||||
INLINE_VALUES = ['inline', '__inline__', '__inline']
|
||||
|
||||
@conf
|
||||
def check_inline(self, **kw):
|
||||
"""
|
||||
Check for the right value for inline macro.
|
||||
Define INLINE_MACRO to 1 if the define is found.
|
||||
If the inline macro is not 'inline', add a define to the ``config.h`` (#define inline __inline__)
|
||||
|
||||
:param define_name: define INLINE_MACRO by default to 1 if the macro is defined
|
||||
:type define_name: string
|
||||
:param features: by default *c* or *cxx* depending on the compiler present
|
||||
:type features: list of string
|
||||
"""
|
||||
|
||||
self.start_msg('Checking for inline')
|
||||
|
||||
if not 'define_name' in kw:
|
||||
kw['define_name'] = 'INLINE_MACRO'
|
||||
if not 'features' in kw:
|
||||
if self.env.CXX:
|
||||
kw['features'] = ['cxx']
|
||||
else:
|
||||
kw['features'] = ['c']
|
||||
|
||||
for x in INLINE_VALUES:
|
||||
kw['fragment'] = INLINE_CODE % (x, x)
|
||||
|
||||
try:
|
||||
self.check(**kw)
|
||||
except self.errors.ConfigurationError:
|
||||
continue
|
||||
else:
|
||||
self.end_msg(x)
|
||||
if x != 'inline':
|
||||
self.define('inline', x, quote=False)
|
||||
return x
|
||||
self.fatal('could not use inline functions')
|
||||
|
||||
########################################################################################
|
||||
|
||||
LARGE_FRAGMENT = '''#include <unistd.h>
|
||||
int main(int argc, char **argv) {
|
||||
(void)argc; (void)argv;
|
||||
return !(sizeof(off_t) >= 8);
|
||||
}
|
||||
'''
|
||||
|
||||
@conf
|
||||
def check_large_file(self, **kw):
|
||||
"""
|
||||
Check for large file support and define the macro HAVE_LARGEFILE
|
||||
The test is skipped on win32 systems (DEST_BINFMT == pe).
|
||||
|
||||
:param define_name: define to set, by default *HAVE_LARGEFILE*
|
||||
:type define_name: string
|
||||
:param execute: execute the test (yes by default)
|
||||
:type execute: bool
|
||||
"""
|
||||
|
||||
if not 'define_name' in kw:
|
||||
kw['define_name'] = 'HAVE_LARGEFILE'
|
||||
if not 'execute' in kw:
|
||||
kw['execute'] = True
|
||||
|
||||
if not 'features' in kw:
|
||||
if self.env.CXX:
|
||||
kw['features'] = ['cxx', 'cxxprogram']
|
||||
else:
|
||||
kw['features'] = ['c', 'cprogram']
|
||||
|
||||
kw['fragment'] = LARGE_FRAGMENT
|
||||
|
||||
kw['msg'] = 'Checking for large file support'
|
||||
ret = True
|
||||
try:
|
||||
if self.env.DEST_BINFMT != 'pe':
|
||||
ret = self.check(**kw)
|
||||
except self.errors.ConfigurationError:
|
||||
pass
|
||||
else:
|
||||
if ret:
|
||||
return True
|
||||
|
||||
kw['msg'] = 'Checking for -D_FILE_OFFSET_BITS=64'
|
||||
kw['defines'] = ['_FILE_OFFSET_BITS=64']
|
||||
try:
|
||||
ret = self.check(**kw)
|
||||
except self.errors.ConfigurationError:
|
||||
pass
|
||||
else:
|
||||
self.define('_FILE_OFFSET_BITS', 64)
|
||||
return ret
|
||||
|
||||
self.fatal('There is no support for large files')
|
||||
|
||||
########################################################################################
|
||||
|
||||
ENDIAN_FRAGMENT = '''
|
||||
short int ascii_mm[] = { 0x4249, 0x4765, 0x6E44, 0x6961, 0x6E53, 0x7953, 0 };
|
||||
short int ascii_ii[] = { 0x694C, 0x5454, 0x656C, 0x6E45, 0x6944, 0x6E61, 0 };
|
||||
int use_ascii (int i) {
|
||||
return ascii_mm[i] + ascii_ii[i];
|
||||
}
|
||||
short int ebcdic_ii[] = { 0x89D3, 0xE3E3, 0x8593, 0x95C5, 0x89C4, 0x9581, 0 };
|
||||
short int ebcdic_mm[] = { 0xC2C9, 0xC785, 0x95C4, 0x8981, 0x95E2, 0xA8E2, 0 };
|
||||
int use_ebcdic (int i) {
|
||||
return ebcdic_mm[i] + ebcdic_ii[i];
|
||||
}
|
||||
extern int foo;
|
||||
'''
|
||||
|
||||
class grep_for_endianness(Task.Task):
|
||||
color = 'PINK'
|
||||
def run(self):
|
||||
txt = self.inputs[0].read(flags='rb').decode('iso8859-1')
|
||||
if txt.find('LiTTleEnDian') > -1:
|
||||
self.generator.tmp.append('little')
|
||||
elif txt.find('BIGenDianSyS') > -1:
|
||||
self.generator.tmp.append('big')
|
||||
else:
|
||||
return -1
|
||||
|
||||
@feature('grep_for_endianness')
|
||||
@after_method('process_source')
|
||||
def grep_for_endianness_fun(self):
|
||||
self.create_task('grep_for_endianness', self.compiled_tasks[0].outputs[0])
|
||||
|
||||
@conf
|
||||
def check_endianness(self):
|
||||
"""
|
||||
Execute a configuration test to determine the endianness
|
||||
"""
|
||||
tmp = []
|
||||
def check_msg(self):
|
||||
return tmp[0]
|
||||
self.check(fragment=ENDIAN_FRAGMENT, features='c grep_for_endianness', msg="Checking for endianness", define='ENDIANNESS', tmp=tmp, okmsg=check_msg)
|
||||
return tmp[0]
|
||||
|
653
third_party/waf/waf-light/waflib/Tools/ccroot.py
vendored
Normal file
653
third_party/waf/waf-light/waflib/Tools/ccroot.py
vendored
Normal file
|
@ -0,0 +1,653 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2005-2010 (ita)
|
||||
|
||||
"""
|
||||
Classes and methods shared by tools providing support for C-like language such
|
||||
as C/C++/D/Assembly/Go (this support module is almost never used alone).
|
||||
"""
|
||||
|
||||
import os, re
|
||||
from waflib import Task, Utils, Node, Errors
|
||||
from waflib.TaskGen import after_method, before_method, feature, taskgen_method, extension
|
||||
from waflib.Tools import c_aliases, c_preproc, c_config, c_osx, c_tests
|
||||
from waflib.Configure import conf
|
||||
|
||||
SYSTEM_LIB_PATHS = ['/usr/lib64', '/usr/lib', '/usr/local/lib64', '/usr/local/lib']
|
||||
|
||||
USELIB_VARS = Utils.defaultdict(set)
|
||||
"""
|
||||
Mapping for features to :py:class:`waflib.ConfigSet.ConfigSet` variables. See :py:func:`waflib.Tools.ccroot.propagate_uselib_vars`.
|
||||
"""
|
||||
|
||||
USELIB_VARS['c'] = set(['INCLUDES', 'FRAMEWORKPATH', 'DEFINES', 'CPPFLAGS', 'CCDEPS', 'CFLAGS', 'ARCH'])
|
||||
USELIB_VARS['cxx'] = set(['INCLUDES', 'FRAMEWORKPATH', 'DEFINES', 'CPPFLAGS', 'CXXDEPS', 'CXXFLAGS', 'ARCH'])
|
||||
USELIB_VARS['d'] = set(['INCLUDES', 'DFLAGS'])
|
||||
USELIB_VARS['includes'] = set(['INCLUDES', 'FRAMEWORKPATH', 'ARCH'])
|
||||
|
||||
USELIB_VARS['cprogram'] = USELIB_VARS['cxxprogram'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS', 'FRAMEWORK', 'FRAMEWORKPATH', 'ARCH'])
|
||||
USELIB_VARS['cshlib'] = USELIB_VARS['cxxshlib'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS', 'FRAMEWORK', 'FRAMEWORKPATH', 'ARCH'])
|
||||
USELIB_VARS['cstlib'] = USELIB_VARS['cxxstlib'] = set(['ARFLAGS', 'LINKDEPS'])
|
||||
|
||||
USELIB_VARS['dprogram'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS'])
|
||||
USELIB_VARS['dshlib'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS'])
|
||||
USELIB_VARS['dstlib'] = set(['ARFLAGS', 'LINKDEPS'])
|
||||
|
||||
USELIB_VARS['asm'] = set(['ASFLAGS'])
|
||||
|
||||
# =================================================================================================
|
||||
|
||||
@taskgen_method
|
||||
def create_compiled_task(self, name, node):
|
||||
"""
|
||||
Create the compilation task: c, cxx, asm, etc. The output node is created automatically (object file with a typical **.o** extension).
|
||||
The task is appended to the list *compiled_tasks* which is then used by :py:func:`waflib.Tools.ccroot.apply_link`
|
||||
|
||||
:param name: name of the task class
|
||||
:type name: string
|
||||
:param node: the file to compile
|
||||
:type node: :py:class:`waflib.Node.Node`
|
||||
:return: The task created
|
||||
:rtype: :py:class:`waflib.Task.Task`
|
||||
"""
|
||||
out = '%s.%d.o' % (node.name, self.idx)
|
||||
task = self.create_task(name, node, node.parent.find_or_declare(out))
|
||||
try:
|
||||
self.compiled_tasks.append(task)
|
||||
except AttributeError:
|
||||
self.compiled_tasks = [task]
|
||||
return task
|
||||
|
||||
@taskgen_method
|
||||
def to_incnodes(self, inlst):
|
||||
"""
|
||||
Task generator method provided to convert a list of string/nodes into a list of includes folders.
|
||||
|
||||
The paths are assumed to be relative to the task generator path, except if they begin by **#**
|
||||
in which case they are searched from the top-level directory (``bld.srcnode``).
|
||||
The folders are simply assumed to be existing.
|
||||
|
||||
The node objects in the list are returned in the output list. The strings are converted
|
||||
into node objects if possible. The node is searched from the source directory, and if a match is found,
|
||||
the equivalent build directory is created and added to the returned list too. When a folder cannot be found, it is ignored.
|
||||
|
||||
:param inlst: list of folders
|
||||
:type inlst: space-delimited string or a list of string/nodes
|
||||
:rtype: list of :py:class:`waflib.Node.Node`
|
||||
:return: list of include folders as nodes
|
||||
"""
|
||||
lst = []
|
||||
seen = set([])
|
||||
for x in self.to_list(inlst):
|
||||
if x in seen or not x:
|
||||
continue
|
||||
seen.add(x)
|
||||
|
||||
if isinstance(x, Node.Node):
|
||||
lst.append(x)
|
||||
else:
|
||||
if os.path.isabs(x):
|
||||
lst.append(self.bld.root.make_node(x) or x)
|
||||
else:
|
||||
if x[0] == '#':
|
||||
p = self.bld.bldnode.make_node(x[1:])
|
||||
v = self.bld.srcnode.make_node(x[1:])
|
||||
else:
|
||||
p = self.path.get_bld().make_node(x)
|
||||
v = self.path.make_node(x)
|
||||
if p.is_child_of(self.bld.bldnode):
|
||||
p.mkdir()
|
||||
lst.append(p)
|
||||
lst.append(v)
|
||||
return lst
|
||||
|
||||
@feature('c', 'cxx', 'd', 'asm', 'fc', 'includes')
|
||||
@after_method('propagate_uselib_vars', 'process_source')
|
||||
def apply_incpaths(self):
|
||||
"""
|
||||
Task generator method that processes the attribute *includes*::
|
||||
|
||||
tg = bld(features='includes', includes='.')
|
||||
|
||||
The folders only need to be relative to the current directory, the equivalent build directory is
|
||||
added automatically (for headers created in the build directory). This enable using a build directory
|
||||
or not (``top == out``).
|
||||
|
||||
This method will add a list of nodes read by :py:func:`waflib.Tools.ccroot.to_incnodes` in ``tg.env.INCPATHS``,
|
||||
and the list of include paths in ``tg.env.INCLUDES``.
|
||||
"""
|
||||
|
||||
lst = self.to_incnodes(self.to_list(getattr(self, 'includes', [])) + self.env['INCLUDES'])
|
||||
self.includes_nodes = lst
|
||||
self.env['INCPATHS'] = [x.abspath() for x in lst]
|
||||
|
||||
class link_task(Task.Task):
	"""
	Base class for all link tasks. A task generator is supposed to have at most one link task bound in the attribute *link_task*. See :py:func:`waflib.Tools.ccroot.apply_link`.

	.. inheritance-diagram:: waflib.Tools.ccroot.stlink_task waflib.Tools.c.cprogram waflib.Tools.c.cshlib waflib.Tools.cxx.cxxstlib waflib.Tools.cxx.cxxprogram waflib.Tools.cxx.cxxshlib waflib.Tools.d.dprogram waflib.Tools.d.dshlib waflib.Tools.d.dstlib waflib.Tools.ccroot.fake_shlib waflib.Tools.ccroot.fake_stlib waflib.Tools.asm.asmprogram waflib.Tools.asm.asmshlib waflib.Tools.asm.asmstlib
	"""
	color = 'YELLOW'

	inst_to = None
	"""Default installation path for the link task outputs, or None to disable"""

	chmod = Utils.O755
	"""Default installation mode for the link task outputs"""

	def add_target(self, target):
		"""
		Process the *target* attribute to add the platform-specific prefix/suffix such as *.so* or *.exe*.
		The settings are retrieved from ``env.clsname_PATTERN``

		:param target: output name, as a string or a Node (Nodes are used as-is)
		"""
		if isinstance(target, str):
			# e.g. env.cshlib_PATTERN = 'lib%s.so'; fall back to the bare name
			pattern = self.env[self.__class__.__name__ + '_PATTERN']
			if not pattern:
				pattern = '%s'
			folder, name = os.path.split(target)

			if self.__class__.__name__.find('shlib') > 0:
				if self.env.DEST_BINFMT == 'pe' and getattr(self.generator, 'vnum', None):
					# include the version in the dll file name,
					# the import lib file name stays unversioned.
					name = name + '-' + self.generator.vnum.split('.')[0]

			tmp = folder + os.sep + pattern % name
			target = self.generator.path.find_or_declare(tmp)
		self.set_outputs(target)
|
||||
|
||||
class stlink_task(link_task):
	"""
	Base for static link tasks, which use *ar* most of the time.
	The target is always removed before being written.
	"""
	run_str = '${AR} ${ARFLAGS} ${AR_TGT_F}${TGT} ${AR_SRC_F}${SRC}'

def rm_tgt(cls):
	# Wrap cls.run so the archive is removed before ar executes: ar updates
	# archives in place, which could otherwise leave stale members behind.
	old = cls.run
	def wrap(self):
		try: os.remove(self.outputs[0].abspath())
		except OSError: pass
		return old(self)
	setattr(cls, 'run', wrap)
rm_tgt(stlink_task)
|
||||
|
||||
@feature('c', 'cxx', 'd', 'fc', 'asm')
@after_method('process_source')
def apply_link(self):
	"""
	Collect the tasks stored in ``compiled_tasks`` (created by :py:func:`waflib.Tools.ccroot.create_compiled_task`), and
	use the outputs for a new instance of :py:class:`waflib.Tools.ccroot.link_task`. The class to use is the first link task
	matching a name from the attribute *features*, for example::

			def build(bld):
				tg = bld(features='cxx cxxprogram cprogram', source='main.c', target='app')

	will create the task ``tg.link_task`` as a new instance of :py:class:`waflib.Tools.cxx.cxxprogram`
	"""

	for x in self.features:
		if x == 'cprogram' and 'cxx' in self.features: # limited compat
			x = 'cxxprogram'
		elif x == 'cshlib' and 'cxx' in self.features:
			x = 'cxxshlib'

		if x in Task.classes:
			if issubclass(Task.classes[x], link_task):
				link = x
				break
	else:
		# no link-task feature present: nothing to link (e.g. objects-only generator)
		return

	objs = [t.outputs[0] for t in getattr(self, 'compiled_tasks', [])]
	self.link_task = self.create_task(link, objs)
	self.link_task.add_target(self.target)

	# remember that the install paths are given by the task generators
	try:
		inst_to = self.install_path
	except AttributeError:
		inst_to = self.link_task.__class__.inst_to
	if inst_to:
		# install a copy of the node list we have at this moment (implib not added)
		self.install_task = self.bld.install_files(inst_to, self.link_task.outputs[:], env=self.env, chmod=self.link_task.chmod)
|
||||
|
||||
@taskgen_method
def use_rec(self, name, **kw):
	"""
	Processes the ``use`` keyword recursively. This method is kind of private and only meant to be used from ``process_use``

	:param name: name of a task generator, or of a uselib variable when no generator matches
	"""

	# already classified (either as a plain uselib name or as a visited generator)
	if name in self.tmp_use_not or name in self.tmp_use_seen:
		return

	try:
		y = self.bld.get_tgen_by_name(name)
	except Errors.WafError:
		# not a task generator: treat the name as a uselib variable
		self.uselib.append(name)
		self.tmp_use_not.add(name)
		return

	self.tmp_use_seen.append(name)
	# force the dependency generator to create its tasks (link_task in particular)
	y.post()

	# bind temporary attributes on the task generator
	y.tmp_use_objects = objects = kw.get('objects', True)
	y.tmp_use_stlib = stlib = kw.get('stlib', True)
	try:
		link_task = y.link_task
	except AttributeError:
		# objects-only generator: nothing to add to LIB/STLIB
		y.tmp_use_var = ''
	else:
		objects = False
		if not isinstance(link_task, stlink_task):
			# shared libraries cut the propagation of object files and static libs
			stlib = False
			y.tmp_use_var = 'LIB'
		else:
			y.tmp_use_var = 'STLIB'

	# record the dependency edges for the topological sort in process_use
	p = self.tmp_use_prec
	for x in self.to_list(getattr(y, 'use', [])):
		try:
			p[x].append(name)
		except KeyError:
			p[x] = [name]
		self.use_rec(x, objects=objects, stlib=stlib)
|
||||
|
||||
@feature('c', 'cxx', 'd', 'use', 'fc')
@before_method('apply_incpaths', 'propagate_uselib_vars')
@after_method('apply_link', 'process_source')
def process_use(self):
	"""
	Process the ``use`` attribute which contains a list of task generator names::

		def build(bld):
			bld.shlib(source='a.c', target='lib1')
			bld.program(source='main.c', target='app', use='lib1')

	See :py:func:`waflib.Tools.ccroot.use_rec`.
	"""

	# names that turned out to be plain uselib variables, not task generators
	use_not = self.tmp_use_not = set([])
	self.tmp_use_seen = [] # we would like an ordered set
	# dependency edges: use_prec[a] = [b, ...] means b depends on a
	use_prec = self.tmp_use_prec = {}
	self.uselib = self.to_list(getattr(self, 'uselib', []))
	self.includes = self.to_list(getattr(self, 'includes', []))
	names = self.to_list(getattr(self, 'use', []))

	for x in names:
		self.use_rec(x)

	# non-generator names carry no ordering information
	for x in use_not:
		if x in use_prec:
			del use_prec[x]

	# topological sort
	out = []
	tmp = []
	for x in self.tmp_use_seen:
		# start from the nodes nothing depends on
		for k in use_prec.values():
			if x in k:
				break
		else:
			tmp.append(x)

	while tmp:
		e = tmp.pop()
		out.append(e)
		try:
			nlst = use_prec[e]
		except KeyError:
			pass
		else:
			del use_prec[e]
			for x in nlst:
				# append x once no remaining edge points at it
				for y in use_prec:
					if x in use_prec[y]:
						break
				else:
					tmp.append(x)
	if use_prec:
		raise Errors.WafError('Cycle detected in the use processing %r' % use_prec)
	# process dependencies before dependents
	out.reverse()

	link_task = getattr(self, 'link_task', None)
	for x in out:
		y = self.bld.get_tgen_by_name(x)
		var = y.tmp_use_var
		if var and link_task:
			if var == 'LIB' or y.tmp_use_stlib:
				# add the library name (basename of the target) and its search path
				self.env.append_value(var, [y.target[y.target.rfind(os.sep) + 1:]])
				self.link_task.dep_nodes.extend(y.link_task.outputs)
				tmp_path = y.link_task.outputs[0].parent.path_from(self.bld.bldnode)
				self.env.append_value(var + 'PATH', [tmp_path])
		else:
			if y.tmp_use_objects:
				self.add_objects_from_tgen(y)

		if getattr(y, 'export_includes', None):
			self.includes.extend(y.to_incnodes(y.export_includes))

		if getattr(y, 'export_defines', None):
			self.env.append_value('DEFINES', self.to_list(y.export_defines))


	# and finally, add the uselib variables (no recursion needed)
	for x in names:
		try:
			y = self.bld.get_tgen_by_name(x)
		except Exception:
			# not a generator: keep the name as a uselib variable
			if not self.env['STLIB_' + x] and not x in self.uselib:
				self.uselib.append(x)
		else:
			for k in self.to_list(getattr(y, 'uselib', [])):
				if not self.env['STLIB_' + k] and not k in self.uselib:
					self.uselib.append(k)
|
||||
|
||||
@taskgen_method
def accept_node_to_link(self, node):
	"""
	PRIVATE INTERNAL USE ONLY

	Predicate deciding whether an output node may be fed to a link task;
	``.pdb`` debug files are filtered out.
	"""
	if node.name.endswith('.pdb'):
		return False
	return True
|
||||
|
||||
@taskgen_method
def add_objects_from_tgen(self, tg):
	"""
	Add the objects from the depending compiled tasks as link task inputs.

	Some objects are filtered: for instance, .pdb files are added
	to the compiled tasks but not to the link tasks (to avoid errors)
	PRIVATE INTERNAL USE ONLY

	:param tg: the task generator providing the compiled tasks
	"""
	try:
		link_task = self.link_task
	except AttributeError:
		# no link task on this generator: nothing to feed
		pass
	else:
		for tsk in getattr(tg, 'compiled_tasks', []):
			for x in tsk.outputs:
				if self.accept_node_to_link(x):
					link_task.inputs.append(x)
|
||||
|
||||
@taskgen_method
def get_uselib_vars(self):
	"""
	:return: the *uselib* variables associated to the *features* attribute (see :py:attr:`waflib.Tools.ccroot.USELIB_VARS`)
	:rtype: set of string
	"""
	result = set()
	for feat in self.features:
		try:
			result.update(USELIB_VARS[feat])
		except KeyError:
			# features without registered uselib variables are skipped
			pass
	return result
|
||||
|
||||
@feature('c', 'cxx', 'd', 'fc', 'javac', 'cs', 'uselib', 'asm')
@after_method('process_use')
def propagate_uselib_vars(self):
	"""
	Process uselib variables for adding flags. For example, the following target::

		def build(bld):
			bld.env.AFLAGS_aaa = ['bar']
			from waflib.Tools.ccroot import USELIB_VARS
			USELIB_VARS['aaa'] = set('AFLAGS')

			tg = bld(features='aaa', aflags='test')

	The *aflags* attribute will be processed and this method will set::

			tg.env.AFLAGS = ['bar', 'test']
	"""
	_vars = self.get_uselib_vars()
	env = self.env

	# 1. direct task generator attributes, e.g. tg.cflags -> env.CFLAGS
	for x in _vars:
		y = x.lower()
		env.append_unique(x, self.to_list(getattr(self, y, [])))

	# 2. feature-specific variables, e.g. env.CFLAGS_cshlib
	for x in self.features:
		for var in _vars:
			compvar = '%s_%s' % (var, x)
			env.append_value(var, env[compvar])

	# 3. uselib names, e.g. env.CFLAGS_MYLIB for uselib='MYLIB'
	for x in self.to_list(getattr(self, 'uselib', [])):
		for v in _vars:
			env.append_value(v, env[v + '_' + x])
|
||||
|
||||
# ============ the code above must not know anything about import libs ==========
|
||||
|
||||
@feature('cshlib', 'cxxshlib', 'fcshlib')
@after_method('apply_link')
def apply_implib(self):
	"""
	Handle dlls and their import libs on Windows-like systems.

	A ``.dll.a`` file called *import library* is generated.
	It must be installed as it is required for linking the library.
	"""
	# only meaningful for PE binaries (Windows)
	if not self.env.DEST_BINFMT == 'pe':
		return

	dll = self.link_task.outputs[0]
	if isinstance(self.target, Node.Node):
		name = self.target.name
	else:
		name = os.path.split(self.target)[1]
	implib = self.env['implib_PATTERN'] % name
	implib = dll.parent.find_or_declare(implib)
	self.env.append_value('LINKFLAGS', self.env['IMPLIB_ST'] % implib.bldpath())
	self.link_task.outputs.append(implib)

	# optional module-definition (.def) file
	if getattr(self, 'defs', None) and self.env.DEST_BINFMT == 'pe':
		node = self.path.find_resource(self.defs)
		if not node:
			raise Errors.WafError('invalid def file %r' % self.defs)
		if 'msvc' in (self.env.CC_NAME, self.env.CXX_NAME):
			self.env.append_value('LINKFLAGS', '/def:%s' % node.path_from(self.bld.bldnode))
			self.link_task.dep_nodes.append(node)
		else:
			# gcc for windows takes the *.def file as an input without any special flag
			self.link_task.inputs.append(node)

	try:
		inst_to = self.install_path
	except AttributeError:
		inst_to = self.link_task.__class__.inst_to
	if not inst_to:
		return

	self.implib_install_task = self.bld.install_as('${LIBDIR}/%s' % implib.name, implib, self.env)
|
||||
|
||||
# ============ the code above must not know anything about vnum processing on unix platforms =========
|
||||
|
||||
# version strings of the form x.y.z, without leading zeros (e.g. '1.2.3')
re_vnum = re.compile('^([1-9]\\d*|0)[.]([1-9]\\d*|0)[.]([1-9]\\d*|0)$')
|
||||
@feature('cshlib', 'cxxshlib', 'dshlib', 'fcshlib', 'vnum')
@after_method('apply_link', 'propagate_uselib_vars')
def apply_vnum(self):
	"""
	Enforce version numbering on shared libraries. The valid version numbers must have at most two dots::

		def build(bld):
			bld.shlib(source='a.c', target='foo', vnum='1.2.3')

	In this example, ``libfoo.so`` is installed as ``libfoo.so.1.2.3``, and the following symbolic links are created:

	* ``libfoo.so → libfoo.so.1.2.3``
	* ``libfoo.so.1 → libfoo.so.1.2.3``
	"""
	# versioning applies only to posix elf/mach-o shared libraries
	if not getattr(self, 'vnum', '') or os.name != 'posix' or self.env.DEST_BINFMT not in ('elf', 'mac-o'):
		return

	link = self.link_task
	if not re_vnum.match(self.vnum):
		raise Errors.WafError('Invalid version %r for %r' % (self.vnum, self))
	nums = self.vnum.split('.')
	node = link.outputs[0]

	# name3: fully-versioned name, name2: major-version name
	libname = node.name
	if libname.endswith('.dylib'):
		name3 = libname.replace('.dylib', '.%s.dylib' % self.vnum)
		name2 = libname.replace('.dylib', '.%s.dylib' % nums[0])
	else:
		name3 = libname + '.' + self.vnum
		name2 = libname + '.' + nums[0]

	# add the so name for the ld linker - to disable, just unset env.SONAME_ST
	if self.env.SONAME_ST:
		v = self.env.SONAME_ST % name2
		self.env.append_value('LINKFLAGS', v.split())

	# the following task is just to enable execution from the build dir :-/
	self.create_task('vnum', node, [node.parent.find_or_declare(name2), node.parent.find_or_declare(name3)])

	if getattr(self, 'install_task', None):
		# replace the default install with: real file under the versioned name + two symlinks
		self.install_task.hasrun = Task.SKIP_ME
		bld = self.bld
		path = self.install_task.dest
		t1 = bld.install_as(path + os.sep + name3, node, env=self.env, chmod=self.link_task.chmod)
		t2 = bld.symlink_as(path + os.sep + name2, name3)
		t3 = bld.symlink_as(path + os.sep + libname, name3)
		self.vnum_install_task = (t1, t2, t3)

	if '-dynamiclib' in self.env['LINKFLAGS']:
		# this requires after(propagate_uselib_vars)
		try:
			inst_to = self.install_path
		except AttributeError:
			inst_to = self.link_task.__class__.inst_to
		if inst_to:
			p = Utils.subst_vars(inst_to, self.env)
			path = os.path.join(p, self.link_task.outputs[0].name)
			self.env.append_value('LINKFLAGS', ['-install_name', path])
|
||||
|
||||
class vnum(Task.Task):
	"""
	Create the symbolic links for a versioned shared library. Instances are created by :py:func:`waflib.Tools.ccroot.apply_vnum`
	"""
	color = 'CYAN'
	# NOTE(fix): the original attribute was spelled 'quient', a typo for the
	# Task attribute 'quiet' (suppresses the task display); the misspelled
	# attribute had no effect at all.
	quiet = True
	ext_in = ['.bin']
	def run(self):
		"""Create one symlink per output pointing at the real library; return 1 on failure"""
		for x in self.outputs:
			path = x.abspath()
			# remove a pre-existing link/file first: os.symlink fails if the path exists
			try:
				os.remove(path)
			except OSError:
				pass

			try:
				os.symlink(self.inputs[0].name, path)
			except OSError:
				return 1
|
||||
|
||||
class fake_shlib(link_task):
	"""
	Task used for reading a system library and adding the dependency on it
	"""
	def runnable_status(self):
		# wait for predecessors, then record the file signature and skip execution:
		# the library already exists on the system and is never rebuilt
		for t in self.run_after:
			if not t.hasrun:
				return Task.ASK_LATER

		for x in self.outputs:
			x.sig = Utils.h_file(x.abspath())
		return Task.SKIP_ME
|
||||
|
||||
class fake_stlib(stlink_task):
	"""
	Task used for reading a system library and adding the dependency on it
	"""
	def runnable_status(self):
		# wait for predecessors, then record the file signature and skip execution:
		# the library already exists on the system and is never rebuilt
		for t in self.run_after:
			if not t.hasrun:
				return Task.ASK_LATER

		for x in self.outputs:
			x.sig = Utils.h_file(x.abspath())
		return Task.SKIP_ME
|
||||
|
||||
@conf
def read_shlib(self, name, paths=None, export_includes=None, export_defines=None):
	"""
	Read a system shared library, enabling its use as a local library. Will trigger a rebuild if the file changes::

		def build(bld):
			bld.read_shlib('m')
			bld.program(source='main.c', use='m')

	:param name: library name without prefix/suffix (``m`` for ``libm.so``)
	:param paths: additional folders to search (besides the defaults in process_lib)
	:param export_includes: include paths propagated to users of this library
	:param export_defines: defines propagated to users of this library
	"""
	# defaults are None instead of mutable [] (mutable default argument pitfall)
	return self(name=name, features='fake_lib', lib_paths=paths or [], lib_type='shlib',
		export_includes=export_includes or [], export_defines=export_defines or [])
|
||||
|
||||
@conf
def read_stlib(self, name, paths=None, export_includes=None, export_defines=None):
	"""
	Read a system static library, enabling a use as a local library. Will trigger a rebuild if the file changes.

	:param name: library name without prefix/suffix (``m`` for ``libm.a``)
	:param paths: additional folders to search (besides the defaults in process_lib)
	:param export_includes: include paths propagated to users of this library
	:param export_defines: defines propagated to users of this library
	"""
	# defaults are None instead of mutable [] (mutable default argument pitfall)
	return self(name=name, features='fake_lib', lib_paths=paths or [], lib_type='stlib',
		export_includes=export_includes or [], export_defines=export_defines or [])
|
||||
|
||||
# file name patterns tried by process_lib when locating a foreign library,
# keyed by the library kind ('shlib' or 'stlib'); %s is replaced by the name
lib_patterns = {
	'shlib' : ['lib%s.so', '%s.so', 'lib%s.dylib', 'lib%s.dll', '%s.dll'],
	'stlib' : ['lib%s.a', '%s.a', 'lib%s.dll', '%s.dll', 'lib%s.lib', '%s.lib'],
}
|
||||
|
||||
@feature('fake_lib')
def process_lib(self):
	"""
	Find the location of a foreign library. Used by :py:class:`waflib.Tools.ccroot.read_shlib` and :py:class:`waflib.Tools.ccroot.read_stlib`.

	Searches the user-provided paths, the current path and the system paths
	for a file matching :py:data:`waflib.Tools.ccroot.lib_patterns`, then binds
	a fake link task on the node found. Raises ``Errors.WafError`` when no
	candidate file exists.
	"""
	node = None

	names = [x % self.name for x in lib_patterns[self.lib_type]]
	for x in self.lib_paths + [self.path] + SYSTEM_LIB_PATHS:
		# accept both strings and nodes in lib_paths
		if not isinstance(x, Node.Node):
			x = self.bld.root.find_node(x) or self.path.find_node(x)
			if not x:
				continue

		for y in names:
			node = x.find_node(y)
			if node:
				# record the file signature so a change triggers a rebuild
				node.sig = Utils.h_file(node.abspath())
				break
		else:
			continue
		break
	else:
		raise Errors.WafError('could not find library %r' % self.name)
	self.link_task = self.create_task('fake_%s' % self.lib_type, [], [node])
	self.target = self.name
|
||||
|
||||
|
||||
class fake_o(Task.Task):
	"""Dummy task wrapping a pre-built object file; it is never executed"""
	def runnable_status(self):
		# the object file already exists, so there is nothing to run
		return Task.SKIP_ME
|
||||
|
||||
@extension('.o', '.obj')
def add_those_o_files(self, node):
	"""
	Wrap a pre-compiled object file in a dummy task and register it among the
	compiled tasks so it ends up in the link.
	"""
	tsk = self.create_task('fake_o', [], node)
	tasks = getattr(self, 'compiled_tasks', None)
	if tasks is None:
		self.compiled_tasks = [tsk]
	else:
		tasks.append(tsk)
|
||||
|
||||
@feature('fake_obj')
@before_method('process_source')
def process_objs(self):
	"""
	Puts object files in the task generator outputs
	"""
	# each source node becomes a fake_o task; clear source so
	# process_source does not handle the files a second time
	for node in self.to_nodes(self.source):
		self.add_those_o_files(node)
	self.source = []
|
||||
|
||||
@conf
def read_object(self, obj):
	"""
	Read an object file, enabling injection in libs/programs. Will trigger a rebuild if the file changes.

	:param obj: object file path, as string or Node
	:raises Errors.WafError: if a string path does not resolve to an existing file
	"""
	if not isinstance(obj, self.path.__class__):
		node = self.path.find_resource(obj)
		# fail with a clear message instead of an AttributeError on None.name below
		if not node:
			raise Errors.WafError('object file %r not found' % obj)
		obj = node
	return self(features='fake_obj', source=obj, name=obj.name)
|
||||
|
98
third_party/waf/waf-light/waflib/Tools/compiler_c.py
vendored
Normal file
98
third_party/waf/waf-light/waflib/Tools/compiler_c.py
vendored
Normal file
|
@ -0,0 +1,98 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Matthias Jahn jahn dôt matthias ât freenet dôt de, 2007 (pmarat)
|
||||
|
||||
"""
|
||||
Try to detect a C compiler from the list of supported compilers (gcc, msvc, etc)::
|
||||
|
||||
def options(opt):
|
||||
opt.load('compiler_c')
|
||||
def configure(cnf):
|
||||
cnf.load('compiler_c')
|
||||
def build(bld):
|
||||
bld.program(source='main.c', target='app')
|
||||
|
||||
The compilers are associated to platforms in :py:attr:`waflib.Tools.compiler_c.c_compiler`. To register
|
||||
a new C compiler named *cfoo* (assuming the tool ``waflib/extras/cfoo.py`` exists), use::
|
||||
|
||||
def options(opt):
|
||||
opt.load('compiler_c')
|
||||
def configure(cnf):
|
||||
from waflib.Tools.compiler_c import c_compiler
|
||||
c_compiler['win32'] = ['cfoo', 'msvc', 'gcc']
|
||||
cnf.load('compiler_c')
|
||||
def build(bld):
|
||||
bld.program(source='main.c', target='app')
|
||||
|
||||
Not all compilers need to have a specific tool. For example, the clang compilers can be detected by the gcc tools when using::
|
||||
|
||||
$ CC=clang waf configure
|
||||
"""
|
||||
|
||||
import os, sys, imp, types
|
||||
from waflib.Tools import ccroot
|
||||
from waflib import Utils, Configure
|
||||
from waflib.Logs import debug
|
||||
|
||||
# ordered lists of compiler-detection tools tried per platform; the first
# one that configures successfully wins (see configure below)
c_compiler = {
	'win32': ['msvc', 'gcc'],
	'cygwin': ['gcc'],
	'darwin': ['gcc'],
	'aix': ['xlc', 'gcc'],
	'linux': ['gcc', 'icc'],
	'sunos': ['suncc', 'gcc'],
	'irix': ['gcc', 'irixcc'],
	'hpux': ['gcc'],
	'gnu': ['gcc'],
	'java': ['gcc', 'msvc', 'icc'],
	'default':['gcc'],
}
"""
Dict mapping the platform names to waf tools finding specific compilers::

	from waflib.Tools.compiler_c import c_compiler
	c_compiler['linux'] = ['gcc', 'icc', 'suncc']
"""
|
||||
|
||||
def configure(conf):
	"""
	Try to find a suitable C compiler or raise a :py:class:`waflib.Errors.ConfigurationError`.
	"""
	try: test_for_compiler = conf.options.check_c_compiler
	except AttributeError: conf.fatal("Add options(opt): opt.load('compiler_c')")
	for compiler in test_for_compiler.split():
		# snapshot the environment so a failed attempt leaves no traces
		conf.env.stash()
		conf.start_msg('Checking for %r (c compiler)' % compiler)
		try:
			conf.load(compiler)
		except conf.errors.ConfigurationError as e:
			conf.env.revert()
			conf.end_msg(False)
			debug('compiler_c: %r' % e)
		else:
			# a tool may load without actually finding a compiler: check CC
			if conf.env['CC']:
				conf.end_msg(conf.env.get_flat('CC'))
				conf.env['COMPILER_CC'] = compiler
				break
			conf.end_msg(False)
	else:
		conf.fatal('could not configure a c compiler!')
|
||||
|
||||
def options(opt):
	"""
	Restrict the compiler detection from the command-line::

		$ waf configure --check-c-compiler=gcc
	"""
	opt.load_special_tools('c_*.py', ban=['c_dumbpreproc.py'])
	global c_compiler
	build_platform = Utils.unversioned_sys_platform()
	# fall back to the 'default' entry for unknown platforms
	possible_compiler_list = c_compiler[build_platform in c_compiler and build_platform or 'default']
	test_for_compiler = ' '.join(possible_compiler_list)
	cc_compiler_opts = opt.add_option_group("C Compiler Options")
	cc_compiler_opts.add_option('--check-c-compiler', default="%s" % test_for_compiler,
		help='On this platform (%s) the following C-Compiler will be checked by default: "%s"' % (build_platform, test_for_compiler),
		dest="check_c_compiler")
	# load the candidate tools so they can register their own options
	for x in test_for_compiler.split():
		opt.load('%s' % x)
|
||||
|
102
third_party/waf/waf-light/waflib/Tools/compiler_cxx.py
vendored
Normal file
102
third_party/waf/waf-light/waflib/Tools/compiler_cxx.py
vendored
Normal file
|
@ -0,0 +1,102 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Matthias Jahn jahn dôt matthias ât freenet dôt de 2007 (pmarat)
|
||||
|
||||
"""
|
||||
Try to detect a C++ compiler from the list of supported compilers (g++, msvc, etc)::
|
||||
|
||||
def options(opt):
|
||||
opt.load('compiler_cxx')
|
||||
def configure(cnf):
|
||||
cnf.load('compiler_cxx')
|
||||
def build(bld):
|
||||
bld.program(source='main.cpp', target='app')
|
||||
|
||||
The compilers are associated to platforms in :py:attr:`waflib.Tools.compiler_cxx.cxx_compiler`. To register
|
||||
a new C++ compiler named *cfoo* (assuming the tool ``waflib/extras/cfoo.py`` exists), use::
|
||||
|
||||
def options(opt):
|
||||
opt.load('compiler_cxx')
|
||||
def configure(cnf):
|
||||
from waflib.Tools.compiler_cxx import cxx_compiler
|
||||
cxx_compiler['win32'] = ['cfoo', 'msvc', 'gcc']
|
||||
cnf.load('compiler_cxx')
|
||||
def build(bld):
|
||||
		bld.program(source='main.cpp', target='app')
|
||||
|
||||
Not all compilers need to have a specific tool. For example, the clang compilers can be detected by the gcc tools when using::
|
||||
|
||||
$ CXX=clang waf configure
|
||||
"""
|
||||
|
||||
|
||||
import os, sys, imp, types
|
||||
from waflib.Tools import ccroot
|
||||
from waflib import Utils, Configure
|
||||
from waflib.Logs import debug
|
||||
|
||||
# ordered lists of compiler-detection tools tried per platform; the first
# one that configures successfully wins (see configure below)
cxx_compiler = {
	'win32': ['msvc', 'g++'],
	'cygwin': ['g++'],
	'darwin': ['g++'],
	'aix': ['xlc++', 'g++'],
	'linux': ['g++', 'icpc'],
	'sunos': ['sunc++', 'g++'],
	'irix': ['g++'],
	'hpux': ['g++'],
	'gnu': ['g++'],
	'java': ['g++', 'msvc', 'icpc'],
	'default': ['g++']
}
"""
Dict mapping the platform names to waf tools finding specific compilers::

	from waflib.Tools.compiler_cxx import cxx_compiler
	cxx_compiler['linux'] = ['gxx', 'icpc', 'suncxx']
"""
|
||||
|
||||
|
||||
def configure(conf):
	"""
	Try to find a suitable C++ compiler or raise a :py:class:`waflib.Errors.ConfigurationError`.
	"""
	try: test_for_compiler = conf.options.check_cxx_compiler
	except AttributeError: conf.fatal("Add options(opt): opt.load('compiler_cxx')")

	for compiler in test_for_compiler.split():
		# snapshot the environment so a failed attempt leaves no traces
		conf.env.stash()
		conf.start_msg('Checking for %r (c++ compiler)' % compiler)
		try:
			conf.load(compiler)
		except conf.errors.ConfigurationError as e:
			conf.env.revert()
			conf.end_msg(False)
			debug('compiler_cxx: %r' % e)
		else:
			# a tool may load without actually finding a compiler: check CXX
			if conf.env['CXX']:
				conf.end_msg(conf.env.get_flat('CXX'))
				conf.env['COMPILER_CXX'] = compiler
				break
			conf.end_msg(False)
	else:
		conf.fatal('could not configure a c++ compiler!')
|
||||
|
||||
def options(opt):
	"""
	Restrict the compiler detection from the command-line::

		$ waf configure --check-cxx-compiler=gxx
	"""
	opt.load_special_tools('cxx_*.py')
	global cxx_compiler
	build_platform = Utils.unversioned_sys_platform()
	# fall back to the 'default' entry for unknown platforms
	possible_compiler_list = cxx_compiler[build_platform in cxx_compiler and build_platform or 'default']
	test_for_compiler = ' '.join(possible_compiler_list)
	cxx_compiler_opts = opt.add_option_group('C++ Compiler Options')
	cxx_compiler_opts.add_option('--check-cxx-compiler', default="%s" % test_for_compiler,
		help='On this platform (%s) the following C++ Compiler will be checked by default: "%s"' % (build_platform, test_for_compiler),
		dest="check_cxx_compiler")

	# load the candidate tools so they can register their own options
	for x in test_for_compiler.split():
		opt.load('%s' % x)
|
||||
|
59
third_party/waf/waf-light/waflib/Tools/compiler_d.py
vendored
Normal file
59
third_party/waf/waf-light/waflib/Tools/compiler_d.py
vendored
Normal file
|
@ -0,0 +1,59 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Carlos Rafael Giani, 2007 (dv)
|
||||
# Thomas Nagy, 2010 (ita)
|
||||
|
||||
"""
|
||||
Try to detect a D compiler from the list of supported compilers::
|
||||
|
||||
def options(opt):
|
||||
opt.load('compiler_d')
|
||||
def configure(cnf):
|
||||
cnf.load('compiler_d')
|
||||
def build(bld):
|
||||
bld.program(source='main.d', target='app')
|
||||
|
||||
Only three D compilers are really present at the moment:
|
||||
|
||||
* gdc
|
||||
* dmd, the ldc compiler having a very similar command-line interface
|
||||
* ldc2
|
||||
"""
|
||||
|
||||
import os, sys, imp, types
|
||||
from waflib import Utils, Configure, Options, Logs
|
||||
|
||||
def configure(conf):
	"""
	Try to find a suitable D compiler or raise a :py:class:`waflib.Errors.ConfigurationError`.
	"""
	# note: the D compiler list is comma-separated (see options below)
	for compiler in conf.options.dcheck.split(','):
		# snapshot the environment so a failed attempt leaves no traces
		conf.env.stash()
		conf.start_msg('Checking for %r (d compiler)' % compiler)
		try:
			conf.load(compiler)
		except conf.errors.ConfigurationError as e:
			conf.env.revert()
			conf.end_msg(False)
			Logs.debug('compiler_d: %r' % e)
		else:
			# a tool may load without actually finding a compiler: check D
			if conf.env.D:
				conf.end_msg(conf.env.get_flat('D'))
				conf.env['COMPILER_D'] = compiler
				break
			conf.end_msg(False)
	else:
		conf.fatal('no suitable d compiler was found')
|
||||
|
||||
def options(opt):
	"""
	Restrict the compiler detection from the command-line::

		$ waf configure --check-d-compiler=dmd
	"""
	d_compiler_opts = opt.add_option_group('D Compiler Options')
	d_compiler_opts.add_option('--check-d-compiler', default='gdc,dmd,ldc2', action='store',
		help='check for the compiler [Default:gdc,dmd,ldc2]', dest='dcheck')
	# load the candidate tools so they can register their own options
	for d_compiler in ['gdc', 'dmd', 'ldc2']:
		opt.load('%s' % d_compiler)
|
||||
|
66
third_party/waf/waf-light/waflib/Tools/compiler_fc.py
vendored
Normal file
66
third_party/waf/waf-light/waflib/Tools/compiler_fc.py
vendored
Normal file
|
@ -0,0 +1,66 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
|
||||
import os, sys, imp, types
|
||||
from waflib import Utils, Configure, Options, Logs, Errors
|
||||
from waflib.Tools import fc
|
||||
|
||||
# ordered lists of fortran compiler-detection tools tried per platform;
# the first one that configures successfully wins (see configure below)
fc_compiler = {
	'win32'  : ['gfortran','ifort'],
	'darwin' : ['gfortran', 'g95', 'ifort'],
	'linux'  : ['gfortran', 'g95', 'ifort'],
	'java'   : ['gfortran', 'g95', 'ifort'],
	'default': ['gfortran'],
	'aix'    : ['gfortran']
}
|
||||
|
||||
def __list_possible_compiler(platform):
	"""Return the fortran compiler tools to try on *platform*, falling back to the default list"""
	return fc_compiler.get(platform, fc_compiler["default"])
|
||||
|
||||
def configure(conf):
	"""
	Try to find a suitable Fortran compiler or raise a :py:class:`waflib.Errors.ConfigurationError`.
	"""
	try: test_for_compiler = conf.options.check_fc
	except AttributeError: conf.fatal("Add options(opt): opt.load('compiler_fc')")
	for compiler in test_for_compiler.split():
		# snapshot the environment so a failed attempt leaves no traces
		conf.env.stash()
		conf.start_msg('Checking for %r (fortran compiler)' % compiler)
		try:
			conf.load(compiler)
		except conf.errors.ConfigurationError as e:
			conf.env.revert()
			conf.end_msg(False)
			Logs.debug('compiler_fortran: %r' % e)
		else:
			# a tool may load without actually finding a compiler: check FC
			if conf.env['FC']:
				conf.end_msg(conf.env.get_flat('FC'))
				conf.env.COMPILER_FORTRAN = compiler
				break
			conf.end_msg(False)
	else:
		conf.fatal('could not configure a fortran compiler!')
|
||||
|
||||
def options(opt):
	"""
	Restrict the compiler detection from the command-line::

		$ waf configure --check-fortran-compiler=ifort
	"""
	opt.load_special_tools('fc_*.py')
	build_platform = Utils.unversioned_sys_platform()
	detected_platform = Options.platform
	possible_compiler_list = __list_possible_compiler(detected_platform)
	test_for_compiler = ' '.join(possible_compiler_list)
	fortran_compiler_opts = opt.add_option_group("Fortran Compiler Options")
	fortran_compiler_opts.add_option('--check-fortran-compiler',
			default="%s" % test_for_compiler,
			help='On this platform (%s) the following Fortran Compiler will be checked by default: "%s"' % (detected_platform, test_for_compiler),
			dest="check_fc")

	# load the candidate tools so they can register their own options
	for compiler in test_for_compiler.split():
		opt.load('%s' % compiler)
|
||||
|
222
third_party/waf/waf-light/waflib/Tools/cs.py
vendored
Normal file
222
third_party/waf/waf-light/waflib/Tools/cs.py
vendored
Normal file
|
@ -0,0 +1,222 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2006-2010 (ita)
|
||||
|
||||
"""
|
||||
C# support. A simple example::
|
||||
|
||||
def configure(conf):
|
||||
conf.load('cs')
|
||||
def build(bld):
|
||||
bld(features='cs', source='main.cs', gen='foo')
|
||||
|
||||
Note that the configuration may compile C# snippets::
|
||||
|
||||
FRAG = '''
|
||||
namespace Moo {
|
||||
public class Test { public static int Main(string[] args) { return 0; } }
|
||||
}'''
|
||||
def configure(conf):
|
||||
conf.check(features='cs', fragment=FRAG, compile_filename='test.cs', gen='test.exe',
|
||||
bintype='exe', csflags=['-pkg:gtk-sharp-2.0'], msg='Checking for Gtksharp support')
|
||||
"""
|
||||
|
||||
from waflib import Utils, Task, Options, Logs, Errors
|
||||
from waflib.TaskGen import before_method, after_method, feature
|
||||
from waflib.Tools import ccroot
|
||||
from waflib.Configure import conf
|
||||
import os, tempfile
|
||||
|
||||
ccroot.USELIB_VARS['cs'] = set(['CSFLAGS', 'ASSEMBLIES', 'RESOURCES'])
|
||||
ccroot.lib_patterns['csshlib'] = ['%s']
|
||||
|
||||
@feature('cs')
@before_method('process_source')
def apply_cs(self):
	"""
	Create the single C# task of this task generator and store it as *cs_task*.
	"""
	# split the sources: .cs files feed the C# task, the rest is left for other tools
	cs_nodes = []
	no_nodes = []
	for node in self.to_nodes(self.source):
		(cs_nodes if node.name.endswith('.cs') else no_nodes).append(node)
	self.source = no_nodes

	bintype = getattr(self, 'bintype', self.gen.endswith('.dll') and 'library' or 'exe')
	tsk = self.cs_task = self.create_task('mcs', cs_nodes, self.path.find_or_declare(self.gen))
	tsk.env.CSTYPE = '/target:%s' % bintype
	tsk.env.OUT = '/out:%s' % tsk.outputs[0].abspath()
	self.env.append_value('CSFLAGS', '/platform:%s' % getattr(self, 'platform', 'anycpu'))

	inst_to = getattr(self, 'install_path', bintype == 'exe' and '${BINDIR}' or '${LIBDIR}')
	if inst_to:
		# note: we are making a copy, so the files added to cs_task.outputs won't be installed automatically
		mod = getattr(self, 'chmod', bintype == 'exe' and Utils.O755 or Utils.O644)
		self.install_task = self.bld.install_files(inst_to, self.cs_task.outputs[:], env=self.env, chmod=mod)
||||
@feature('cs')
@after_method('apply_cs')
def use_cs(self):
	"""
	C# applications honor the **use** keyword::

		def build(bld):
			bld(features='cs', source='My.cs', bintype='library', gen='my.dll', name='mylib')
			bld(features='cs', source='Hi.cs', includes='.', bintype='exe', gen='hi.exe', use='mylib', name='hi')
	"""
	get = self.bld.get_tgen_by_name
	for name in self.to_list(getattr(self, 'use', [])):
		try:
			other = get(name)
		except Errors.WafError:
			# not a task generator of this build: assume a plain assembly reference
			self.env.append_value('CSFLAGS', '/reference:%s' % name)
			continue
		other.post()

		tsk = getattr(other, 'cs_task', None) or getattr(other, 'link_task', None)
		if not tsk:
			self.bld.fatal('cs task has no link task for use %r' % self)
		self.cs_task.dep_nodes.extend(tsk.outputs) # dependency
		self.cs_task.set_run_after(tsk) # order (redundant, the order is infered from the nodes inputs/outputs)
		self.env.append_value('CSFLAGS', '/reference:%s' % tsk.outputs[0].abspath())
||||
@feature('cs')
@after_method('apply_cs', 'use_cs')
def debug_cs(self):
	"""
	The C# targets may create .mdb or .pdb files::

		def build(bld):
			bld(features='cs', source='My.cs', bintype='library', gen='my.dll', csdebug='full')
			# csdebug is a value in [True, 'full', 'pdbonly']
	"""
	csdebug = getattr(self, 'csdebug', self.env.CSDEBUG)
	if not csdebug:
		return

	node = self.cs_task.outputs[0]
	# mono writes assembly.dll.mdb next to the binary, csc emits a .pdb instead
	if self.env.CS_NAME == 'mono':
		out = node.parent.find_or_declare(node.name + '.mdb')
	else:
		out = node.change_ext('.pdb')
	self.cs_task.outputs.append(out)
	try:
		self.install_task.source.append(out)
	except AttributeError:
		pass

	# map the csdebug value to compiler flags; anything else disables debug info
	debug_flags = {
		'pdbonly': ['/debug+', '/debug:pdbonly'],
		'full': ['/debug+', '/debug:full'],
	}
	self.env.append_value('CSFLAGS', debug_flags.get(csdebug, ['/debug-']))
||||
class mcs(Task.Task):
	"""
	Compile C# files
	"""
	color = 'YELLOW'
	run_str = '${MCS} ${CSTYPE} ${CSFLAGS} ${ASS_ST:ASSEMBLIES} ${RES_ST:RESOURCES} ${OUT} ${SRC}'

	def exec_command(self, cmd, **kw):
		# make sure a working directory is set, caching it on the build context
		bld = self.generator.bld
		try:
			if not kw.get('cwd', None):
				kw['cwd'] = bld.cwd
		except AttributeError:
			bld.cwd = kw['cwd'] = bld.variant_dir

		response = None
		try:
			# very long command lines overflow the OS limit: switch to a response file
			if isinstance(cmd, list) and len(' '.join(cmd)) >= 8192:
				program = cmd[0] #unquoted program name, otherwise exec_command will fail
				cmd = [self.quote_response_command(x) for x in cmd]
				(fd, response) = tempfile.mkstemp()
				os.write(fd, '\r\n'.join(i.replace('\\', '\\\\') for i in cmd[1:]).encode())
				os.close(fd)
				cmd = [program, '@' + response]
			# no return here, that's on purpose
			ret = self.generator.bld.exec_command(cmd, **kw)
		finally:
			if response:
				try:
					os.remove(response)
				except OSError:
					pass # anti-virus and indexers can keep the files open -_-
		return ret

	def quote_response_command(self, flag):
		# /noconfig is not allowed when using response files
		if flag.lower() == '/noconfig':
			return ''

		if flag.find(' ') > -1:
			# quote only the value part of prefixed flags, else the whole flag
			for prefix in ('/r:', '/reference:', '/resource:', '/lib:', '/out:'):
				if flag.startswith(prefix):
					return '%s"%s"' % (prefix, flag[len(prefix):])
			return '"%s"' % flag
		return flag
||||
def configure(conf):
	"""
	Find a C# compiler, set the variable MCS for the compiler and CS_NAME (mono or csc)
	"""
	# an explicit binary given on the command line takes precedence
	csc = getattr(Options.options, 'cscbinary', None)
	if csc:
		conf.env.MCS = csc
	conf.find_program(['csc', 'mcs', 'gmcs'], var='MCS')
	conf.env.ASS_ST = '/r:%s'
	conf.env.RES_ST = '/resource:%s'

	conf.env.CS_NAME = 'mono' if 'mcs' in str(conf.env.MCS).lower() else 'csc'
||||
def options(opt):
	"""
	Add a command-line option for the configuration::

		$ waf configure --with-csc-binary=/foo/bar/mcs
	"""
	opt.add_option('--with-csc-binary', type='string', dest='cscbinary')
|
||||
class fake_csshlib(Task.Task):
	"""
	Task used for reading a foreign .net assembly and adding the dependency on it
	"""
	color = 'YELLOW'
	inst_to = None

	def runnable_status(self):
		# record the signatures of the external binaries so that users rebuild on change
		for node in self.outputs:
			node.sig = Utils.h_file(node.abspath())
		return Task.SKIP_ME
|
||||
@conf
def read_csshlib(self, name, paths=[]):
	"""
	Read a foreign .net assembly for the *use* system::

		def build(bld):
			bld.read_csshlib('ManagedLibrary.dll', paths=[bld.env.mylibrarypath])
			bld(features='cs', source='Hi.cs', bintype='exe', gen='hi.exe', use='ManagedLibrary.dll')

	:param name: Name of the library
	:type name: string
	:param paths: Folders in which the library may be found
	:type paths: list of string
	:return: A task generator having the feature *fake_lib* which will call :py:func:`waflib.Tools.ccroot.process_lib`
	:rtype: :py:class:`waflib.TaskGen.task_gen`
	"""
	return self(name=name, features='fake_lib', lib_paths=paths, lib_type='csshlib')
|
40
third_party/waf/waf-light/waflib/Tools/cxx.py
vendored
Normal file
40
third_party/waf/waf-light/waflib/Tools/cxx.py
vendored
Normal file
|
@ -0,0 +1,40 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2005-2010 (ita)
|
||||
|
||||
"Base for c++ programs and libraries"
|
||||
|
||||
from waflib import TaskGen, Task, Utils
|
||||
from waflib.Tools import c_preproc
|
||||
from waflib.Tools.ccroot import link_task, stlink_task
|
||||
|
||||
@TaskGen.extension('.cpp','.cc','.cxx','.C','.c++')
def cxx_hook(self, node):
	"Bind the c++ file extensions to the creation of a :py:class:`waflib.Tools.cxx.cxx` instance"
	return self.create_compiled_task('cxx', node)

# let plain .c files be compiled as c++ when no c tool is loaded
if '.c' not in TaskGen.task_gen.mappings:
	TaskGen.task_gen.mappings['.c'] = TaskGen.task_gen.mappings['.cpp']
|
||||
class cxx(Task.Task):
	"Compile C++ files into object files"
	run_str = '${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CXX_SRC_F}${SRC} ${CXX_TGT_F}${TGT}'
	vars = ['CXXDEPS'] # unused variable to depend on, just in case
	ext_in = ['.h'] # set the build order easily by using ext_out=['.h']
	scan = c_preproc.scan # reuse the C preprocessor dependency scanner
||||
class cxxprogram(link_task):
	"Link object files into a c++ program"
	run_str = '${LINK_CXX} ${LINKFLAGS} ${CXXLNK_SRC_F}${SRC} ${CXXLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB}'
	vars = ['LINKDEPS'] # extra variable to depend on
	ext_out = ['.bin']
	inst_to = '${BINDIR}' # default installation directory
||||
class cxxshlib(cxxprogram):
	"Link object files into a c++ shared library"
	inst_to = '${LIBDIR}' # shared libraries install to the library directory
||||
class cxxstlib(stlink_task):
	"Link object files into a c++ static library"
	pass # do not remove
||||
|
97
third_party/waf/waf-light/waflib/Tools/d.py
vendored
Normal file
97
third_party/waf/waf-light/waflib/Tools/d.py
vendored
Normal file
|
@ -0,0 +1,97 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Carlos Rafael Giani, 2007 (dv)
|
||||
# Thomas Nagy, 2007-2010 (ita)
|
||||
|
||||
from waflib import Utils, Task, Errors
|
||||
from waflib.TaskGen import taskgen_method, feature, extension
|
||||
from waflib.Tools import d_scan, d_config
|
||||
from waflib.Tools.ccroot import link_task, stlink_task
|
||||
|
||||
class d(Task.Task):
	"Compile a d file into an object file"
	color = 'GREEN'
	run_str = '${D} ${DFLAGS} ${DINC_ST:INCPATHS} ${D_SRC_F:SRC} ${D_TGT_F:TGT}'
	scan = d_scan.scan # reuse the d dependency scanner

class d_with_header(d):
	"Compile a d file and generate a header"
	run_str = '${D} ${DFLAGS} ${DINC_ST:INCPATHS} ${D_HDR_F:tgt.outputs[1].bldpath()} ${D_SRC_F:SRC} ${D_TGT_F:tgt.outputs[0].bldpath()}'

class d_header(Task.Task):
	"Compile d headers"
	color = 'BLUE'
	run_str = '${D} ${D_HEADER} ${SRC}'

class dprogram(link_task):
	"Link object files into a d program"
	run_str = '${D_LINKER} ${LINKFLAGS} ${DLNK_SRC_F}${SRC} ${DLNK_TGT_F:TGT} ${RPATH_ST:RPATH} ${DSTLIB_MARKER} ${DSTLIBPATH_ST:STLIBPATH} ${DSTLIB_ST:STLIB} ${DSHLIB_MARKER} ${DLIBPATH_ST:LIBPATH} ${DSHLIB_ST:LIB}'
	inst_to = '${BINDIR}'

class dshlib(dprogram):
	"Link object files into a d shared library"
	inst_to = '${LIBDIR}'

class dstlib(stlink_task):
	"Link object files into a d static library"
	pass # do not remove
|
||||
@extension('.d', '.di', '.D')
def d_hook(self, node):
	"""
	Compile *D* files. To get .di files as well as .o files, set the following::

		def build(bld):
			bld.program(source='foo.d', target='app', generate_headers=True)

	"""
	# object file suffix depends on the binary format of the target platform
	ext = 'obj' if Utils.destos_to_binfmt(self.env.DEST_OS) == 'pe' else 'o'
	out = '%s.%d.%s' % (node.name, self.idx, ext)

	def _make_compiled_task(gen, name, src):
		# local helper: create the task and register it for the link step
		task = gen.create_task(name, src, src.parent.find_or_declare(out))
		try:
			gen.compiled_tasks.append(task)
		except AttributeError:
			gen.compiled_tasks = [task]
		return task

	if getattr(self, 'generate_headers', None):
		tsk = _make_compiled_task(self, 'd_with_header', node)
		tsk.outputs.append(node.change_ext(self.env['DHEADER_ext']))
	else:
		tsk = _make_compiled_task(self, 'd', node)
	return tsk
||||
@taskgen_method
def generate_header(self, filename):
	"""
	See feature request #104::

		def build(bld):
			tg = bld.program(source='foo.d', target='app')
			tg.generate_header('blah.d')
			# is equivalent to:
			#tg = bld.program(source='foo.d', target='app', header_lst='blah.d')

	:param filename: header to create
	:type filename: string
	"""
	entry = [filename, self.install_path]
	try:
		self.header_lst.append(entry)
	except AttributeError:
		# first header registered on this task generator
		self.header_lst = [entry]
||||
@feature('d')
def process_header(self):
	"""
	Process the attribute 'header_lst' to create the d header compilation tasks::

		def build(bld):
			bld.program(source='foo.d', target='app', header_lst='blah.d')
	"""
	for entry in getattr(self, 'header_lst', []):
		node = self.path.find_resource(entry[0])
		if not node:
			raise Errors.WafError('file %r not found on d obj' % entry[0])
		self.create_task('d_header', node, node.change_ext('.di'))
|
63
third_party/waf/waf-light/waflib/Tools/d_config.py
vendored
Normal file
63
third_party/waf/waf-light/waflib/Tools/d_config.py
vendored
Normal file
|
@ -0,0 +1,63 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2010 (ita)
|
||||
|
||||
from waflib import Utils
|
||||
from waflib.Configure import conf
|
||||
|
||||
@conf
def d_platform_flags(self):
	"""
	Set the extensions dll/so for d programs and libraries
	"""
	v = self.env
	if not v.DEST_OS:
		v.DEST_OS = Utils.unversioned_sys_platform()
	binfmt = Utils.destos_to_binfmt(self.env.DEST_OS)
	# (program, shared lib, static lib) patterns per binary format;
	# elf-style naming is the fallback
	patterns = {
		'pe': ('%s.exe', 'lib%s.dll', 'lib%s.a'),
		'mac-o': ('%s', 'lib%s.dylib', 'lib%s.a'),
	}
	prog, shlib, stlib = patterns.get(binfmt, ('%s', 'lib%s.so', 'lib%s.a'))
	v['dprogram_PATTERN'] = prog
	v['dshlib_PATTERN'] = shlib
	v['dstlib_PATTERN'] = stlib
|
||||
# small D program compiled (and optionally run) to identify the runtime library
DLIB = '''
version(D_Version2) {
	import std.stdio;
	int main() {
		writefln("phobos2");
		return 0;
	}
} else {
	version(Tango) {
		import tango.stdc.stdio;
		int main() {
			printf("tango");
			return 0;
		}
	} else {
		import std.stdio;
		int main() {
			writefln("phobos1");
			return 0;
		}
	}
}
'''
"""Detection string for the D standard library"""
|
||||
@conf
def check_dlibrary(self, execute=True):
	"""
	Detect the kind of standard library that comes with the compiler,
	and set conf.env.DLIBRARY to tango, phobos1 or phobos2.
	"""
	out = self.check_cc(features='d dprogram', fragment=DLIB, compile_filename='test.d', execute=execute, define_ret=True)
	if execute:
		# the test program printed the library name on stdout
		self.env.DLIBRARY = out.strip()
|
209
third_party/waf/waf-light/waflib/Tools/d_scan.py
vendored
Normal file
209
third_party/waf/waf-light/waflib/Tools/d_scan.py
vendored
Normal file
|
@ -0,0 +1,209 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2010 (ita)
|
||||
|
||||
"""
|
||||
Provide a scanner for finding dependencies on d files
|
||||
"""
|
||||
|
||||
import re
|
||||
from waflib import Utils, Logs
|
||||
|
||||
def filter_comments(filename):
	"""
	Read a d source file and strip string literals and comments, so the
	import scanner does not pick up false matches.

	:param filename: d file name
	:type filename: string
	:rtype: list
	:return: a list of code fragments (strings) with comments replaced by whitespace
	"""
	txt = Utils.readf(filename)
	i = 0
	buf = []
	end = len(txt) # fix: renamed from 'max', which shadowed the builtin
	begin = 0
	while i < end:
		c = txt[i]
		if c == '"' or c == "'": # skip a string or character literal
			buf.append(txt[begin:i])
			delim = c
			i += 1
			while i < end:
				c = txt[i]
				if c == delim: break
				elif c == '\\': # skip the character following backslash
					i += 1
				i += 1
			i += 1
			begin = i
		elif c == '/': # try to replace a comment with whitespace
			buf.append(txt[begin:i])
			i += 1
			if i == end: break
			c = txt[i]
			if c == '+': # eat nesting /+ +/ comment
				i += 1
				nesting = 1
				c = None
				while i < end:
					prev = c
					c = txt[i]
					if prev == '/' and c == '+':
						nesting += 1
						c = None
					elif prev == '+' and c == '/':
						nesting -= 1
						if nesting == 0: break
						c = None
					i += 1
			elif c == '*': # eat /* */ comment
				i += 1
				c = None
				while i < end:
					prev = c
					c = txt[i]
					if prev == '*' and c == '/': break
					i += 1
			elif c == '/': # eat // comment
				i += 1
				while i < end and txt[i] != '\n':
					i += 1
			else: # no comment: the '/' was a plain character
				begin = i - 1
				continue
			i += 1
			begin = i
			buf.append(' ')
		else:
			i += 1
	buf.append(txt[begin:])
	return buf
|
||||
class d_parser(object):
	"""
	Parser finding the modules that d files import
	"""
	def __init__(self, env, incpaths):
		"""
		:param env: task environment (stored for reference)
		:param incpaths: include path nodes searched by :py:meth:`tryfind`
		"""
		self.allnames = []

		# fix: use raw strings, '\s' is an invalid escape sequence in
		# regular string literals on modern Python versions
		self.re_module = re.compile(r"module\s+([^;]+)")
		self.re_import = re.compile(r"import\s+([^;]+)")
		self.re_import_bindings = re.compile(r"([^:]+):(.*)")
		self.re_import_alias = re.compile(r"[^=]+=(.+)")

		self.env = env

		self.nodes = []
		self.names = []

		self.incpaths = incpaths

	def tryfind(self, filename):
		"""
		Search for a file matching a module/import directive; resolved files
		go to self.nodes (and self.waiting, created by :py:meth:`start`),
		unresolved names go to self.names.

		:param filename: module name to resolve
		:type filename: string
		"""
		found = 0
		for n in self.incpaths:
			found = n.find_resource(filename.replace('.', '/') + '.d')
			if found:
				self.nodes.append(found)
				self.waiting.append(found)
				break
		if not found:
			if not filename in self.names:
				self.names.append(filename)

	def get_strings(self, code):
		"""
		:param code: d code to parse
		:type code: string
		:return: the modules that the code uses
		:rtype: a list of strings
		"""
		self.module = ''
		lst = []

		# get the module name (if present)
		mod_name = self.re_module.search(code)
		if mod_name:
			self.module = re.sub(r'\s+', '', mod_name.group(1)) # strip all whitespaces

		# go through the code, have a look at all import occurrences:
		# anything beginning with "import" and ending with ";"
		import_iterator = self.re_import.finditer(code)
		if import_iterator:
			for import_match in import_iterator:
				import_match_str = re.sub(r'\s+', '', import_match.group(1)) # strip all whitespaces

				# does this end with an import bindings declaration?
				# (import bindings always terminate the list of imports)
				bindings_match = self.re_import_bindings.match(import_match_str)
				if bindings_match:
					# keep the part before the ":" (the module declaration(s))
					import_match_str = bindings_match.group(1)

				# split the matching string into a bunch of strings, separated by a comma
				matches = import_match_str.split(',')

				for match in matches:
					alias_match = self.re_import_alias.match(match)
					if alias_match:
						# alias declaration (alias = module name): extract the module name
						match = alias_match.group(1)

					lst.append(match)
		return lst

	def start(self, node):
		"""
		The parsing starts here

		:param node: input file
		:type node: :py:class:`waflib.Node.Node`
		"""
		self.waiting = [node]
		# while the stack is not empty, add the dependencies
		while self.waiting:
			nd = self.waiting.pop(0)
			self.iter(nd)

	def iter(self, node):
		"""
		Find all the modules that a file depends on, uses :py:meth:`waflib.Tools.d_scan.d_parser.tryfind` to process dependent files

		:param node: input file
		:type node: :py:class:`waflib.Node.Node`
		"""
		path = node.abspath() # obtain the absolute path
		code = "".join(filter_comments(path)) # read the file and filter the comments
		names = self.get_strings(code) # obtain the import strings
		for x in names:
			# optimization
			if x in self.allnames: continue
			self.allnames.append(x)

			# for each name, see if it is like a node or not
			self.tryfind(x)
|
||||
def scan(self):
	"look for .d/.di used by a d file"
	parser = d_parser(self.env, self.generator.includes_nodes)
	node = self.inputs[0]
	parser.start(node)

	if Logs.verbose:
		Logs.debug('deps: deps for %s: %r; unresolved %r' % (str(node), parser.nodes, parser.names))
	return (parser.nodes, parser.names)
|
70
third_party/waf/waf-light/waflib/Tools/dbus.py
vendored
Normal file
70
third_party/waf/waf-light/waflib/Tools/dbus.py
vendored
Normal file
|
@ -0,0 +1,70 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Ali Sabil, 2007
|
||||
|
||||
"""
|
||||
Compile dbus files with **dbus-binding-tool**
|
||||
|
||||
Typical usage::
|
||||
|
||||
def options(opt):
|
||||
opt.load('compiler_c dbus')
|
||||
def configure(conf):
|
||||
conf.load('compiler_c dbus')
|
||||
def build(bld):
|
||||
tg = bld.program(
|
||||
includes = '.',
|
||||
source = bld.path.ant_glob('*.c'),
|
||||
target = 'gnome-hello')
|
||||
tg.add_dbus_file('test.xml', 'test_prefix', 'glib-server')
|
||||
"""
|
||||
|
||||
from waflib import Task, Errors
|
||||
from waflib.TaskGen import taskgen_method, before_method
|
||||
|
||||
@taskgen_method
def add_dbus_file(self, filename, prefix, mode):
	"""
	Add a dbus file to the list of dbus files to process. Store them in the attribute *dbus_lst*.

	:param filename: xml file to compile
	:type filename: string
	:param prefix: dbus binding tool prefix (--prefix=prefix)
	:type prefix: string
	:param mode: dbus binding tool mode (--mode=mode)
	:type mode: string
	"""
	if not hasattr(self, 'dbus_lst'):
		self.dbus_lst = []
	# register the processing method lazily, once per task generator
	if not 'process_dbus' in self.meths:
		self.meths.append('process_dbus')
	self.dbus_lst.append([filename, prefix, mode])
|
||||
@before_method('apply_core')
def process_dbus(self):
	"""
	Process the dbus files stored in the attribute *dbus_lst* to create :py:class:`waflib.Tools.dbus.dbus_binding_tool` instances.
	"""
	for filename, prefix, mode in getattr(self, 'dbus_lst', []):
		node = self.path.find_resource(filename)
		if not node:
			raise Errors.WafError('file not found ' + filename)
		tsk = self.create_task('dbus_binding_tool', node, node.change_ext('.h'))
		# pass the per-file parameters through the task environment
		tsk.env.DBUS_BINDING_TOOL_PREFIX = prefix
		tsk.env.DBUS_BINDING_TOOL_MODE = mode
||||
class dbus_binding_tool(Task.Task):
	"""
	Compile a dbus file
	"""
	color = 'BLUE'
	ext_out = ['.h']
	run_str = '${DBUS_BINDING_TOOL} --prefix=${DBUS_BINDING_TOOL_PREFIX} --mode=${DBUS_BINDING_TOOL_MODE} --output=${TGT} ${SRC}'
	shell = True # temporary workaround for #795
|
||||
def configure(conf):
	"""
	Detect the program dbus-binding-tool and set *conf.env.DBUS_BINDING_TOOL*
	"""
	# fix: the return value was bound to an unused local; find_program
	# already stores the result in conf.env.DBUS_BINDING_TOOL
	conf.find_program('dbus-binding-tool', var='DBUS_BINDING_TOOL')
||||
|
88
third_party/waf/waf-light/waflib/Tools/dmd.py
vendored
Normal file
88
third_party/waf/waf-light/waflib/Tools/dmd.py
vendored
Normal file
|
@ -0,0 +1,88 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Carlos Rafael Giani, 2007 (dv)
|
||||
# Thomas Nagy, 2008-2010 (ita)
|
||||
|
||||
import sys
|
||||
from waflib.Tools import ar, d
|
||||
from waflib.Configure import conf
|
||||
|
||||
@conf
def find_dmd(conf):
	"""
	Find the program *dmd*, *dmd2*, or *ldc* and set the variable *D*
	"""
	conf.find_program(['dmd', 'dmd2', 'ldc'], var='D')

	# make sure that we're dealing with dmd1, dmd2, or ldc(1)
	out = conf.cmd_and_log([conf.env.D, '--help'])
	if "D Compiler v" not in out:
		out = conf.cmd_and_log([conf.env.D, '-version'])
		if "based on DMD v1." not in out:
			conf.fatal("detected compiler is not dmd/ldc")
|
||||
@conf
def common_flags_ldc(conf):
	"""
	Set the D flags required by *ldc*
	"""
	v = conf.env
	v['DFLAGS'] = ['-d-version=Posix']
	# ldc handles linking itself, reset any linker flags set previously
	v['LINKFLAGS'] = []
	v['DFLAGS_dshlib'] = ['-relocation-model=pic']
|
||||
@conf
def common_flags_dmd(conf):
	"""
	Set the flags required by *dmd* or *dmd2*
	"""
	v = conf.env

	# _DFLAGS _DIMPORTFLAGS

	# Compiler is dmd so 'gdc' part will be ignored, just
	# ensure key is there, so wscript can append flags to it
	#v['DFLAGS'] = ['-version=Posix']

	# compilation
	v['D_SRC_F'] = ['-c']
	v['D_TGT_F'] = '-of%s'
	v['DINC_ST'] = '-I%s'

	# linker
	v['D_LINKER'] = v['D']
	v['DLNK_SRC_F'] = ''
	v['DLNK_TGT_F'] = '-of%s'

	# libraries: dmd forwards linker options with a -L prefix
	v['DSHLIB_MARKER'] = v['DSTLIB_MARKER'] = ''
	v['DSTLIB_ST'] = v['DSHLIB_ST'] = '-L-l%s'
	v['DSTLIBPATH_ST'] = v['DLIBPATH_ST'] = '-L-L%s'

	v['LINKFLAGS_dprogram']= ['-quiet']

	v['DFLAGS_dshlib'] = ['-fPIC']
	v['LINKFLAGS_dshlib'] = ['-L-shared']

	# header generation
	v['DHEADER_ext'] = '.di'
	v.DFLAGS_d_with_header = ['-H', '-Hf']
	v['D_HDR_F'] = '%s'
|
||||
def configure(conf):
	"""
	Configuration for *dmd*, *dmd2*, and *ldc*
	"""
	conf.find_dmd()

	if sys.platform == 'win32':
		out = conf.cmd_and_log([conf.env.D, '--help'])
		if "D Compiler v2." in out:
			conf.fatal('dmd2 on Windows is not supported, use gdc or ldc2 instead')

	conf.load('ar')
	conf.load('d')
	conf.common_flags_dmd()
	conf.d_platform_flags()

	# ldc needs a few flag overrides on top of the dmd defaults
	if 'ldc' in str(conf.env.D):
		conf.common_flags_ldc()
|
220
third_party/waf/waf-light/waflib/Tools/errcheck.py
vendored
Normal file
220
third_party/waf/waf-light/waflib/Tools/errcheck.py
vendored
Normal file
|
@ -0,0 +1,220 @@
|
|||
#! /usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2011 (ita)
|
||||
|
||||
"""
|
||||
errcheck: highlight common mistakes
|
||||
|
||||
There is a performance hit, so this tool is only loaded when running "waf -v"
|
||||
"""
|
||||
|
||||
# common misspelled task generator keywords -> correct spelling
typos = {
'feature':'features',
'sources':'source',
'targets':'target',
'include':'includes',
'export_include':'export_includes',
'define':'defines',
'importpath':'includes',
'installpath':'install_path',
'iscopy':'is_copy',
}

# BuildContext factory methods wrapped by the typo detector
meths_typos = ['__call__', 'program', 'shlib', 'stlib', 'objects']
|
||||
from waflib import Logs, Build, Node, Task, TaskGen, ConfigSet, Errors, Utils
|
||||
import waflib.Tools.ccroot
|
||||
|
||||
def check_same_targets(self):
	"""
	Report nodes that are created by more than one task, and tasks sharing the same uid.
	"""
	mp = Utils.defaultdict(list)
	uids = {}

	def record(tsk):
		# ignore anything that is not a real task instance
		if not isinstance(tsk, Task.Task):
			return

		for node in tsk.outputs:
			mp[node].append(tsk)
		uids.setdefault(tsk.uid(), []).append(tsk)

	for g in self.groups:
		for tg in g:
			try:
				for tsk in tg.tasks:
					record(tsk)
			except AttributeError:
				# raised if not a task generator, which should be uncommon
				record(tg)

	dupe = False
	for (k, v) in mp.items():
		if len(v) > 1:
			dupe = True
			msg = '* Node %r is created more than once%s. The task generators are:' % (k, Logs.verbose == 1 and " (full message on 'waf -v -v')" or "")
			Logs.error(msg)
			for i, x in enumerate(v):
				if Logs.verbose > 1:
					Logs.error(' %d. %r' % (1 + i, x.generator))
				else:
					Logs.error(' %d. %r in %r' % (1 + i, x.generator.name, getattr(x.generator, 'path', None)))

	if not dupe:
		for (k, v) in uids.items():
			if len(v) > 1:
				Logs.error('* Several tasks use the same identifier. Please check the information on\n http://docs.waf.googlecode.com/git/apidocs_16/Task.html#waflib.Task.Task.uid')
				for tsk in v:
					Logs.error(' - object %r (%r) defined in %r' % (tsk.__class__.__name__, tsk, tsk.generator))
|
||||
def check_invalid_constraints(self):
	"""
	Detect methods that carry both @extension and @feature/@before_method/@after_method
	annotations, and order constraints referencing unknown task classes.
	"""
	feat = set([])
	for x in list(TaskGen.feats.values()):
		# fix: set.union returns a new set and was discarded (a no-op);
		# update modifies the set in place as intended
		feat.update(x)
	for (x, y) in TaskGen.task_gen.prec.items():
		feat.add(x)
		feat.update(y)
	ext = set([])
	for x in TaskGen.task_gen.mappings.values():
		ext.add(x.__name__)
	invalid = ext & feat
	if invalid:
		Logs.error('The methods %r have invalid annotations: @extension <-> @feature/@before_method/@after_method' % list(invalid))

	# the build scripts have been read, so we can check for invalid after/before attributes on task classes
	for cls in list(Task.classes.values()):
		for x in ('before', 'after'):
			for y in Utils.to_list(getattr(cls, x, [])):
				if not Task.classes.get(y, None):
					Logs.error('Erroneous order constraint %r=%r on task class %r' % (x, y, cls.__name__))
		if getattr(cls, 'rule', None):
			Logs.error('Erroneous attribute "rule" on task class %r (rename to "run_str")' % cls.__name__)
|
||||
def replace(m):
	"""
	Wrap a BuildContext factory method so that misspelled keywords are reported.
	We could add properties, but they would not work in some cases:
	bld.program(...) requires 'source' in the attributes
	"""
	oldcall = getattr(Build.BuildContext, m)
	def call(self, *k, **kw):
		ret = oldcall(self, *k, **kw)
		for x in typos:
			if x in kw:
				# 'iscopy' is a valid keyword for subst task generators
				if x == 'iscopy' and 'subst' in getattr(self, 'features', ''):
					continue
				# fix: removed the unused local assignment 'err = True'
				Logs.error('Fix the typo %r -> %r on %r' % (x, typos[x], ret))
		return ret
	setattr(Build.BuildContext, m, call)
||||
|
||||
def enhance_lib():
	"""
	Modify existing waf classes and methods in place to add extra error
	checking (this is the entry point of the errcheck tool).
	"""
	# add typo detection on the bld(...) wrapper methods (program, shlib, ...)
	for m in meths_typos:
		replace(m)

	# catch '..' in ant_glob patterns
	def ant_glob(self, *k, **kw):
		if k:
			lst=Utils.to_list(k[0])
			for pat in lst:
				if '..' in pat.split('/'):
					Logs.error("In ant_glob pattern %r: '..' means 'two dots', not 'parent directory'" % k[0])
		if kw.get('remove', True):
			try:
				# globbing in the build directory with remove=True can delete build files
				if self.is_child_of(self.ctx.bldnode) and not kw.get('quiet', False):
					Logs.error('Using ant_glob on the build folder (%r) is dangerous (quiet=True to disable this warning)' % self)
			except AttributeError:
				pass
		return self.old_ant_glob(*k, **kw)
	Node.Node.old_ant_glob = Node.Node.ant_glob
	Node.Node.ant_glob = ant_glob

	# catch conflicting ext_in/ext_out/before/after declarations
	old = Task.is_before
	def is_before(t1, t2):
		ret = old(t1, t2)
		# a pair ordered both ways indicates contradictory constraints
		if ret and old(t2, t1):
			Logs.error('Contradictory order constraints in classes %r %r' % (t1, t2))
		return ret
	Task.is_before = is_before

	# check for bld(feature='cshlib') where no 'c' is given - this can be either a mistake or on purpose
	# so we only issue a warning
	def check_err_features(self):
		lst = self.to_list(self.features)
		if 'shlib' in lst:
			Logs.error('feature shlib -> cshlib, dshlib or cxxshlib')
		for x in ('c', 'cxx', 'd', 'fc'):
			if not x in lst and lst and lst[0] in [x+y for y in ('program', 'shlib', 'stlib')]:
				Logs.error('%r features is probably missing %r' % (self, x))
	TaskGen.feature('*')(check_err_features)

	# check for erroneous order constraints on task generators
	def check_err_order(self):
		if not hasattr(self, 'rule') and not 'subst' in Utils.to_list(self.features):
			# before/after/ext_in/ext_out only make sense on rule-based generators
			for x in ('before', 'after', 'ext_in', 'ext_out'):
				if hasattr(self, x):
					Logs.warn('Erroneous order constraint %r on non-rule based task generator %r' % (x, self))
		else:
			for x in ('before', 'after'):
				for y in self.to_list(getattr(self, x, [])):
					if not Task.classes.get(y, None):
						Logs.error('Erroneous order constraint %s=%r on %r (no such class)' % (x, y, self))
	TaskGen.feature('*')(check_err_order)

	# wrap BuildContext.compile to run the constraint/target checks around it
	def check_compile(self):
		check_invalid_constraints(self)
		try:
			ret = self.orig_compile()
		finally:
			# always report duplicate targets, even when the build fails
			check_same_targets(self)
		return ret
	Build.BuildContext.orig_compile = Build.BuildContext.compile
	Build.BuildContext.compile = check_compile

	# check for invalid build groups #914
	def use_rec(self, name, **kw):
		try:
			y = self.bld.get_tgen_by_name(name)
		except Errors.WafError:
			pass
		else:
			idx = self.bld.get_group_idx(self)
			odx = self.bld.get_group_idx(y)
			# using a target declared in a later build group cannot work
			if odx > idx:
				msg = "Invalid 'use' across build groups:"
				if Logs.verbose > 1:
					msg += '\n target %r\n uses:\n %r' % (self, y)
				else:
					msg += " %r uses %r (try 'waf -v -v' for the full error)" % (self.name, name)
				raise Errors.WafError(msg)
		self.orig_use_rec(name, **kw)
	TaskGen.task_gen.orig_use_rec = TaskGen.task_gen.use_rec
	TaskGen.task_gen.use_rec = use_rec

	# check for env.append (a frequent typo for append_value/append_unique)
	def getattri(self, name, default=None):
		if name == 'append' or name == 'add':
			raise Errors.WafError('env.append and env.add do not exist: use env.append_value/env.append_unique')
		elif name == 'prepend':
			raise Errors.WafError('env.prepend does not exist: use env.prepend_value')
		if name in self.__slots__:
			# NOTE(review): object has no '__getattr__' — this branch would raise
			# AttributeError if ever reached; __getattr__ is only called when the
			# normal (slot) lookup already failed, so in practice it is not hit
			return object.__getattr__(self, name, default)
		else:
			return self[name]
	ConfigSet.ConfigSet.__getattr__ = getattri
|
||||
|
||||
|
||||
def options(opt):
	"""Activate the extra error checks by patching the waf classes (tool entry point)."""
	enhance_lib()
|
||||
|
||||
def configure(conf):
	"""This tool needs no configuration step; everything happens in options()."""
|
||||
|
199
third_party/waf/waf-light/waflib/Tools/fc.py
vendored
Normal file
199
third_party/waf/waf-light/waflib/Tools/fc.py
vendored
Normal file
|
@ -0,0 +1,199 @@
|
|||
#! /usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# DC 2008
|
||||
# Thomas Nagy 2010 (ita)
|
||||
|
||||
"""
|
||||
fortran support
|
||||
"""
|
||||
|
||||
import re
|
||||
|
||||
from waflib import Utils, Task, TaskGen, Logs
|
||||
from waflib.Tools import ccroot, fc_config, fc_scan
|
||||
from waflib.TaskGen import feature, before_method, after_method, extension
|
||||
from waflib.Configure import conf
|
||||
|
||||
# Register the environment variables consumed by the fortran features
# (see ccroot.propagate_uselib_vars): compile flags for 'fc', link flags
# for the program/shlib features, archive flags for static libraries.
ccroot.USELIB_VARS['fc'] = set(['FCFLAGS', 'DEFINES', 'INCLUDES'])
ccroot.USELIB_VARS['fcprogram_test'] = ccroot.USELIB_VARS['fcprogram'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS'])
ccroot.USELIB_VARS['fcshlib'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS'])
ccroot.USELIB_VARS['fcstlib'] = set(['ARFLAGS', 'LINKDEPS'])
|
||||
|
||||
@feature('fcprogram', 'fcshlib', 'fcstlib', 'fcprogram_test')
def dummy(self):
	"""No-op hook; it merely registers the fortran link features so they are valid."""
|
||||
|
||||
@extension('.f', '.f90', '.F', '.F90', '.for', '.FOR')
def fc_hook(self, node):
	"""Map the usual Fortran file extensions to a :py:class:`waflib.Tools.fc.fc` compilation task."""
	task = self.create_compiled_task('fc', node)
	return task
|
||||
|
||||
@conf
def modfile(conf, name):
	"""
	Turn a fortran module name into the corresponding module file name,
	according to conf.env.FC_MOD_CAPITALIZATION (default: all lower case).
	"""
	scheme = conf.env.FC_MOD_CAPITALIZATION or 'lower'
	mapping = {
		'lower'     : name.lower() + '.mod',
		'lower.MOD' : name.upper() + '.MOD',
		'UPPER.mod' : name.upper() + '.mod',
		'UPPER'     : name.upper() + '.MOD',
	}
	return mapping[scheme]
|
||||
|
||||
def get_fortran_tasks(tsk):
	"""
	Return the other fortran tasks from the same build group as *tsk*,
	excluding those marked 'nomod' or already processed ('mod_fortran_done').
	"""
	bld = tsk.generator.bld
	group = bld.get_tasks_group(bld.get_group_idx(tsk.generator))
	def wanted(t):
		return isinstance(t, fc) and not getattr(t, 'nomod', None) and not getattr(t, 'mod_fortran_done', None)
	return [t for t in group if wanted(t)]
|
||||
|
||||
class fc(Task.Task):
	"""
	Compile a fortran source file.

	The fortran tasks can only run when all fortran tasks in the current group are ready to be executed
	This may cause a deadlock if another fortran task is waiting for something that cannot happen (circular dependency)
	in this case, set the 'nomod=True' on those tasks instances to break the loop
	"""

	color = 'GREEN'
	run_str = '${FC} ${FCFLAGS} ${FCINCPATH_ST:INCPATHS} ${FCDEFINES_ST:DEFINES} ${_FCMODOUTFLAGS} ${FC_TGT_F}${TGT[0].abspath()} ${FC_SRC_F}${SRC[0].abspath()}'
	vars = ["FORTRANMODPATHFLAG"]

	def scan(self):
		"""scanner for fortran dependencies

		Returns (nodes, names): resolved include nodes, plus raw names such
		as 'USE@mod'/'MOD@mod' produced by fc_scan.fortran_parser.
		"""
		tmp = fc_scan.fortran_parser(self.generator.includes_nodes)
		tmp.task = self
		tmp.start(self.inputs[0])
		if Logs.verbose:
			Logs.debug('deps: deps for %r: %r; unresolved %r' % (self.inputs, tmp.nodes, tmp.names))
		return (tmp.nodes, tmp.names)

	def runnable_status(self):
		"""
		Set the mod file outputs and the dependencies on the mod files over all the fortran tasks
		executed by the main thread so there are no concurrency issues
		"""
		if getattr(self, 'mod_fortran_done', None):
			return super(fc, self).runnable_status()

		# now, if we reach this part it is because this fortran task is the first in the list
		bld = self.generator.bld

		# obtain the fortran tasks
		lst = get_fortran_tasks(self)

		# disable this method for other tasks
		for tsk in lst:
			tsk.mod_fortran_done = True

		# wait for all the .f tasks to be ready for execution
		# and ensure that the scanners are called at least once
		for tsk in lst:
			ret = tsk.runnable_status()
			if ret == Task.ASK_LATER:
				# we have to wait for one of the other fortran tasks to be ready
				# this may deadlock if there are dependencies between the fortran tasks
				# but this should not happen (we are setting them here!)
				for x in lst:
					x.mod_fortran_done = None

				# TODO sort the list of tasks in bld.producer.outstanding to put all fortran tasks at the end
				return Task.ASK_LATER

		ins = Utils.defaultdict(set)
		outs = Utils.defaultdict(set)

		# the .mod files to create (scanner reported 'MOD@<name>' entries)
		for tsk in lst:
			key = tsk.uid()
			for x in bld.raw_deps[key]:
				if x.startswith('MOD@'):
					name = bld.modfile(x.replace('MOD@', ''))
					node = bld.srcnode.find_or_declare(name)
					tsk.set_outputs(node)
					outs[id(node)].add(tsk)

		# the .mod files to use (scanner reported 'USE@<name>' entries)
		for tsk in lst:
			key = tsk.uid()
			for x in bld.raw_deps[key]:
				if x.startswith('USE@'):
					name = bld.modfile(x.replace('USE@', ''))
					node = bld.srcnode.find_resource(name)
					if node and node not in tsk.outputs:
						if not node in bld.node_deps[key]:
							bld.node_deps[key].append(node)
						ins[id(node)].add(tsk)

		# if the intersection matches, set the order: consumers run after producers
		for k in ins.keys():
			for a in ins[k]:
				a.run_after.update(outs[k])

				# the scanner cannot output nodes, so we have to set them
				# ourselves as task.dep_nodes (additional input nodes)
				tmp = []
				for t in outs[k]:
					tmp.extend(t.outputs)
				a.dep_nodes.extend(tmp)
				# sorted for a stable task signature
				a.dep_nodes.sort(key=lambda x: x.abspath())

		# the task objects have changed: clear the signature cache
		for tsk in lst:
			try:
				delattr(tsk, 'cache_sig')
			except AttributeError:
				pass

		return super(fc, self).runnable_status()
|
||||
|
||||
class fcprogram(ccroot.link_task):
	"""Link fortran programs (installed to ${BINDIR})"""
	inst_to = '${BINDIR}'
	color = 'YELLOW'
	run_str = '${FC} ${LINKFLAGS} ${FCLNK_SRC_F}${SRC} ${FCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FCSTLIB_MARKER} ${FCSTLIBPATH_ST:STLIBPATH} ${FCSTLIB_ST:STLIB} ${FCSHLIB_MARKER} ${FCLIBPATH_ST:LIBPATH} ${FCLIB_ST:LIB}'
|
||||
|
||||
class fcshlib(fcprogram):
	"""Link fortran shared libraries (same command as fcprogram, installed to ${LIBDIR})"""
	inst_to = '${LIBDIR}'
|
||||
|
||||
class fcprogram_test(fcprogram):
	"""Custom link task to obtain the compiler outputs for fortran configuration tests"""

	def can_retrieve_cache(self):
		"""This task is always executed (never restored from the cache)"""
		return False

	def runnable_status(self):
		"""This task is always executed (SKIP_ME is upgraded to RUN_ME)"""
		ret = super(fcprogram_test, self).runnable_status()
		if ret == Task.SKIP_ME:
			ret = Task.RUN_ME
		return ret

	def exec_command(self, cmd, **kw):
		"""Store the compiler std our/err onto the build context, to bld.out + bld.err

		Returns -1 on failure; the implicit None on success is treated as
		a zero exit status by the caller.
		"""
		bld = self.generator.bld

		kw['shell'] = isinstance(cmd, str)
		kw['stdout'] = kw['stderr'] = Utils.subprocess.PIPE
		kw['cwd'] = bld.variant_dir
		# reset so a failed run does not leave stale output behind
		bld.out = bld.err = ''

		bld.to_log('command: %s\n' % cmd)

		kw['output'] = 0
		try:
			(bld.out, bld.err) = bld.cmd_and_log(cmd, **kw)
		except Exception as e:
			# NOTE(review): the exception 'e' is intentionally swallowed —
			# configuration tests interpret -1 as "did not link"
			return -1

		if bld.out:
			bld.to_log("out: %s\n" % bld.out)
		if bld.err:
			bld.to_log("err: %s\n" % bld.err)
|
||||
|
||||
class fcstlib(ccroot.stlink_task):
	"""Link fortran static libraries (uses ar by default)"""
	# the class exists only so the 'fcstlib' feature maps to stlink_task
	pass # do not remove the pass statement
|
||||
|
468
third_party/waf/waf-light/waflib/Tools/fc_config.py
vendored
Normal file
468
third_party/waf/waf-light/waflib/Tools/fc_config.py
vendored
Normal file
|
@ -0,0 +1,468 @@
|
|||
#! /usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# DC 2008
|
||||
# Thomas Nagy 2010 (ita)
|
||||
|
||||
"""
|
||||
Fortran configuration helpers
|
||||
"""
|
||||
|
||||
import re, shutil, os, sys, string, shlex
|
||||
from waflib.Configure import conf
|
||||
from waflib.TaskGen import feature, after_method, before_method
|
||||
from waflib import Build, Utils
|
||||
|
||||
# Minimal fortran programs used by the configuration tests in this module
FC_FRAGMENT = ' program main\n end program main\n'
FC_FRAGMENT2 = ' PROGRAM MAIN\n END\n' # what's the actual difference between these?
|
||||
|
||||
@conf
def fc_flags(conf):
	"""
	Define common fortran configuration flags and file-name patterns
	(gcc-style defaults; compiler tools override them where needed).
	"""
	v = conf.env

	# compilation
	v['FC_SRC_F'] = []
	v['FC_TGT_F'] = ['-c', '-o']
	v['FCINCPATH_ST'] = '-I%s'
	v['FCDEFINES_ST'] = '-D%s'

	# linking (default to the compiler driver)
	if not v['LINK_FC']: v['LINK_FC'] = v['FC']
	v['FCLNK_SRC_F'] = []
	v['FCLNK_TGT_F'] = ['-o']

	# shared and static library flags / name patterns
	v['FCFLAGS_fcshlib'] = ['-fpic']
	v['LINKFLAGS_fcshlib'] = ['-shared']
	v['fcshlib_PATTERN'] = 'lib%s.so'
	v['fcstlib_PATTERN'] = 'lib%s.a'

	# library lookup
	v['FCLIB_ST'] = '-l%s'
	v['FCLIBPATH_ST'] = '-L%s'
	v['FCSTLIB_ST'] = '-l%s'
	v['FCSTLIBPATH_ST'] = '-L%s'
	v['FCSTLIB_MARKER'] = '-Wl,-Bstatic'
	v['FCSHLIB_MARKER'] = '-Wl,-Bdynamic'

	v['SONAME_ST'] = '-Wl,-h,%s'
|
||||
|
||||
@conf
def fc_add_flags(conf):
	"""Import FCFLAGS and LDFLAGS from the OS environment into the configuration set."""
	conf.add_os_flags('FCFLAGS')
	conf.add_os_flags('LDFLAGS', 'LINKFLAGS')
|
||||
|
||||
@conf
def check_fortran(self, *k, **kw):
	"""Verify that the fortran compiler works by building a trivial program."""
	self.check_cc(
		features = 'fc fcprogram',
		fragment = FC_FRAGMENT,
		compile_filename = 'test.f',
		msg = 'Compiling a simple fortran app')
|
||||
|
||||
@conf
def check_fc(self, *k, **kw):
	"""
	Same as :py:func:`waflib.Tools.c_config.check` but default to the *Fortran* programming language
	(Overriding the C defaults in :py:func:`waflib.Tools.c_config.validate_c` here)
	"""
	kw['compiler'] = 'fc'
	kw.setdefault('compile_mode', 'fc')
	kw.setdefault('type', 'fcprogram')
	kw.setdefault('compile_filename', 'test.f90')
	kw.setdefault('code', FC_FRAGMENT)
	return self.check(*k, **kw)
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
# --- These are the default platform modifiers, refactored here for
|
||||
# convenience. gfortran and g95 have much overlap.
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
@conf
def fortran_modifier_darwin(conf):
	"""Adjust the fortran flags and name patterns for OSX."""
	v = conf.env

	# shared libraries: PIC code, dylib naming and versioning
	v['FCFLAGS_fcshlib'] = ['-fPIC']
	v['LINKFLAGS_fcshlib'] = ['-dynamiclib', '-Wl,-compatibility_version,1', '-Wl,-current_version,1']
	v['fcshlib_PATTERN'] = 'lib%s.dylib'

	# framework support
	v['FRAMEWORKPATH_ST'] = '-F%s'
	v['FRAMEWORK_ST'] = '-framework %s'

	v['LINKFLAGS_fcstlib'] = []

	# the darwin linker has no -Bstatic/-Bdynamic or soname flags
	v['FCSHLIB_MARKER'] = ''
	v['FCSTLIB_MARKER'] = ''
	v['SONAME_ST'] = ''
|
||||
|
||||
|
||||
@conf
def fortran_modifier_win32(conf):
	"""Adjust the fortran flags and name patterns for the windows platforms."""
	v = conf.env

	v['fcprogram_PATTERN'] = v['fcprogram_test_PATTERN'] = '%s.exe'
	v['fcshlib_PATTERN'] = '%s.dll'
	v['implib_PATTERN'] = 'lib%s.dll.a'
	v['IMPLIB_ST'] = '-Wl,--out-implib,%s'

	v['FCFLAGS_fcshlib'] = []
	v.append_value('FCFLAGS_fcshlib', ['-DDLL_EXPORT']) # TODO adding nonstandard defines like this DLL_EXPORT is not a good idea

	# Auto-import is enabled by default even without this option,
	# but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages
	# that the linker emits otherwise.
	v.append_value('LINKFLAGS', ['-Wl,--enable-auto-import'])
|
||||
|
||||
@conf
def fortran_modifier_cygwin(conf):
	"""Adjust the fortran flags for cygwin (windows settings plus cygwin naming)."""
	fortran_modifier_win32(conf)
	v = conf.env
	v['fcshlib_PATTERN'] = 'cyg%s.dll'
	v.append_value('LINKFLAGS_fcshlib', ['-Wl,--enable-auto-image-base'])
	v['FCFLAGS_fcshlib'] = []
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
@conf
def check_fortran_dummy_main(self, *k, **kw):
	"""
	Guess if a main function is needed by compiling a code snippet with
	the C compiler and link with the Fortran compiler

	On success sets self.env.FC_MAIN to the required symbol name, or to -1
	when no dummy main is needed.

	TODO: (DC)
	- handling dialects (F77, F90, etc... -> needs core support first)
	- fix dummy main check (AC_FC_DUMMY_MAIN vs AC_FC_MAIN)

	TODO: what does the above mean? (ita)
	"""

	if not self.env.CC:
		self.fatal('A c compiler is required for check_fortran_dummy_main')

	# candidate symbol names, upper case first, then lower case,
	# then '' meaning "no dummy main needed"
	lst = ['MAIN__', '__MAIN', '_MAIN', 'MAIN_', 'MAIN']
	lst.extend([m.lower() for m in lst])
	lst.append('')

	self.start_msg('Detecting whether we need a dummy main')
	for main in lst:
		kw['fortran_main'] = main
		try:
			self.check_cc(
				fragment = 'int %s() { return 0; }\n' % (main or 'test'),
				features = 'c fcprogram',
				mandatory = True
			)
			if not main:
				self.env.FC_MAIN = -1
				self.end_msg('no')
			else:
				self.env.FC_MAIN = main
				self.end_msg('yes %s' % main)
			break
		except self.errors.ConfigurationError:
			pass
	else:
		# no candidate linked: report and abort the configuration
		self.end_msg('not found')
		self.fatal('could not detect whether fortran requires a dummy main, see the config.log')
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
# Patterns applied to the verbose link output of the fortran compiler.
# Raw strings are required: '\S' in a plain string is an invalid escape
# sequence (DeprecationWarning in Python 3.6+, an error in future versions).
GCC_DRIVER_LINE = re.compile('^Driving:')          # gcc driver noise to skip
POSIX_STATIC_EXT = re.compile(r'\S+\.a')           # a static archive on the link line
POSIX_LIB_FLAGS = re.compile(r'-l\S+')             # a -l<lib> flag
|
||||
|
||||
@conf
def is_link_verbose(self, txt):
	"""Return True if 'useful' link options can be found in txt"""
	assert isinstance(txt, str)
	for line in txt.splitlines():
		if GCC_DRIVER_LINE.search(line):
			# skip the gcc driver banner lines
			continue
		if POSIX_STATIC_EXT.search(line) or POSIX_LIB_FLAGS.search(line):
			return True
	return False
|
||||
|
||||
@conf
def check_fortran_verbose_flag(self, *k, **kw):
	"""
	Check what kind of verbose (-v) flag works, then set it to env.FC_VERBOSE_FLAG
	"""
	self.start_msg('fortran link verbose flag')
	for x in ['-v', '--verbose', '-verbose', '-V']:
		try:
			self.check_cc(
				features = 'fc fcprogram_test',
				fragment = FC_FRAGMENT2,
				compile_filename = 'test.f',
				linkflags = [x],
				mandatory=True
				)
		except self.errors.ConfigurationError:
			pass
		else:
			# output is on stderr or stdout (for xlf)
			if self.is_link_verbose(self.test_bld.err) or self.is_link_verbose(self.test_bld.out):
				self.end_msg(x)
				break
	else:
		# either nothing linked or no candidate produced verbose output
		self.end_msg('failure')
		self.fatal('Could not obtain the fortran link verbose flag (see config.log)')

	# 'x' is the loop variable left over by the successful 'break' above
	self.env.FC_VERBOSE_FLAG = x
	return x
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
# linkflags which match those are ignored
|
||||
LINKFLAGS_IGNORED = [r'-lang*', r'-lcrt[a-zA-Z0-9\.]*\.o', r'-lc$', r'-lSystem', r'-libmil', r'-LIST:*', r'-LNO:*']
|
||||
if os.name == 'nt':
|
||||
LINKFLAGS_IGNORED.extend([r'-lfrt*', r'-luser32', r'-lkernel32', r'-ladvapi32', r'-lmsvcrt', r'-lshell32', r'-lmingw', r'-lmoldname'])
|
||||
else:
|
||||
LINKFLAGS_IGNORED.append(r'-lgcc*')
|
||||
RLINKFLAGS_IGNORED = [re.compile(f) for f in LINKFLAGS_IGNORED]
|
||||
|
||||
def _match_ignore(line):
|
||||
"""Returns True if the line should be ignored (fortran test for verbosity)."""
|
||||
for i in RLINKFLAGS_IGNORED:
|
||||
if i.match(line):
|
||||
return True
|
||||
return False
|
||||
|
||||
def parse_fortran_link(lines):
	"""Given the output of verbose link of Fortran compiler, this returns a
	list of flags necessary for linking using the standard linker."""
	# TODO: On windows ?
	flags = []
	for line in lines:
		if GCC_DRIVER_LINE.match(line):
			continue
		_parse_flink_line(line, flags)
	return flags
|
||||
|
||||
# options whose argument follows as the next token (-L dir, -R dir, ...)
SPACE_OPTS = re.compile('^-[LRuYz]$')
# options carrying their argument inline (-Ldir, -Rdir)
NOSPACE_OPTS = re.compile('^-[RL]')

def _parse_flink_line(line, final_flags):
	"""Tokenize one line of verbose link output and append the usable
	link flags to *final_flags* (private helper of parse_fortran_link)."""
	lexer = shlex.shlex(line, posix = True)
	lexer.whitespace_split = True

	t = lexer.get_token()
	tmp_flags = []
	while t:
		# 'parse' consumes the current token (and possibly the following
		# one for SPACE_OPTS) and returns the next token to process
		def parse(token):
			# Here we go (convention for wildcard is shell, not regex !)
			#   1 TODO: we first get some root .a libraries
			#   2 TODO: take everything starting by -bI:*
			#   3 Ignore the following flags: -lang* | -lcrt*.o | -lc |
			#     -lgcc* | -lSystem | -libmil | -LANG:=* | -LIST:* | -LNO:*)
			#   4 take into account -lkernel32
			#   5 For options of the kind -[[LRuYz]], as they take one argument
			#     after, the actual option is the next token
			#   6 For -YP,*: take and replace by -Larg where arg is the old
			#     argument
			#   7 For -[lLR]*: take

			# step 3
			if _match_ignore(token):
				pass
			# step 4
			elif token.startswith('-lkernel32') and sys.platform == 'cygwin':
				tmp_flags.append(token)
			# step 5
			elif SPACE_OPTS.match(token):
				t = lexer.get_token()
				if t.startswith('P,'):
					t = t[2:]
				for opt in t.split(os.pathsep):
					tmp_flags.append('-L%s' % opt)
			# step 6
			elif NOSPACE_OPTS.match(token):
				tmp_flags.append(token)
			# step 7
			elif POSIX_LIB_FLAGS.match(token):
				tmp_flags.append(token)
			else:
				# ignore anything not explicitely taken into account
				pass

			t = lexer.get_token()
			return t
		t = parse(t)

	final_flags.extend(tmp_flags)
	return final_flags
|
||||
|
||||
@conf
def check_fortran_clib(self, autoadd=True, *k, **kw):
	"""
	Obtain the flags for linking with the C library.
	If this check works, add uselib='CLIB' to your task generators.

	Requires env.FC_VERBOSE_FLAG (run check_fortran_verbose_flag first).
	Returns the list of flags found (also stored in env.LINKFLAGS_CLIB),
	or [] when the check fails and mandatory=False.
	"""
	if not self.env.FC_VERBOSE_FLAG:
		self.fatal('env.FC_VERBOSE_FLAG is not set: execute check_fortran_verbose_flag?')

	self.start_msg('Getting fortran runtime link flags')
	try:
		self.check_cc(
			fragment = FC_FRAGMENT2,
			compile_filename = 'test.f',
			features = 'fc fcprogram_test',
			linkflags = [self.env.FC_VERBOSE_FLAG]
		)
	except Exception:
		self.end_msg(False)
		if kw.get('mandatory', True):
			# bug fix: this used to call 'conf.fatal' but no name 'conf'
			# exists in this scope, so a NameError masked the real error;
			# 'self' is the configuration context here
			self.fatal('Could not find the c library flags')
	else:
		# parse the verbose link output captured by fcprogram_test
		out = self.test_bld.err
		flags = parse_fortran_link(out.splitlines())
		self.end_msg('ok (%s)' % ' '.join(flags))
		self.env.LINKFLAGS_CLIB = flags
		return flags
	return []
|
||||
|
||||
def getoutput(conf, cmd, stdin=False):
	"""
	Run *cmd* and return its (stdout, stderr) decoded to str; aborts the
	configuration when the command cannot be executed.
	TODO a bit redundant, can be removed anytime
	"""
	pipe = Utils.subprocess.PIPE
	child_stdin = pipe if stdin else None
	env = conf.env.env or None
	try:
		p = Utils.subprocess.Popen(cmd, stdin=child_stdin, stdout=pipe, stderr=pipe, env=env)
		if stdin:
			# some compilers wait for input before printing their version
			p.stdin.write('\n'.encode())
		out, err = p.communicate()
	except Exception:
		conf.fatal('could not determine the compiler version %r' % cmd)
	if not isinstance(out, str):
		out = out.decode(sys.stdout.encoding or 'iso8859-1')
	if not isinstance(err, str):
		err = err.decode(sys.stdout.encoding or 'iso8859-1')
	return (out, err)
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
# Fortran side of the mangling test: two routines, one containing an
# underscore (compilers may mangle underscored names differently).
# NOTE(review): fixed-form fortran requires statements to start at column 7 —
# the leading six spaces below are assumed from that convention; confirm
# against the upstream file if the compilation test misbehaves.
ROUTINES_CODE = """\
      subroutine foobar()
      return
      end
      subroutine foo_bar()
      return
      end
"""

# C side of the mangling test; the %(...)s placeholders are filled from the
# task generator attributes in link_main_routines_tg_method below.
MAIN_CODE = """
void %(dummy_func_nounder)s(void);
void %(dummy_func_under)s(void);
int %(main_func_name)s() {
  %(dummy_func_nounder)s();
  %(dummy_func_under)s();
  return 0;
}
"""
|
||||
|
||||
@feature('link_main_routines_func')
@before_method('process_source')
def link_main_routines_tg_method(self):
	"""
	The configuration test declares a unique task generator,
	so we create other task generators from there for fortran link tests
	"""
	def emit_code(task):
		# write the code attached to the task generator into the target file
		task.outputs[0].write(task.generator.code)
	bld = self.bld
	bld(rule=emit_code, target='main.c', code=MAIN_CODE % self.__dict__)
	bld(rule=emit_code, target='test.f', code=ROUTINES_CODE)
	bld(features='fc fcstlib', source='test.f', target='test')
	bld(features='c fcprogram', source='main.c', target='app', use='test')
|
||||
|
||||
def mangling_schemes():
	"""
	Yield (underscore, double-underscore, case) triplets for use with
	mangle_name (used in check_fortran_mangling); the order is tuned
	for gfortran.
	"""
	for under in ('_', ''):
		for double_under in ('', '_'):
			for case in ('lower', 'upper'):
				yield (under, double_under, case)
|
||||
|
||||
def mangle_name(u, du, c, name):
	"""Mangle a name from a (u, du, c) triplet (used in check_fortran_mangling):
	apply the case transform *c*, append *u*, and append *du* as well when the
	name contains an underscore."""
	mangled = getattr(name, c)() + u
	if '_' in name:
		mangled += du
	return mangled
|
||||
|
||||
@conf
def check_fortran_mangling(self, *k, **kw):
	"""
	Detect the mangling scheme, sets FORTRAN_MANGLING to the triplet found

	This test will compile a fortran static library, then link a c app against it
	"""
	if not self.env.CC:
		self.fatal('A c compiler is required for link_main_routines')
	if not self.env.FC:
		self.fatal('A fortran compiler is required for link_main_routines')
	if not self.env.FC_MAIN:
		self.fatal('Checking for mangling requires self.env.FC_MAIN (execute "check_fortran_dummy_main" first?)')

	self.start_msg('Getting fortran mangling scheme')
	for (u, du, c) in mangling_schemes():
		try:
			self.check_cc(
				compile_filename = [],
				features = 'link_main_routines_func',
				msg = 'nomsg',
				errmsg = 'nomsg',
				mandatory=True,
				dummy_func_nounder = mangle_name(u, du, c, "foobar"),
				dummy_func_under = mangle_name(u, du, c, "foo_bar"),
				main_func_name = self.env.FC_MAIN
			)
		except self.errors.ConfigurationError:
			pass
		else:
			self.end_msg("ok ('%s', '%s', '%s-case')" % (u, du, c))
			self.env.FORTRAN_MANGLING = (u, du, c)
			break
	else:
		# no scheme linked: fatal raises, so the return below is unreachable
		self.end_msg(False)
		self.fatal('mangler not found')

	# (u, du, c) are the loop variables left over by the successful 'break'
	return (u, du, c)
|
||||
|
||||
@feature('pyext')
@before_method('propagate_uselib_vars', 'apply_link')
def set_lib_pat(self):
	"""Name fortran python extensions with the python extension pattern."""
	self.env['fcshlib_PATTERN'] = self.env['pyext_PATTERN']
|
||||
|
||||
@conf
def detect_openmp(self):
	"""Try the known OpenMP compiler flags in turn; the first one that
	compiles and links is stored under the OPENMP uselib variable.
	Fatal error when none of them works."""
	for flag in ('-fopenmp', '-openmp', '-mp', '-xopenmp', '-omp', '-qsmp=omp'):
		try:
			self.check_fc(
				msg='Checking for OpenMP flag %s' % flag,
				fragment='program main\n call omp_get_num_threads()\nend program main',
				fcflags=flag,
				linkflags=flag,
				uselib_store='OPENMP'
			)
		except self.errors.ConfigurationError:
			continue
		else:
			return
	self.fatal('Could not find OpenMP')
|
||||
|
121
third_party/waf/waf-light/waflib/Tools/fc_scan.py
vendored
Normal file
121
third_party/waf/waf-light/waflib/Tools/fc_scan.py
vendored
Normal file
|
@ -0,0 +1,121 @@
|
|||
#! /usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# DC 2008
|
||||
# Thomas Nagy 2010 (ita)
|
||||
|
||||
import re
|
||||
|
||||
from waflib import Utils, Task, TaskGen, Logs
|
||||
from waflib.TaskGen import feature, before_method, after_method, extension
|
||||
from waflib.Configure import conf
|
||||
|
||||
# Regular expressions extracting fortran dependencies, applied line by line
# (case-insensitively) by fortran_parser.find_deps.
# Raw strings are required: '\s'/'\w' in plain strings are invalid escape
# sequences (DeprecationWarning in Python 3.6+, an error in future versions).
INC_REGEX = r"""(?:^|['">]\s*;)\s*INCLUDE\s+(?:\w+_)?[<"'](.+?)(?=["'>])"""   # INCLUDE 'file'
USE_REGEX = r"""(?:^|;)\s*USE(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)"""   # USE mod / USE, INTRINSIC :: mod
MOD_REGEX = r"""(?:^|;)\s*MODULE(?!\s*PROCEDURE)(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)"""   # MODULE name (not MODULE PROCEDURE)

re_inc = re.compile(INC_REGEX, re.I)
re_use = re.compile(USE_REGEX, re.I)
re_mod = re.compile(MOD_REGEX, re.I)
|
||||
|
||||
class fortran_parser(object):
	"""
	This parser will return:

	* the nodes corresponding to the module names that will be produced
	* the nodes corresponding to the include files used
	* the module names used by the fortran file

	Modules used and provided are reported as the strings 'USE@<name>'
	and 'MOD@<name>' in self.names, and resolved later (fc.runnable_status).
	"""

	def __init__(self, incpaths):
		self.seen = []
		"""Files already parsed"""

		self.nodes = []
		"""List of :py:class:`waflib.Node.Node` representing the dependencies to return"""

		self.names = []
		"""List of module names to return"""

		self.incpaths = incpaths
		"""List of :py:class:`waflib.Node.Node` representing the include paths"""

	def find_deps(self, node):
		"""
		Parse a fortran file to read the dependencies used and provided

		:param node: fortran file to read
		:type node: :py:class:`waflib.Node.Node`
		:return: lists representing the includes, the modules used, and the modules created by a fortran file
		:rtype: tuple of list of strings
		"""
		txt = node.read()
		incs = []
		uses = []
		mods = []
		for line in txt.splitlines():
			# line by line regexp search? optimize?
			m = re_inc.search(line)
			if m:
				incs.append(m.group(1))
			m = re_use.search(line)
			if m:
				uses.append(m.group(1))
			m = re_mod.search(line)
			if m:
				mods.append(m.group(1))
		return (incs, uses, mods)

	def start(self, node):
		"""
		Start the parsing. Use the stack self.waiting to hold the nodes to iterate on

		:param node: fortran file
		:type node: :py:class:`waflib.Node.Node`
		"""
		self.waiting = [node]
		while self.waiting:
			nd = self.waiting.pop(0)
			self.iter(nd)

	def iter(self, node):
		"""
		Process a single file in the search for dependencies, extract the files used
		the modules used, and the modules provided.
		"""
		# NOTE(review): 'path' is computed but never used
		path = node.abspath()
		incs, uses, mods = self.find_deps(node)
		for x in incs:
			if x in self.seen:
				continue
			self.seen.append(x)
			self.tryfind_header(x)

		for x in uses:
			name = "USE@%s" % x
			if not name in self.names:
				self.names.append(name)

		for x in mods:
			name = "MOD@%s" % x
			if not name in self.names:
				self.names.append(name)

	def tryfind_header(self, filename):
		"""
		Try to find an include and add it the nodes to process

		:param filename: file name
		:type filename: string

		Unresolved include names are kept in self.names so the build can
		still detect changes through the unresolved-dependency mechanism.
		"""
		found = None
		for n in self.incpaths:
			found = n.find_resource(filename)
			if found:
				self.nodes.append(found)
				self.waiting.append(found)
				break
		if not found:
			if not filename in self.names:
				self.names.append(filename)
|
||||
|
||||
|
52
third_party/waf/waf-light/waflib/Tools/flex.py
vendored
Normal file
52
third_party/waf/waf-light/waflib/Tools/flex.py
vendored
Normal file
|
@ -0,0 +1,52 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# John O'Meara, 2006
|
||||
# Thomas Nagy, 2006-2010 (ita)
|
||||
|
||||
"""
|
||||
The **flex** program is a code generator which creates C or C++ files.
|
||||
The generated files are compiled into object files.
|
||||
"""
|
||||
|
||||
import waflib.TaskGen, os, re
|
||||
|
||||
def decide_ext(self, node):
	"""Choose the extension of the generated lexer source: C++ when the
	task generator carries the 'cxx' feature, plain C otherwise."""
	suffix = '.lex.cc' if 'cxx' in self.features else '.lex.c'
	return [suffix]
|
||||
|
||||
def flexfun(tsk):
	"""Run flex and write its output to the target node with normalized
	line endings (issue #1207). The full argument list, before empty-string
	filtering, is kept on tsk.last_cmd for error reporting."""
	env = tsk.env
	bld = tsk.generator.bld
	wd = bld.variant_dir

	def as_list(val):
		return [val] if isinstance(val, str) else val

	cmd = []
	tsk.last_cmd = cmd
	cmd.extend(as_list(env['FLEX']))
	cmd.extend(as_list(env['FLEXFLAGS']))
	inputs = [node.path_from(bld.bldnode) for node in tsk.inputs]
	if env.FLEX_MSYS:
		# the MSYS flex expects forward slashes
		inputs = [p.replace(os.sep, '/') for p in inputs]
	cmd.extend(inputs)
	argv = [arg for arg in cmd if arg]
	txt = bld.cmd_and_log(argv, cwd=wd, env=env.env or None, quiet=0)
	tsk.outputs[0].write(txt.replace('\r\n', '\n').replace('\r', '\n')) # issue #1207
|
||||
|
||||
# Register the *.l -> C/C++ chain. A python function is used instead of a
# rule string so the command output can be captured and written back with
# normalized line endings (issue #854/#1207); decide_ext picks .lex.c or
# .lex.cc depending on the task generator features.
waflib.TaskGen.declare_chain(
	name = 'flex',
	rule = flexfun, # issue #854
	ext_in = '.l',
	decider = decide_ext,
)
|
||||
|
||||
def configure(conf):
    """
    Detect the *flex* program
    """
    conf.find_program('flex', var='FLEX')
    conf.env.FLEXFLAGS = ['-t']

    # NOTE(review): this assumes conf.env.FLEX is a plain string path here;
    # the pattern matches a Windows-style path such as ...\msys\1.0\bin\flex.exe
    if re.search (r"\\msys\\[0-9.]+\\bin\\flex.exe$", conf.env.FLEX):
        # this is the flex shipped with MSYS
        conf.env.FLEX_MSYS = True
|
67
third_party/waf/waf-light/waflib/Tools/g95.py
vendored
Normal file
67
third_party/waf/waf-light/waflib/Tools/g95.py
vendored
Normal file
|
@ -0,0 +1,67 @@
|
|||
#! /usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# KWS 2010
|
||||
# Thomas Nagy 2010 (ita)
|
||||
|
||||
import re
|
||||
from waflib import Utils
|
||||
from waflib.Tools import fc, fc_config, fc_scan, ar
|
||||
from waflib.Configure import conf
|
||||
|
||||
@conf
def find_g95(conf):
    """Find the g95 compiler, verify its version and set ``conf.env.FC_NAME``."""
    fc = conf.find_program('g95', var='FC')
    fc = conf.cmd_to_list(fc)
    # raises a configuration error if the version cannot be determined
    conf.get_g95_version(fc)
    conf.env.FC_NAME = 'G95'
|
||||
|
||||
@conf
def g95_flags(conf):
    """Set the default g95 compiler flags (PIC, module path template, debug)."""
    v = conf.env
    v['FCFLAGS_fcshlib'] = ['-fPIC']
    v['FORTRANMODFLAG'] = ['-fmod=', ''] # template for module path
    v['FCFLAGS_DEBUG'] = ['-Werror'] # why not
|
||||
|
||||
@conf
def g95_modifier_win32(conf):
    """Apply the generic Fortran Windows settings to the g95 configuration."""
    fc_config.fortran_modifier_win32(conf)
|
||||
|
||||
@conf
def g95_modifier_cygwin(conf):
    """Apply the generic Fortran Cygwin settings to the g95 configuration."""
    fc_config.fortran_modifier_cygwin(conf)
|
||||
|
||||
@conf
def g95_modifier_darwin(conf):
    """Apply the generic Fortran MacOS settings to the g95 configuration."""
    fc_config.fortran_modifier_darwin(conf)
|
||||
|
||||
@conf
def g95_modifier_platform(conf):
    """Dispatch to the platform-specific method named ``g95_modifier_<DEST_OS>``, if any."""
    dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
    g95_modifier_func = getattr(conf, 'g95_modifier_' + dest_os, None)
    if g95_modifier_func:
        g95_modifier_func()
|
||||
|
||||
@conf
def get_g95_version(conf, fc):
    """get the compiler version"""

    # g95 prints "g95 MAJOR.MINOR ..." on --version; the text may land on
    # stdout or stderr depending on the platform, so both are checked
    version_re = re.compile(r"g95\s*(?P<major>\d*)\.(?P<minor>\d*)").search
    cmd = fc + ['--version']
    out, err = fc_config.getoutput(conf, cmd, stdin=False)
    if out:
        match = version_re(out)
    else:
        match = version_re(err)
    if not match:
        conf.fatal('cannot determine g95 version')
    k = match.groupdict()
    # stored as a tuple of strings, e.g. ('0', '93')
    conf.env['FC_VERSION'] = (k['major'], k['minor'])
|
||||
|
||||
def configure(conf):
    """Detect g95 and set up the Fortran environment (archiver, flags, platform tweaks)."""
    conf.find_g95()
    conf.find_ar()
    conf.fc_flags()
    conf.fc_add_flags()
    conf.g95_flags()
    conf.g95_modifier_platform()
|
||||
|
18
third_party/waf/waf-light/waflib/Tools/gas.py
vendored
Normal file
18
third_party/waf/waf-light/waflib/Tools/gas.py
vendored
Normal file
|
@ -0,0 +1,18 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2008-2010 (ita)
|
||||
|
||||
"Detect as/gas/gcc for compiling assembly files"
|
||||
|
||||
import waflib.Tools.asm # - leave this
|
||||
from waflib.Tools import ar
|
||||
|
||||
def configure(conf):
    """
    Find the programs gas/as/gcc and set the variable *AS*
    """
    # gcc is listed as a fallback assembler driver; 'as' itself is not probed here
    conf.find_program(['gas', 'gcc'], var='AS')
    conf.env.AS_TGT_F = ['-c', '-o']
    conf.env.ASLNK_TGT_F = ['-o']
    conf.find_ar()
    conf.load('asm')
|
152
third_party/waf/waf-light/waflib/Tools/gcc.py
vendored
Normal file
152
third_party/waf/waf-light/waflib/Tools/gcc.py
vendored
Normal file
|
@ -0,0 +1,152 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2006-2010 (ita)
|
||||
# Ralf Habacker, 2006 (rh)
|
||||
# Yinon Ehrlich, 2009
|
||||
|
||||
"""
|
||||
gcc/llvm detection.
|
||||
"""
|
||||
|
||||
import os, sys
|
||||
from waflib import Configure, Options, Utils
|
||||
from waflib.Tools import ccroot, ar
|
||||
from waflib.Configure import conf
|
||||
|
||||
@conf
def find_gcc(conf):
    """
    Find the program gcc, and if present, try to detect its version number
    """
    cc = conf.find_program(['gcc', 'cc'], var='CC')
    cc = conf.cmd_to_list(cc)
    # raises a configuration error if the compiler is not gcc-compatible
    conf.get_cc_version(cc, gcc=True)
    conf.env.CC_NAME = 'gcc'
    conf.env.CC = cc
|
||||
|
||||
@conf
def gcc_common_flags(conf):
    """
    Common flags for gcc on nearly all platforms
    """
    v = conf.env

    v['CC_SRC_F'] = []
    v['CC_TGT_F'] = ['-c', '-o']

    # linker
    if not v['LINK_CC']: v['LINK_CC'] = v['CC']
    v['CCLNK_SRC_F'] = []
    v['CCLNK_TGT_F'] = ['-o']
    v['CPPPATH_ST'] = '-I%s'
    v['DEFINES_ST'] = '-D%s'

    v['LIB_ST'] = '-l%s' # template for adding libs
    v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
    v['STLIB_ST'] = '-l%s'
    v['STLIBPATH_ST'] = '-L%s'
    v['RPATH_ST'] = '-Wl,-rpath,%s'

    v['SONAME_ST'] = '-Wl,-h,%s'
    v['SHLIB_MARKER'] = '-Wl,-Bdynamic'
    v['STLIB_MARKER'] = '-Wl,-Bstatic'

    # program
    v['cprogram_PATTERN'] = '%s'

    # shared library
    v['CFLAGS_cshlib'] = ['-fPIC']
    v['LINKFLAGS_cshlib'] = ['-shared']
    v['cshlib_PATTERN'] = 'lib%s.so'

    # static lib
    v['LINKFLAGS_cstlib'] = ['-Wl,-Bstatic']
    v['cstlib_PATTERN'] = 'lib%s.a'

    # osx stuff
    v['LINKFLAGS_MACBUNDLE'] = ['-bundle', '-undefined', 'dynamic_lookup']
    v['CFLAGS_MACBUNDLE'] = ['-fPIC']
    v['macbundle_PATTERN'] = '%s.bundle'
|
||||
|
||||
@conf
def gcc_modifier_win32(conf):
    """Configuration flags for executing gcc on Windows"""
    v = conf.env
    v['cprogram_PATTERN'] = '%s.exe'

    v['cshlib_PATTERN'] = '%s.dll'
    v['implib_PATTERN'] = 'lib%s.dll.a'
    v['IMPLIB_ST'] = '-Wl,--out-implib,%s'

    # -fPIC is meaningless on Windows; everything is position-independent
    v['CFLAGS_cshlib'] = []

    # Auto-import is enabled by default even without this option,
    # but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages
    # that the linker emits otherwise.
    v.append_value('LINKFLAGS', ['-Wl,--enable-auto-import'])
|
||||
|
||||
@conf
def gcc_modifier_cygwin(conf):
    """Configuration flags for executing gcc on Cygwin"""
    # start from the Windows settings, then override the dll naming
    gcc_modifier_win32(conf)
    v = conf.env
    v['cshlib_PATTERN'] = 'cyg%s.dll'
    v.append_value('LINKFLAGS_cshlib', ['-Wl,--enable-auto-image-base'])
    v['CFLAGS_cshlib'] = []
|
||||
|
||||
@conf
def gcc_modifier_darwin(conf):
    """Configuration flags for executing gcc on MacOS"""
    v = conf.env
    v['CFLAGS_cshlib'] = ['-fPIC']
    v['LINKFLAGS_cshlib'] = ['-dynamiclib', '-Wl,-compatibility_version,1', '-Wl,-current_version,1']
    v['cshlib_PATTERN'] = 'lib%s.dylib'
    v['FRAMEWORKPATH_ST'] = '-F%s'
    v['FRAMEWORK_ST'] = ['-framework']
    v['ARCH_ST'] = ['-arch']

    v['LINKFLAGS_cstlib'] = []

    # the Darwin linker does not understand -Bdynamic/-Bstatic/-h
    v['SHLIB_MARKER'] = []
    v['STLIB_MARKER'] = []
    v['SONAME_ST'] = []
|
||||
|
||||
@conf
def gcc_modifier_aix(conf):
    """Configuration flags for executing gcc on AIX"""
    v = conf.env
    v['LINKFLAGS_cprogram'] = ['-Wl,-brtl']
    v['LINKFLAGS_cshlib'] = ['-shared','-Wl,-brtl,-bexpfull']
    v['SHLIB_MARKER'] = []
|
||||
|
||||
@conf
def gcc_modifier_hpux(conf):
    """Configuration flags for executing gcc on HP-UX"""
    v = conf.env
    v['SHLIB_MARKER'] = []
    v['STLIB_MARKER'] = '-Bstatic'
    v['CFLAGS_cshlib'] = ['-fPIC','-DPIC']
    # HP-UX shared libraries use the .sl suffix
    v['cshlib_PATTERN'] = 'lib%s.sl'
|
||||
|
||||
@conf
def gcc_modifier_platform(conf):
    """Execute platform-specific functions based on *gcc_modifier_+NAME*"""
    # * set configurations specific for a platform.
    # * the destination platform is detected automatically by looking at the macros the compiler predefines,
    #   and if it's not recognised, it fallbacks to sys.platform.
    gcc_modifier_func = getattr(conf, 'gcc_modifier_' + conf.env.DEST_OS, None)
    if gcc_modifier_func:
        gcc_modifier_func()
|
||||
|
||||
def configure(conf):
    """
    Configuration for gcc
    """
    conf.find_gcc()
    conf.find_ar()
    conf.gcc_common_flags()
    conf.gcc_modifier_platform()
    conf.cc_load_tools()
    conf.cc_add_flags()
    conf.link_add_flags()
|
||||
|
||||
|
60
third_party/waf/waf-light/waflib/Tools/gdc.py
vendored
Normal file
60
third_party/waf/waf-light/waflib/Tools/gdc.py
vendored
Normal file
|
@ -0,0 +1,60 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Carlos Rafael Giani, 2007 (dv)
|
||||
|
||||
import sys
|
||||
from waflib.Tools import ar, d
|
||||
from waflib.Configure import conf
|
||||
|
||||
@conf
def find_gdc(conf):
    """
    Find the program gdc and set the variable *D*
    """
    conf.find_program('gdc', var='D')

    # sanity check: make sure the program found actually identifies as gdc
    out = conf.cmd_and_log([conf.env.D, '--version'])
    if out.find("gdc ") == -1:
        conf.fatal("detected compiler is not gdc")
|
||||
|
||||
@conf
def common_flags_gdc(conf):
    """
    Set the flags required by *gdc*
    """
    v = conf.env

    # _DFLAGS _DIMPORTFLAGS

    # for more info about the meaning of this dict see dmd.py
    v['DFLAGS'] = []

    v['D_SRC_F'] = ['-c']
    v['D_TGT_F'] = '-o%s'

    # linker
    v['D_LINKER'] = v['D']
    v['DLNK_SRC_F'] = ''
    v['DLNK_TGT_F'] = '-o%s'
    v['DINC_ST'] = '-I%s'

    v['DSHLIB_MARKER'] = v['DSTLIB_MARKER'] = ''
    v['DSTLIB_ST'] = v['DSHLIB_ST'] = '-l%s'
    v['DSTLIBPATH_ST'] = v['DLIBPATH_ST'] = '-L%s'

    v['LINKFLAGS_dshlib'] = ['-shared']

    # D interface (header) file generation
    v['DHEADER_ext'] = '.di'
    v.DFLAGS_d_with_header = '-fintfc'
    v['D_HDR_F'] = '-fintfc-file=%s'
|
||||
|
||||
def configure(conf):
    """
    Configuration for gdc
    """
    conf.find_gdc()
    conf.load('ar')
    conf.load('d')
    conf.common_flags_gdc()
    conf.d_platform_flags()
|
||||
|
91
third_party/waf/waf-light/waflib/Tools/gfortran.py
vendored
Normal file
91
third_party/waf/waf-light/waflib/Tools/gfortran.py
vendored
Normal file
|
@ -0,0 +1,91 @@
|
|||
#! /usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# DC 2008
|
||||
# Thomas Nagy 2010 (ita)
|
||||
|
||||
import re
|
||||
from waflib import Utils
|
||||
from waflib.Tools import fc, fc_config, fc_scan, ar
|
||||
from waflib.Configure import conf
|
||||
|
||||
@conf
def find_gfortran(conf):
    """Find the gfortran program (will look in the environment variable 'FC')"""
    fc = conf.find_program(['gfortran','g77'], var='FC')
    # (fallback to g77 for systems, where no gfortran is available)
    fc = conf.cmd_to_list(fc)
    conf.get_gfortran_version(fc)
    conf.env.FC_NAME = 'GFORTRAN'
|
||||
|
||||
@conf
def gfortran_flags(conf):
    """Set gfortran-specific flags (PIC, module output directory template, debug)."""
    v = conf.env
    v['FCFLAGS_fcshlib'] = ['-fPIC']
    v['FORTRANMODFLAG'] = ['-J', ''] # template for module path
    v['FCFLAGS_DEBUG'] = ['-Werror'] # why not
|
||||
|
||||
@conf
def gfortran_modifier_win32(conf):
    """Apply the generic Fortran Windows settings to the gfortran configuration."""
    fc_config.fortran_modifier_win32(conf)
|
||||
|
||||
@conf
def gfortran_modifier_cygwin(conf):
    """Apply the generic Fortran Cygwin settings to the gfortran configuration."""
    fc_config.fortran_modifier_cygwin(conf)
|
||||
|
||||
@conf
def gfortran_modifier_darwin(conf):
    """Apply the generic Fortran MacOS settings to the gfortran configuration."""
    fc_config.fortran_modifier_darwin(conf)
|
||||
|
||||
@conf
def gfortran_modifier_platform(conf):
    """Dispatch to the platform-specific method named ``gfortran_modifier_<DEST_OS>``, if any."""
    dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
    gfortran_modifier_func = getattr(conf, 'gfortran_modifier_' + dest_os, None)
    if gfortran_modifier_func:
        gfortran_modifier_func()
|
||||
|
||||
@conf
def get_gfortran_version(conf, fc):
    """
    Check that the detected compiler really is gfortran and store its version
    in ``conf.env['FC_VERSION']`` as a tuple of strings
    ``(__GNUC__, __GNUC_MINOR__, __GNUC_PATCHLEVEL__)``.

    :param fc: compiler command, as a list (e.g. ``['gfortran']``)
    :raises: a configuration error via ``conf.fatal`` when the compiler is not gfortran
    """
    # ensure this is actually gfortran, not an imposter.
    # the --version banner may land on stdout or stderr depending on the platform
    version_re = re.compile(r"GNU\s*Fortran", re.I).search
    cmd = fc + ['--version']
    out, err = fc_config.getoutput(conf, cmd, stdin=False)
    if out:
        match = version_re(out)
    else:
        match = version_re(err)
    if not match:
        conf.fatal('Could not determine the compiler type')

    # --- now get more detailed info -- see c_config.get_cc_version
    # gfortran shares the gcc preprocessor: -dM -E dumps the predefined macros
    cmd = fc + ['-dM', '-E', '-']
    out, err = fc_config.getoutput(conf, cmd, stdin=True)

    if '__GNUC__' not in out:
        conf.fatal('Could not determine the compiler type')

    # parse the '#define KEY VALUE' lines into a dict
    import shlex
    k = {}
    for line in out.split('\n'):
        lst = shlex.split(line)
        if len(lst) > 2:
            k[lst[1]] = lst[2]

    conf.env['FC_VERSION'] = (k['__GNUC__'], k['__GNUC_MINOR__'], k['__GNUC_PATCHLEVEL__'])
|
||||
|
||||
def configure(conf):
    """Detect gfortran and set up the Fortran environment (archiver, flags, platform tweaks)."""
    conf.find_gfortran()
    conf.find_ar()
    conf.fc_flags()
    conf.fc_add_flags()
    conf.gfortran_flags()
    conf.gfortran_modifier_platform()
|
378
third_party/waf/waf-light/waflib/Tools/glib2.py
vendored
Normal file
378
third_party/waf/waf-light/waflib/Tools/glib2.py
vendored
Normal file
|
@ -0,0 +1,378 @@
|
|||
#! /usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2006-2010 (ita)
|
||||
|
||||
"""
|
||||
Support for GLib2 tools:
|
||||
|
||||
* marshal
|
||||
* enums
|
||||
* gsettings
|
||||
"""
|
||||
|
||||
import os
|
||||
from waflib import Task, Utils, Options, Errors, Logs
|
||||
from waflib.TaskGen import taskgen_method, before_method, after_method, feature
|
||||
|
||||
################## marshal files
|
||||
|
||||
@taskgen_method
def add_marshal_file(self, filename, prefix):
    """
    Add a file to the list of marshal files to process. Store them in the attribute *marshal_list*.

    :param filename: xml file to compile
    :type filename: string
    :param prefix: marshal prefix (--prefix=prefix)
    :type prefix: string
    """
    if not hasattr(self, 'marshal_list'):
        self.marshal_list = []
    # schedule the processing method on this task generator
    self.meths.append('process_marshal')
    self.marshal_list.append((filename, prefix))
|
||||
|
||||
@before_method('process_source')
def process_marshal(self):
    """
    Process the marshal files stored in the attribute *marshal_list* to create :py:class:`waflib.Tools.glib2.glib_genmarshal` instances.
    Add the c file created to the list of source to process.
    """
    for f, prefix in getattr(self, 'marshal_list', []):
        node = self.path.find_resource(f)

        if not node:
            raise Errors.WafError('file not found %r' % f)

        h_node = node.change_ext('.h')
        c_node = node.change_ext('.c')

        # one task produces both the header and the C file
        task = self.create_task('glib_genmarshal', node, [h_node, c_node])
        task.env.GLIB_GENMARSHAL_PREFIX = prefix
        # the generated C file is compiled along with the other sources
        self.source = self.to_nodes(getattr(self, 'source', []))
        self.source.append(c_node)
|
||||
|
||||
class glib_genmarshal(Task.Task):
    """
    Generate a marshal header/source pair by running *glib-genmarshal* twice
    (once with --header, once with --body).
    """

    def run(self):
        # the build context is reached through the node class
        bld = self.inputs[0].__class__.ctx

        get = self.env.get_flat
        cmd1 = "%s %s --prefix=%s --header > %s" % (
            get('GLIB_GENMARSHAL'),
            self.inputs[0].srcpath(),
            get('GLIB_GENMARSHAL_PREFIX'),
            self.outputs[0].abspath()
        )

        ret = bld.exec_command(cmd1)
        if ret: return ret

        #print self.outputs[1].abspath()
        # the C file must include the generated header before the body is appended
        c = '''#include "%s"\n''' % self.outputs[0].name
        self.outputs[1].write(c)

        cmd2 = "%s %s --prefix=%s --body >> %s" % (
            get('GLIB_GENMARSHAL'),
            self.inputs[0].srcpath(),
            get('GLIB_GENMARSHAL_PREFIX'),
            self.outputs[1].abspath()
        )
        return bld.exec_command(cmd2)

    # environment variables that trigger a rebuild when they change
    vars = ['GLIB_GENMARSHAL_PREFIX', 'GLIB_GENMARSHAL']
    color = 'BLUE'
    ext_out = ['.h']
|
||||
|
||||
########################## glib-mkenums
|
||||
|
||||
@taskgen_method
def add_enums_from_template(self, source='', target='', template='', comments=''):
    """
    Add a file to the list of enum files to process. Store them in the attribute *enums_list*.

    :param source: enum file to process
    :type source: string
    :param target: target file
    :type target: string
    :param template: template file
    :type template: string
    :param comments: comments
    :type comments: string
    """
    if not hasattr(self, 'enums_list'):
        self.enums_list = []
    self.meths.append('process_enums')
    # the per-section keys are left empty because the template drives the output
    self.enums_list.append({'source': source,
        'target': target,
        'template': template,
        'file-head': '',
        'file-prod': '',
        'file-tail': '',
        'enum-prod': '',
        'value-head': '',
        'value-prod': '',
        'value-tail': '',
        'comments': comments})
|
||||
|
||||
@taskgen_method
def add_enums(self, source='', target='',
        file_head='', file_prod='', file_tail='', enum_prod='',
        value_head='', value_prod='', value_tail='', comments=''):
    """
    Add a file to the list of enum files to process. Store them in the attribute *enums_list*.

    :param source: enum file to process
    :type source: string
    :param target: target file
    :type target: string
    :param file_head: text for the --fhead option of glib-mkenums
    :param file_prod: text for the --fprod option
    :param file_tail: text for the --ftail option
    :param enum_prod: text for the --eprod option
    :param value_head: text for the --vhead option
    :param value_prod: text for the --vprod option
    :param value_tail: text for the --vtail option
    :param comments: comments
    :type comments: string
    """
    if not hasattr(self, 'enums_list'):
        self.enums_list = []
    self.meths.append('process_enums')
    self.enums_list.append({'source': source,
        'template': '',
        'target': target,
        'file-head': file_head,
        'file-prod': file_prod,
        'file-tail': file_tail,
        'enum-prod': enum_prod,
        'value-head': value_head,
        'value-prod': value_prod,
        'value-tail': value_tail,
        'comments': comments})
|
||||
|
||||
@before_method('process_source')
def process_enums(self):
    """
    Process the enum files stored in the attribute *enum_list* to create :py:class:`waflib.Tools.glib2.glib_mkenums` instances.
    """
    for enum in getattr(self, 'enums_list', []):
        task = self.create_task('glib_mkenums')
        env = task.env

        inputs = []

        # process the source
        source_list = self.to_list(enum['source'])
        if not source_list:
            raise Errors.WafError('missing source ' + str(enum))
        source_list = [self.path.find_resource(k) for k in source_list]
        inputs += source_list
        env['GLIB_MKENUMS_SOURCE'] = [k.abspath() for k in source_list]

        # find the target
        if not enum['target']:
            raise Errors.WafError('missing target ' + str(enum))
        tgt_node = self.path.find_or_declare(enum['target'])
        if tgt_node.name.endswith('.c'):
            # generated C files must be compiled with the other sources
            self.source.append(tgt_node)
        env['GLIB_MKENUMS_TARGET'] = tgt_node.abspath()

        options = []

        if enum['template']: # template, if provided
            template_node = self.path.find_resource(enum['template'])
            options.append('--template %s' % (template_node.abspath()))
            inputs.append(template_node)
        # map the stored dict keys to the corresponding glib-mkenums options
        params = {'file-head' : '--fhead',
                'file-prod' : '--fprod',
                'file-tail' : '--ftail',
                'enum-prod' : '--eprod',
                'value-head' : '--vhead',
                'value-prod' : '--vprod',
                'value-tail' : '--vtail',
                'comments': '--comments'}
        for param, option in params.items():
            if enum[param]:
                options.append('%s %r' % (option, enum[param]))

        env['GLIB_MKENUMS_OPTIONS'] = ' '.join(options)

        # update the task instance
        task.set_inputs(inputs)
        task.set_outputs(tgt_node)
|
||||
|
||||
class glib_mkenums(Task.Task):
    """
    Process enum files
    """
    # the command writes to stdout; the shell redirection captures it in the target
    run_str = '${GLIB_MKENUMS} ${GLIB_MKENUMS_OPTIONS} ${GLIB_MKENUMS_SOURCE} > ${GLIB_MKENUMS_TARGET}'
    color = 'PINK'
    ext_out = ['.h']
|
||||
|
||||
######################################### gsettings
|
||||
|
||||
@taskgen_method
|
||||
def add_settings_schemas(self, filename_list):
    """
    Add settings files to process, add them to *settings_schema_files*

    :param filename_list: files
    :type filename_list: list of string
    """
    schemas = getattr(self, 'settings_schema_files', None)
    if schemas is None:
        schemas = self.settings_schema_files = []

    # accept a single file name as a convenience
    if not isinstance(filename_list, list):
        filename_list = [filename_list]

    schemas.extend(filename_list)
|
||||
|
||||
@taskgen_method
|
||||
def add_settings_enums(self, namespace, filename_list):
    """
    This function may be called only once by task generator to set the enums namespace.

    :param namespace: namespace
    :type namespace: string
    :param filename_list: enum files to process
    :type filename_list: file list
    """
    if hasattr(self, 'settings_enum_namespace'):
        raise Errors.WafError("Tried to add gsettings enums to '%s' more than once" % self.name)
    self.settings_enum_namespace = namespace

    # bug fix: the original test was ``type(filename_list) != 'list'``, which
    # compares a type object against the string 'list' and is therefore always
    # true, so list arguments were wrapped in a second list
    if not isinstance(filename_list, list):
        filename_list = [filename_list]
    self.settings_enum_files = filename_list
|
||||
|
||||
|
||||
def r_change_ext(self, ext):
    """
    Change the extension from the *last* dot in the filename. The gsettings schemas
    often have names of the form org.gsettings.test.gschema.xml
    """
    stem, dot, _tail = self.name.rpartition('.')
    # no dot at all: just append the new extension
    new_name = stem + ext if dot else self.name + ext
    return self.parent.find_or_declare([new_name])
|
||||
|
||||
@feature('glib2')
def process_settings(self):
    """
    Process the schema files in *settings_schema_files* to create :py:class:`waflib.Tools.glib2.glib_mkenums` instances. The
    same files are validated through :py:class:`waflib.Tools.glib2.glib_validate_schema` tasks.

    """
    enums_tgt_node = []
    install_files = []

    settings_schema_files = getattr(self, 'settings_schema_files', [])
    if settings_schema_files and not self.env['GLIB_COMPILE_SCHEMAS']:
        raise Errors.WafError ("Unable to process GSettings schemas - glib-compile-schemas was not found during configure")

    # 1. process gsettings_enum_files (generate .enums.xml)
    #
    if hasattr(self, 'settings_enum_files'):
        enums_task = self.create_task('glib_mkenums')

        source_list = self.settings_enum_files
        source_list = [self.path.find_resource(k) for k in source_list]
        enums_task.set_inputs(source_list)
        enums_task.env['GLIB_MKENUMS_SOURCE'] = [k.abspath() for k in source_list]

        target = self.settings_enum_namespace + '.enums.xml'
        tgt_node = self.path.find_or_declare(target)
        enums_task.set_outputs(tgt_node)
        enums_task.env['GLIB_MKENUMS_TARGET'] = tgt_node.abspath()
        enums_tgt_node = [tgt_node]

        install_files.append (tgt_node)

        # glib-mkenums options that produce a <schemalist> XML document
        options = '--comments "<!-- @comment@ -->" --fhead "<schemalist>" --vhead " <@type@ id=\\"%s.@EnumName@\\">" --vprod " <value nick=\\"@valuenick@\\" value=\\"@valuenum@\\"/>" --vtail " </@type@>" --ftail "</schemalist>" ' % (self.settings_enum_namespace)
        enums_task.env['GLIB_MKENUMS_OPTIONS'] = options

    # 2. process gsettings_schema_files (validate .gschema.xml files)
    #
    for schema in settings_schema_files:
        schema_task = self.create_task ('glib_validate_schema')

        schema_node = self.path.find_resource(schema)
        if not schema_node:
            raise Errors.WafError("Cannot find the schema file '%s'" % schema)
        install_files.append(schema_node)
        # the generated enums xml (if any) must be validated along with the schema
        source_list = enums_tgt_node + [schema_node]

        schema_task.set_inputs (source_list)
        schema_task.env['GLIB_COMPILE_SCHEMAS_OPTIONS'] = [("--schema-file=" + k.abspath()) for k in source_list]

        target_node = r_change_ext (schema_node, '.xml.valid')
        schema_task.set_outputs (target_node)
        schema_task.env['GLIB_VALIDATE_SCHEMA_OUTPUT'] = target_node.abspath()

    # 3. schemas install task
    def compile_schemas_callback(bld):
        # refresh the schema cache once, after all files are installed
        if not bld.is_install: return
        Logs.pprint ('YELLOW','Updating GSettings schema cache')
        command = Utils.subst_vars("${GLIB_COMPILE_SCHEMAS} ${GSETTINGSSCHEMADIR}", bld.env)
        ret = self.bld.exec_command(command)

    if self.bld.is_install:
        if not self.env['GSETTINGSSCHEMADIR']:
            raise Errors.WafError ('GSETTINGSSCHEMADIR not defined (should have been set up automatically during configure)')

        if install_files:
            self.bld.install_files (self.env['GSETTINGSSCHEMADIR'], install_files)

        # register the post-build callback at most once per build context
        if not hasattr(self.bld, '_compile_schemas_registered'):
            self.bld.add_post_fun (compile_schemas_callback)
            self.bld._compile_schemas_registered = True
|
||||
|
||||
class glib_validate_schema(Task.Task):
    """
    Validate schema files
    """
    # --dry-run validates without writing a cache; the touch records success
    run_str = 'rm -f ${GLIB_VALIDATE_SCHEMA_OUTPUT} && ${GLIB_COMPILE_SCHEMAS} --dry-run ${GLIB_COMPILE_SCHEMAS_OPTIONS} && touch ${GLIB_VALIDATE_SCHEMA_OUTPUT}'
    color = 'PINK'
|
||||
|
||||
def configure(conf):
    """
    Find the following programs:

    * *glib-genmarshal* and set *GLIB_GENMARSHAL*
    * *glib-mkenums* and set *GLIB_MKENUMS*
    * *glib-compile-schemas* and set *GLIB_COMPILE_SCHEMAS* (not mandatory)

    And set the variable *GSETTINGSSCHEMADIR*
    """
    conf.find_program('glib-genmarshal', var='GLIB_GENMARSHAL')
    conf.find_perl_program('glib-mkenums', var='GLIB_MKENUMS')

    # when cross-compiling, gsettings.m4 locates the program with the following:
    # pkg-config --variable glib_compile_schemas gio-2.0
    conf.find_program('glib-compile-schemas', var='GLIB_COMPILE_SCHEMAS', mandatory=False)

    def getstr(varname):
        # command-line option first, then any value already in the environment
        return getattr(Options.options, varname, getattr(conf.env,varname, ''))

    # TODO make this dependent on the gnu_dirs tool?
    gsettingsschemadir = getstr('GSETTINGSSCHEMADIR')
    if not gsettingsschemadir:
        datadir = getstr('DATADIR')
        if not datadir:
            prefix = conf.env['PREFIX']
            datadir = os.path.join(prefix, 'share')
        gsettingsschemadir = os.path.join(datadir, 'glib-2.0', 'schemas')

    conf.env['GSETTINGSSCHEMADIR'] = gsettingsschemadir
|
||||
|
||||
def options(opt):
    """
    Add the ``--gsettingsschemadir`` command-line option
    """
    opt.add_option('--gsettingsschemadir', help='GSettings schema location [Default: ${datadir}/glib-2.0/schemas]',default='',dest='GSETTINGSSCHEMADIR')
|
||||
|
129
third_party/waf/waf-light/waflib/Tools/gnu_dirs.py
vendored
Normal file
129
third_party/waf/waf-light/waflib/Tools/gnu_dirs.py
vendored
Normal file
|
@ -0,0 +1,129 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Ali Sabil, 2007
|
||||
|
||||
"""
|
||||
Sets various standard variables such as INCLUDEDIR. SBINDIR and others. To use this module just call::
|
||||
|
||||
opt.load('gnu_dirs')
|
||||
|
||||
and::
|
||||
|
||||
conf.load('gnu_dirs')
|
||||
|
||||
Add options for the standard GNU directories, this tool will add the options
|
||||
found in autotools, and will update the environment with the following
|
||||
installation variables:
|
||||
|
||||
============== ========================================= =======================
|
||||
Variable Description Value
|
||||
============== ========================================= =======================
|
||||
PREFIX architecture-independent files /usr/local
|
||||
EXEC_PREFIX architecture-dependent files PREFIX
|
||||
BINDIR user executables EXEC_PREFIX/bin
|
||||
SBINDIR user executables EXEC_PREFIX/sbin
|
||||
LIBEXECDIR program executables EXEC_PREFIX/libexec
|
||||
SYSCONFDIR read-only single-machine data PREFIX/etc
|
||||
SHAREDSTATEDIR modifiable architecture-independent data PREFIX/com
|
||||
LOCALSTATEDIR modifiable single-machine data PREFIX/var
|
||||
LIBDIR object code libraries EXEC_PREFIX/lib
|
||||
INCLUDEDIR C header files PREFIX/include
|
||||
OLDINCLUDEDIR C header files for non-gcc /usr/include
|
||||
DATAROOTDIR read-only arch.-independent data root PREFIX/share
|
||||
DATADIR read-only architecture-independent data DATAROOTDIR
|
||||
INFODIR info documentation DATAROOTDIR/info
|
||||
LOCALEDIR locale-dependent data DATAROOTDIR/locale
|
||||
MANDIR man documentation DATAROOTDIR/man
|
||||
DOCDIR documentation root DATAROOTDIR/doc/APPNAME
|
||||
HTMLDIR html documentation DOCDIR
|
||||
DVIDIR dvi documentation DOCDIR
|
||||
PDFDIR pdf documentation DOCDIR
|
||||
PSDIR ps documentation DOCDIR
|
||||
============== ========================================= =======================
|
||||
"""
|
||||
|
||||
import os
|
||||
from waflib import Utils, Options, Context
|
||||
|
||||
_options = [x.split(', ') for x in '''
|
||||
bindir, user executables, ${EXEC_PREFIX}/bin
|
||||
sbindir, system admin executables, ${EXEC_PREFIX}/sbin
|
||||
libexecdir, program executables, ${EXEC_PREFIX}/libexec
|
||||
sysconfdir, read-only single-machine data, ${PREFIX}/etc
|
||||
sharedstatedir, modifiable architecture-independent data, ${PREFIX}/com
|
||||
localstatedir, modifiable single-machine data, ${PREFIX}/var
|
||||
libdir, object code libraries, ${EXEC_PREFIX}/lib
|
||||
includedir, C header files, ${PREFIX}/include
|
||||
oldincludedir, C header files for non-gcc, /usr/include
|
||||
datarootdir, read-only arch.-independent data root, ${PREFIX}/share
|
||||
datadir, read-only architecture-independent data, ${DATAROOTDIR}
|
||||
infodir, info documentation, ${DATAROOTDIR}/info
|
||||
localedir, locale-dependent data, ${DATAROOTDIR}/locale
|
||||
mandir, man documentation, ${DATAROOTDIR}/man
|
||||
docdir, documentation root, ${DATAROOTDIR}/doc/${PACKAGE}
|
||||
htmldir, html documentation, ${DOCDIR}
|
||||
dvidir, dvi documentation, ${DOCDIR}
|
||||
pdfdir, pdf documentation, ${DOCDIR}
|
||||
psdir, ps documentation, ${DOCDIR}
|
||||
'''.split('\n') if x]
|
||||
|
||||
def configure(conf):
    """
    Read the command-line options to set lots of variables in *conf.env*. The variables
    BINDIR and LIBDIR will be overwritten.
    """
    def get_param(varname, default):
        # command-line value wins; fall back to the provided default
        return getattr(Options.options, varname, '') or default

    env = conf.env
    env.LIBDIR = env.BINDIR = []
    env.EXEC_PREFIX = get_param('EXEC_PREFIX', env.PREFIX)
    env.PACKAGE = getattr(Context.g_module, 'APPNAME', None) or env.PACKAGE

    # fixed-point iteration: directory defaults reference each other
    # (e.g. ${DATAROOTDIR}/info), so keep substituting until everything
    # resolves or the iteration limit is reached
    complete = False
    iter = 0
    while not complete and iter < len(_options) + 1:
        iter += 1
        complete = True
        for name, help, default in _options:
            name = name.upper()
            if not env[name]:
                try:
                    env[name] = Utils.subst_vars(get_param(name, default).replace('/', os.sep), env)
                except TypeError:
                    complete = False

    if not complete:
        lst = [name for name, _, _ in _options if not env[name.upper()]]
        raise conf.errors.WafError('Variable substitution failure %r' % lst)
|
||||
|
||||
def options(opt):
    """
    Add lots of command-line options, for example::

        --exec-prefix: EXEC_PREFIX
    """
    inst_dir = opt.add_option_group('Installation directories',
'By default, "waf install" will put the files in\
 "/usr/local/bin", "/usr/local/lib" etc. An installation prefix other\
 than "/usr/local" can be given using "--prefix", for example "--prefix=$HOME"')

    # move --prefix and --destdir into the new option group
    for k in ('--prefix', '--destdir'):
        option = opt.parser.get_option(k)
        if option:
            opt.parser.remove_option(k)
            inst_dir.add_option(option)

    inst_dir.add_option('--exec-prefix',
        help = 'installation prefix [Default: ${PREFIX}]',
        default = '',
        dest = 'EXEC_PREFIX')

    dirs_options = opt.add_option_group('Pre-defined installation directories', '')

    # one option per entry of the _options table above
    for name, help, default in _options:
        option_name = '--' + name
        str_default = default
        str_help = '%s [Default: %s]' % (help, str_default)
        dirs_options.add_option(option_name, help=str_help, default='', dest=name.upper())
|
||||
|
152
third_party/waf/waf-light/waflib/Tools/gxx.py
vendored
Normal file
152
third_party/waf/waf-light/waflib/Tools/gxx.py
vendored
Normal file
|
@ -0,0 +1,152 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2006-2010 (ita)
|
||||
# Ralf Habacker, 2006 (rh)
|
||||
# Yinon Ehrlich, 2009
|
||||
|
||||
"""
|
||||
g++/llvm detection.
|
||||
"""
|
||||
|
||||
import os, sys
|
||||
from waflib import Configure, Options, Utils
|
||||
from waflib.Tools import ccroot, ar
|
||||
from waflib.Configure import conf
|
||||
|
||||
@conf
|
||||
def find_gxx(conf):
|
||||
"""
|
||||
Find the program g++, and if present, try to detect its version number
|
||||
"""
|
||||
cxx = conf.find_program(['g++', 'c++'], var='CXX')
|
||||
cxx = conf.cmd_to_list(cxx)
|
||||
conf.get_cc_version(cxx, gcc=True)
|
||||
conf.env.CXX_NAME = 'gcc'
|
||||
conf.env.CXX = cxx
|
||||
|
||||
@conf
|
||||
def gxx_common_flags(conf):
|
||||
"""
|
||||
Common flags for g++ on nearly all platforms
|
||||
"""
|
||||
v = conf.env
|
||||
|
||||
v['CXX_SRC_F'] = []
|
||||
v['CXX_TGT_F'] = ['-c', '-o']
|
||||
|
||||
# linker
|
||||
if not v['LINK_CXX']: v['LINK_CXX'] = v['CXX']
|
||||
v['CXXLNK_SRC_F'] = []
|
||||
v['CXXLNK_TGT_F'] = ['-o']
|
||||
v['CPPPATH_ST'] = '-I%s'
|
||||
v['DEFINES_ST'] = '-D%s'
|
||||
|
||||
v['LIB_ST'] = '-l%s' # template for adding libs
|
||||
v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
|
||||
v['STLIB_ST'] = '-l%s'
|
||||
v['STLIBPATH_ST'] = '-L%s'
|
||||
v['RPATH_ST'] = '-Wl,-rpath,%s'
|
||||
|
||||
v['SONAME_ST'] = '-Wl,-h,%s'
|
||||
v['SHLIB_MARKER'] = '-Wl,-Bdynamic'
|
||||
v['STLIB_MARKER'] = '-Wl,-Bstatic'
|
||||
|
||||
# program
|
||||
v['cxxprogram_PATTERN'] = '%s'
|
||||
|
||||
# shared library
|
||||
v['CXXFLAGS_cxxshlib'] = ['-fPIC']
|
||||
v['LINKFLAGS_cxxshlib'] = ['-shared']
|
||||
v['cxxshlib_PATTERN'] = 'lib%s.so'
|
||||
|
||||
# static lib
|
||||
v['LINKFLAGS_cxxstlib'] = ['-Wl,-Bstatic']
|
||||
v['cxxstlib_PATTERN'] = 'lib%s.a'
|
||||
|
||||
# osx stuff
|
||||
v['LINKFLAGS_MACBUNDLE'] = ['-bundle', '-undefined', 'dynamic_lookup']
|
||||
v['CXXFLAGS_MACBUNDLE'] = ['-fPIC']
|
||||
v['macbundle_PATTERN'] = '%s.bundle'
|
||||
|
||||
@conf
|
||||
def gxx_modifier_win32(conf):
|
||||
"""Configuration flags for executing gcc on Windows"""
|
||||
v = conf.env
|
||||
v['cxxprogram_PATTERN'] = '%s.exe'
|
||||
|
||||
v['cxxshlib_PATTERN'] = '%s.dll'
|
||||
v['implib_PATTERN'] = 'lib%s.dll.a'
|
||||
v['IMPLIB_ST'] = '-Wl,--out-implib,%s'
|
||||
|
||||
v['CXXFLAGS_cxxshlib'] = []
|
||||
|
||||
# Auto-import is enabled by default even without this option,
|
||||
# but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages
|
||||
# that the linker emits otherwise.
|
||||
v.append_value('LINKFLAGS', ['-Wl,--enable-auto-import'])
|
||||
|
||||
@conf
|
||||
def gxx_modifier_cygwin(conf):
|
||||
"""Configuration flags for executing g++ on Cygwin"""
|
||||
gxx_modifier_win32(conf)
|
||||
v = conf.env
|
||||
v['cxxshlib_PATTERN'] = 'cyg%s.dll'
|
||||
v.append_value('LINKFLAGS_cxxshlib', ['-Wl,--enable-auto-image-base'])
|
||||
v['CXXFLAGS_cxxshlib'] = []
|
||||
|
||||
@conf
|
||||
def gxx_modifier_darwin(conf):
|
||||
"""Configuration flags for executing g++ on MacOS"""
|
||||
v = conf.env
|
||||
v['CXXFLAGS_cxxshlib'] = ['-fPIC']
|
||||
v['LINKFLAGS_cxxshlib'] = ['-dynamiclib', '-Wl,-compatibility_version,1', '-Wl,-current_version,1']
|
||||
v['cxxshlib_PATTERN'] = 'lib%s.dylib'
|
||||
v['FRAMEWORKPATH_ST'] = '-F%s'
|
||||
v['FRAMEWORK_ST'] = ['-framework']
|
||||
v['ARCH_ST'] = ['-arch']
|
||||
|
||||
v['LINKFLAGS_cxxstlib'] = []
|
||||
|
||||
v['SHLIB_MARKER'] = []
|
||||
v['STLIB_MARKER'] = []
|
||||
v['SONAME_ST'] = []
|
||||
|
||||
@conf
|
||||
def gxx_modifier_aix(conf):
|
||||
"""Configuration flags for executing g++ on AIX"""
|
||||
v = conf.env
|
||||
v['LINKFLAGS_cxxprogram']= ['-Wl,-brtl']
|
||||
|
||||
v['LINKFLAGS_cxxshlib'] = ['-shared', '-Wl,-brtl,-bexpfull']
|
||||
v['SHLIB_MARKER'] = []
|
||||
|
||||
@conf
|
||||
def gxx_modifier_hpux(conf):
|
||||
v = conf.env
|
||||
v['SHLIB_MARKER'] = []
|
||||
v['STLIB_MARKER'] = '-Bstatic'
|
||||
v['CFLAGS_cxxshlib'] = ['-fPIC','-DPIC']
|
||||
v['cxxshlib_PATTERN'] = 'lib%s.sl'
|
||||
|
||||
@conf
|
||||
def gxx_modifier_platform(conf):
|
||||
"""Execute platform-specific functions based on *gxx_modifier_+NAME*"""
|
||||
# * set configurations specific for a platform.
|
||||
# * the destination platform is detected automatically by looking at the macros the compiler predefines,
|
||||
# and if it's not recognised, it fallbacks to sys.platform.
|
||||
gxx_modifier_func = getattr(conf, 'gxx_modifier_' + conf.env.DEST_OS, None)
|
||||
if gxx_modifier_func:
|
||||
gxx_modifier_func()
|
||||
|
||||
def configure(conf):
|
||||
"""
|
||||
Configuration for g++
|
||||
"""
|
||||
conf.find_gxx()
|
||||
conf.find_ar()
|
||||
conf.gxx_common_flags()
|
||||
conf.gxx_modifier_platform()
|
||||
conf.cxx_load_tools()
|
||||
conf.cxx_add_flags()
|
||||
conf.link_add_flags()
|
||||
|
42
third_party/waf/waf-light/waflib/Tools/icc.py
vendored
Normal file
42
third_party/waf/waf-light/waflib/Tools/icc.py
vendored
Normal file
|
@ -0,0 +1,42 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Stian Selnes, 2008
|
||||
# Thomas Nagy 2009-2010 (ita)
|
||||
|
||||
"""
|
||||
Detect the Intel C compiler
|
||||
"""
|
||||
|
||||
import os, sys
|
||||
from waflib.Tools import ccroot, ar, gcc
|
||||
from waflib.Configure import conf
|
||||
|
||||
@conf
|
||||
def find_icc(conf):
|
||||
"""
|
||||
Find the program icc and execute it to ensure it really is icc
|
||||
"""
|
||||
if sys.platform == 'cygwin':
|
||||
conf.fatal('The Intel compiler does not work on Cygwin')
|
||||
|
||||
v = conf.env
|
||||
cc = None
|
||||
if v['CC']: cc = v['CC']
|
||||
elif 'CC' in conf.environ: cc = conf.environ['CC']
|
||||
if not cc: cc = conf.find_program('icc', var='CC')
|
||||
if not cc: cc = conf.find_program('ICL', var='CC')
|
||||
if not cc: conf.fatal('Intel C Compiler (icc) was not found')
|
||||
cc = conf.cmd_to_list(cc)
|
||||
|
||||
conf.get_cc_version(cc, icc=True)
|
||||
v['CC'] = cc
|
||||
v['CC_NAME'] = 'icc'
|
||||
|
||||
def configure(conf):
|
||||
conf.find_icc()
|
||||
conf.find_ar()
|
||||
conf.gcc_common_flags()
|
||||
conf.gcc_modifier_platform()
|
||||
conf.cc_load_tools()
|
||||
conf.cc_add_flags()
|
||||
conf.link_add_flags()
|
41
third_party/waf/waf-light/waflib/Tools/icpc.py
vendored
Normal file
41
third_party/waf/waf-light/waflib/Tools/icpc.py
vendored
Normal file
|
@ -0,0 +1,41 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy 2009-2010 (ita)
|
||||
|
||||
"""
|
||||
Detect the Intel C++ compiler
|
||||
"""
|
||||
|
||||
import os, sys
|
||||
from waflib.Tools import ccroot, ar, gxx
|
||||
from waflib.Configure import conf
|
||||
|
||||
@conf
|
||||
def find_icpc(conf):
|
||||
"""
|
||||
Find the program icpc, and execute it to ensure it really is icpc
|
||||
"""
|
||||
if sys.platform == 'cygwin':
|
||||
conf.fatal('The Intel compiler does not work on Cygwin')
|
||||
|
||||
v = conf.env
|
||||
cxx = None
|
||||
if v['CXX']: cxx = v['CXX']
|
||||
elif 'CXX' in conf.environ: cxx = conf.environ['CXX']
|
||||
if not cxx: cxx = conf.find_program('icpc', var='CXX')
|
||||
if not cxx: conf.fatal('Intel C++ Compiler (icpc) was not found')
|
||||
cxx = conf.cmd_to_list(cxx)
|
||||
|
||||
conf.get_cc_version(cxx, icc=True)
|
||||
v['CXX'] = cxx
|
||||
v['CXX_NAME'] = 'icc'
|
||||
|
||||
def configure(conf):
|
||||
conf.find_icpc()
|
||||
conf.find_ar()
|
||||
conf.gxx_common_flags()
|
||||
conf.gxx_modifier_platform()
|
||||
conf.cxx_load_tools()
|
||||
conf.cxx_add_flags()
|
||||
conf.link_add_flags()
|
||||
|
60
third_party/waf/waf-light/waflib/Tools/ifort.py
vendored
Normal file
60
third_party/waf/waf-light/waflib/Tools/ifort.py
vendored
Normal file
|
@ -0,0 +1,60 @@
|
|||
#! /usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# DC 2008
|
||||
# Thomas Nagy 2010 (ita)
|
||||
|
||||
import re
|
||||
from waflib import Utils
|
||||
from waflib.Tools import fc, fc_config, fc_scan, ar
|
||||
from waflib.Configure import conf
|
||||
|
||||
@conf
|
||||
def find_ifort(conf):
|
||||
fc = conf.find_program('ifort', var='FC')
|
||||
fc = conf.cmd_to_list(fc)
|
||||
conf.get_ifort_version(fc)
|
||||
conf.env.FC_NAME = 'IFORT'
|
||||
|
||||
@conf
|
||||
def ifort_modifier_cygwin(conf):
|
||||
raise NotImplementedError("Ifort on cygwin not yet implemented")
|
||||
|
||||
@conf
|
||||
def ifort_modifier_win32(conf):
|
||||
fc_config.fortran_modifier_win32(conf)
|
||||
|
||||
@conf
|
||||
def ifort_modifier_darwin(conf):
|
||||
fc_config.fortran_modifier_darwin(conf)
|
||||
|
||||
@conf
|
||||
def ifort_modifier_platform(conf):
|
||||
dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
|
||||
ifort_modifier_func = getattr(conf, 'ifort_modifier_' + dest_os, None)
|
||||
if ifort_modifier_func:
|
||||
ifort_modifier_func()
|
||||
|
||||
@conf
|
||||
def get_ifort_version(conf, fc):
|
||||
"""get the compiler version"""
|
||||
|
||||
version_re = re.compile(r"ifort\s*\(IFORT\)\s*(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
|
||||
cmd = fc + ['--version']
|
||||
out, err = fc_config.getoutput(conf, cmd, stdin=False)
|
||||
if out:
|
||||
match = version_re(out)
|
||||
else:
|
||||
match = version_re(err)
|
||||
if not match:
|
||||
conf.fatal('cannot determine ifort version.')
|
||||
k = match.groupdict()
|
||||
conf.env['FC_VERSION'] = (k['major'], k['minor'])
|
||||
|
||||
def configure(conf):
|
||||
conf.find_ifort()
|
||||
conf.find_program('xiar', var='AR')
|
||||
conf.env.ARFLAGS = 'rcs'
|
||||
conf.fc_flags()
|
||||
conf.fc_add_flags()
|
||||
conf.ifort_modifier_platform()
|
||||
|
176
third_party/waf/waf-light/waflib/Tools/intltool.py
vendored
Normal file
176
third_party/waf/waf-light/waflib/Tools/intltool.py
vendored
Normal file
|
@ -0,0 +1,176 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2006-2010 (ita)
|
||||
|
||||
"""
|
||||
Support for translation tools such as msgfmt and intltool
|
||||
|
||||
Usage::
|
||||
|
||||
def configure(conf):
|
||||
conf.load('gnu_dirs intltool')
|
||||
|
||||
def build(bld):
|
||||
# process the .po files into .gmo files, and install them in LOCALEDIR
|
||||
bld(features='intltool_po', appname='myapp', podir='po', install_path="${LOCALEDIR}")
|
||||
|
||||
# process an input file, substituting the translations from the po dir
|
||||
bld(
|
||||
features = "intltool_in",
|
||||
podir = "../po",
|
||||
flags = ["-d", "-q", "-u", "-c"],
|
||||
source = 'kupfer.desktop.in',
|
||||
install_path = "${DATADIR}/applications",
|
||||
)
|
||||
|
||||
Usage of the :py:mod:`waflib.Tools.gnu_dirs` is recommended, but not obligatory.
|
||||
"""
|
||||
|
||||
import os, re
|
||||
from waflib import Configure, TaskGen, Task, Utils, Runner, Options, Build, Logs
|
||||
import waflib.Tools.ccroot
|
||||
from waflib.TaskGen import feature, before_method
|
||||
from waflib.Logs import error
|
||||
|
||||
@before_method('process_source')
|
||||
@feature('intltool_in')
|
||||
def apply_intltool_in_f(self):
|
||||
"""
|
||||
Create tasks to translate files by intltool-merge::
|
||||
|
||||
def build(bld):
|
||||
bld(
|
||||
features = "intltool_in",
|
||||
podir = "../po",
|
||||
flags = ["-d", "-q", "-u", "-c"],
|
||||
source = 'kupfer.desktop.in',
|
||||
install_path = "${DATADIR}/applications",
|
||||
)
|
||||
|
||||
:param podir: location of the .po files
|
||||
:type podir: string
|
||||
:param source: source files to process
|
||||
:type source: list of string
|
||||
:param flags: compilation flags ("-quc" by default)
|
||||
:type flags: list of string
|
||||
:param install_path: installation path
|
||||
:type install_path: string
|
||||
"""
|
||||
try: self.meths.remove('process_source')
|
||||
except ValueError: pass
|
||||
|
||||
if not self.env.LOCALEDIR:
|
||||
self.env.LOCALEDIR = self.env.PREFIX + '/share/locale'
|
||||
|
||||
for i in self.to_list(self.source):
|
||||
node = self.path.find_resource(i)
|
||||
|
||||
podir = getattr(self, 'podir', 'po')
|
||||
podirnode = self.path.find_dir(podir)
|
||||
if not podirnode:
|
||||
error("could not find the podir %r" % podir)
|
||||
continue
|
||||
|
||||
cache = getattr(self, 'intlcache', '.intlcache')
|
||||
self.env['INTLCACHE'] = os.path.join(self.path.bldpath(), podir, cache)
|
||||
self.env['INTLPODIR'] = podirnode.bldpath()
|
||||
self.env['INTLFLAGS'] = getattr(self, 'flags', ['-q', '-u', '-c'])
|
||||
|
||||
task = self.create_task('intltool', node, node.change_ext(''))
|
||||
inst = getattr(self, 'install_path', '${LOCALEDIR}')
|
||||
if inst:
|
||||
self.bld.install_files(inst, task.outputs)
|
||||
|
||||
@feature('intltool_po')
|
||||
def apply_intltool_po(self):
|
||||
"""
|
||||
Create tasks to process po files::
|
||||
|
||||
def build(bld):
|
||||
bld(features='intltool_po', appname='myapp', podir='po', install_path="${LOCALEDIR}")
|
||||
|
||||
The relevant task generator arguments are:
|
||||
|
||||
:param podir: directory of the .po files
|
||||
:type podir: string
|
||||
:param appname: name of the application
|
||||
:type appname: string
|
||||
:param install_path: installation directory
|
||||
:type install_path: string
|
||||
|
||||
The file LINGUAS must be present in the directory pointed by *podir* and list the translation files to process.
|
||||
"""
|
||||
try: self.meths.remove('process_source')
|
||||
except ValueError: pass
|
||||
|
||||
if not self.env.LOCALEDIR:
|
||||
self.env.LOCALEDIR = self.env.PREFIX + '/share/locale'
|
||||
|
||||
appname = getattr(self, 'appname', 'set_your_app_name')
|
||||
podir = getattr(self, 'podir', '')
|
||||
inst = getattr(self, 'install_path', '${LOCALEDIR}')
|
||||
|
||||
linguas = self.path.find_node(os.path.join(podir, 'LINGUAS'))
|
||||
if linguas:
|
||||
# scan LINGUAS file for locales to process
|
||||
file = open(linguas.abspath())
|
||||
langs = []
|
||||
for line in file.readlines():
|
||||
# ignore lines containing comments
|
||||
if not line.startswith('#'):
|
||||
langs += line.split()
|
||||
file.close()
|
||||
re_linguas = re.compile('[-a-zA-Z_@.]+')
|
||||
for lang in langs:
|
||||
# Make sure that we only process lines which contain locales
|
||||
if re_linguas.match(lang):
|
||||
node = self.path.find_resource(os.path.join(podir, re_linguas.match(lang).group() + '.po'))
|
||||
task = self.create_task('po', node, node.change_ext('.mo'))
|
||||
|
||||
if inst:
|
||||
filename = task.outputs[0].name
|
||||
(langname, ext) = os.path.splitext(filename)
|
||||
inst_file = inst + os.sep + langname + os.sep + 'LC_MESSAGES' + os.sep + appname + '.mo'
|
||||
self.bld.install_as(inst_file, task.outputs[0], chmod=getattr(self, 'chmod', Utils.O644), env=task.env)
|
||||
|
||||
else:
|
||||
Logs.pprint('RED', "Error no LINGUAS file found in po directory")
|
||||
|
||||
class po(Task.Task):
|
||||
"""
|
||||
Compile .po files into .gmo files
|
||||
"""
|
||||
run_str = '${MSGFMT} -o ${TGT} ${SRC}'
|
||||
color = 'BLUE'
|
||||
|
||||
class intltool(Task.Task):
|
||||
"""
|
||||
Let intltool-merge translate an input file
|
||||
"""
|
||||
run_str = '${INTLTOOL} ${INTLFLAGS} ${INTLCACHE} ${INTLPODIR} ${SRC} ${TGT}'
|
||||
color = 'BLUE'
|
||||
|
||||
def configure(conf):
|
||||
"""
|
||||
Detect the program *msgfmt* and set *conf.env.MSGFMT*.
|
||||
Detect the program *intltool-merge* and set *conf.env.INTLTOOL*.
|
||||
It is possible to set INTLTOOL in the environment, but it must not have spaces in it::
|
||||
|
||||
$ INTLTOOL="/path/to/the program/intltool" waf configure
|
||||
|
||||
If a C/C++ compiler is present, execute a compilation test to find the header *locale.h*.
|
||||
"""
|
||||
conf.find_program('msgfmt', var='MSGFMT')
|
||||
conf.find_perl_program('intltool-merge', var='INTLTOOL')
|
||||
|
||||
prefix = conf.env.PREFIX
|
||||
datadir = conf.env.DATADIR
|
||||
if not datadir:
|
||||
datadir = os.path.join(prefix,'share')
|
||||
|
||||
conf.define('LOCALEDIR', os.path.join(datadir, 'locale').replace('\\', '\\\\'))
|
||||
conf.define('DATADIR', datadir.replace('\\', '\\\\'))
|
||||
|
||||
if conf.env.CC or conf.env.CXX:
|
||||
conf.check(header_name='locale.h')
|
||||
|
63
third_party/waf/waf-light/waflib/Tools/irixcc.py
vendored
Normal file
63
third_party/waf/waf-light/waflib/Tools/irixcc.py
vendored
Normal file
|
@ -0,0 +1,63 @@
|
|||
#! /usr/bin/env python
|
||||
# imported from samba
|
||||
|
||||
"""
|
||||
compiler definition for irix/MIPSpro cc compiler
|
||||
based on suncc.py from waf
|
||||
"""
|
||||
|
||||
import os
|
||||
from waflib import Utils
|
||||
from waflib.Tools import ccroot, ar
|
||||
from waflib.Configure import conf
|
||||
|
||||
@conf
|
||||
def find_irixcc(conf):
|
||||
v = conf.env
|
||||
cc = None
|
||||
if v['CC']: cc = v['CC']
|
||||
elif 'CC' in conf.environ: cc = conf.environ['CC']
|
||||
if not cc: cc = conf.find_program('cc', var='CC')
|
||||
if not cc: conf.fatal('irixcc was not found')
|
||||
cc = conf.cmd_to_list(cc)
|
||||
|
||||
try:
|
||||
conf.cmd_and_log(cc + ['-version'])
|
||||
except Exception:
|
||||
conf.fatal('%r -version could not be executed' % cc)
|
||||
|
||||
v['CC'] = cc
|
||||
v['CC_NAME'] = 'irix'
|
||||
|
||||
@conf
|
||||
def irixcc_common_flags(conf):
|
||||
v = conf.env
|
||||
|
||||
v['CC_SRC_F'] = ''
|
||||
v['CC_TGT_F'] = ['-c', '-o']
|
||||
v['CPPPATH_ST'] = '-I%s'
|
||||
v['DEFINES_ST'] = '-D%s'
|
||||
|
||||
# linker
|
||||
if not v['LINK_CC']: v['LINK_CC'] = v['CC']
|
||||
v['CCLNK_SRC_F'] = ''
|
||||
v['CCLNK_TGT_F'] = ['-o']
|
||||
|
||||
v['LIB_ST'] = '-l%s' # template for adding libs
|
||||
v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
|
||||
v['STLIB_ST'] = '-l%s'
|
||||
v['STLIBPATH_ST'] = '-L%s'
|
||||
|
||||
v['cprogram_PATTERN'] = '%s'
|
||||
v['cshlib_PATTERN'] = 'lib%s.so'
|
||||
v['cstlib_PATTERN'] = 'lib%s.a'
|
||||
|
||||
def configure(conf):
|
||||
conf.find_irixcc()
|
||||
conf.find_cpp()
|
||||
conf.find_ar()
|
||||
conf.irixcc_common_flags()
|
||||
conf.cc_load_tools()
|
||||
conf.cc_add_flags()
|
||||
conf.link_add_flags()
|
||||
|
480
third_party/waf/waf-light/waflib/Tools/javaw.py
vendored
Normal file
480
third_party/waf/waf-light/waflib/Tools/javaw.py
vendored
Normal file
|
@ -0,0 +1,480 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2006-2010 (ita)
|
||||
|
||||
"""
|
||||
Java support
|
||||
|
||||
Javac is one of the few compilers that behaves very badly:
|
||||
|
||||
#. it outputs files where it wants to (-d is only for the package root)
|
||||
|
||||
#. it recompiles files silently behind your back
|
||||
|
||||
#. it outputs an undefined amount of files (inner classes)
|
||||
|
||||
Remember that the compilation can be performed using Jython[1] rather than regular Python. Instead of
|
||||
running one of the following commands::
|
||||
|
||||
./waf configure
|
||||
python waf configure
|
||||
|
||||
You would have to run::
|
||||
|
||||
java -jar /path/to/jython.jar waf configure
|
||||
|
||||
[1] http://www.jython.org/
|
||||
"""
|
||||
|
||||
import os, re, tempfile, shutil
|
||||
from waflib import TaskGen, Task, Utils, Options, Build, Errors, Node, Logs
|
||||
from waflib.Configure import conf
|
||||
from waflib.TaskGen import feature, before_method, after_method
|
||||
|
||||
from waflib.Tools import ccroot
|
||||
ccroot.USELIB_VARS['javac'] = set(['CLASSPATH', 'JAVACFLAGS'])
|
||||
|
||||
|
||||
SOURCE_RE = '**/*.java'
|
||||
JAR_RE = '**/*'
|
||||
|
||||
class_check_source = '''
|
||||
public class Test {
|
||||
public static void main(String[] argv) {
|
||||
Class lib;
|
||||
if (argv.length < 1) {
|
||||
System.err.println("Missing argument");
|
||||
System.exit(77);
|
||||
}
|
||||
try {
|
||||
lib = Class.forName(argv[0]);
|
||||
} catch (ClassNotFoundException e) {
|
||||
System.err.println("ClassNotFoundException");
|
||||
System.exit(1);
|
||||
}
|
||||
lib = null;
|
||||
System.exit(0);
|
||||
}
|
||||
}
|
||||
'''
|
||||
|
||||
@feature('javac')
|
||||
@before_method('process_source')
|
||||
def apply_java(self):
|
||||
"""
|
||||
Create a javac task for compiling *.java files*. There can be
|
||||
only one javac task by task generator.
|
||||
"""
|
||||
Utils.def_attrs(self, jarname='', classpath='',
|
||||
sourcepath='.', srcdir='.',
|
||||
jar_mf_attributes={}, jar_mf_classpath=[])
|
||||
|
||||
nodes_lst = []
|
||||
|
||||
outdir = getattr(self, 'outdir', None)
|
||||
if outdir:
|
||||
if not isinstance(outdir, Node.Node):
|
||||
outdir = self.path.get_bld().make_node(self.outdir)
|
||||
else:
|
||||
outdir = self.path.get_bld()
|
||||
outdir.mkdir()
|
||||
self.outdir = outdir
|
||||
self.env['OUTDIR'] = outdir.abspath()
|
||||
|
||||
self.javac_task = tsk = self.create_task('javac')
|
||||
tmp = []
|
||||
|
||||
srcdir = getattr(self, 'srcdir', '')
|
||||
if isinstance(srcdir, Node.Node):
|
||||
srcdir = [srcdir]
|
||||
for x in Utils.to_list(srcdir):
|
||||
if isinstance(x, Node.Node):
|
||||
y = x
|
||||
else:
|
||||
y = self.path.find_dir(x)
|
||||
if not y:
|
||||
self.bld.fatal('Could not find the folder %s from %s' % (x, self.path))
|
||||
tmp.append(y)
|
||||
tsk.srcdir = tmp
|
||||
|
||||
if getattr(self, 'compat', None):
|
||||
tsk.env.append_value('JAVACFLAGS', ['-source', self.compat])
|
||||
|
||||
if hasattr(self, 'sourcepath'):
|
||||
fold = [isinstance(x, Node.Node) and x or self.path.find_dir(x) for x in self.to_list(self.sourcepath)]
|
||||
names = os.pathsep.join([x.srcpath() for x in fold])
|
||||
else:
|
||||
names = [x.srcpath() for x in tsk.srcdir]
|
||||
|
||||
if names:
|
||||
tsk.env.append_value('JAVACFLAGS', ['-sourcepath', names])
|
||||
|
||||
@feature('javac')
|
||||
@after_method('apply_java')
|
||||
def use_javac_files(self):
|
||||
"""
|
||||
Process the *use* attribute referring to other java compilations
|
||||
"""
|
||||
lst = []
|
||||
self.uselib = self.to_list(getattr(self, 'uselib', []))
|
||||
names = self.to_list(getattr(self, 'use', []))
|
||||
get = self.bld.get_tgen_by_name
|
||||
for x in names:
|
||||
try:
|
||||
y = get(x)
|
||||
except Exception:
|
||||
self.uselib.append(x)
|
||||
else:
|
||||
y.post()
|
||||
lst.append(y.jar_task.outputs[0].abspath())
|
||||
self.javac_task.set_run_after(y.jar_task)
|
||||
|
||||
if lst:
|
||||
self.env.append_value('CLASSPATH', lst)
|
||||
|
||||
@feature('javac')
|
||||
@after_method('apply_java', 'propagate_uselib_vars', 'use_javac_files')
|
||||
def set_classpath(self):
|
||||
"""
|
||||
Set the CLASSPATH value on the *javac* task previously created.
|
||||
"""
|
||||
self.env.append_value('CLASSPATH', getattr(self, 'classpath', []))
|
||||
for x in self.tasks:
|
||||
x.env.CLASSPATH = os.pathsep.join(self.env.CLASSPATH) + os.pathsep
|
||||
|
||||
@feature('jar')
|
||||
@after_method('apply_java', 'use_javac_files')
|
||||
@before_method('process_source')
|
||||
def jar_files(self):
|
||||
"""
|
||||
Create a jar task. There can be only one jar task by task generator.
|
||||
"""
|
||||
destfile = getattr(self, 'destfile', 'test.jar')
|
||||
jaropts = getattr(self, 'jaropts', [])
|
||||
manifest = getattr(self, 'manifest', None)
|
||||
|
||||
basedir = getattr(self, 'basedir', None)
|
||||
if basedir:
|
||||
if not isinstance(self.basedir, Node.Node):
|
||||
basedir = self.path.get_bld().make_node(basedir)
|
||||
else:
|
||||
basedir = self.path.get_bld()
|
||||
if not basedir:
|
||||
self.bld.fatal('Could not find the basedir %r for %r' % (self.basedir, self))
|
||||
|
||||
self.jar_task = tsk = self.create_task('jar_create')
|
||||
if manifest:
|
||||
jarcreate = getattr(self, 'jarcreate', 'cfm')
|
||||
node = self.path.find_node(manifest)
|
||||
tsk.dep_nodes.append(node)
|
||||
jaropts.insert(0, node.abspath())
|
||||
else:
|
||||
jarcreate = getattr(self, 'jarcreate', 'cf')
|
||||
if not isinstance(destfile, Node.Node):
|
||||
destfile = self.path.find_or_declare(destfile)
|
||||
if not destfile:
|
||||
self.bld.fatal('invalid destfile %r for %r' % (destfile, self))
|
||||
tsk.set_outputs(destfile)
|
||||
tsk.basedir = basedir
|
||||
|
||||
jaropts.append('-C')
|
||||
jaropts.append(basedir.bldpath())
|
||||
jaropts.append('.')
|
||||
|
||||
tsk.env['JAROPTS'] = jaropts
|
||||
tsk.env['JARCREATE'] = jarcreate
|
||||
|
||||
if getattr(self, 'javac_task', None):
|
||||
tsk.set_run_after(self.javac_task)
|
||||
|
||||
@feature('jar')
|
||||
@after_method('jar_files')
|
||||
def use_jar_files(self):
|
||||
"""
|
||||
Process the *use* attribute to set the build order on the
|
||||
tasks created by another task generator.
|
||||
"""
|
||||
lst = []
|
||||
self.uselib = self.to_list(getattr(self, 'uselib', []))
|
||||
names = self.to_list(getattr(self, 'use', []))
|
||||
get = self.bld.get_tgen_by_name
|
||||
for x in names:
|
||||
try:
|
||||
y = get(x)
|
||||
except Exception:
|
||||
self.uselib.append(x)
|
||||
else:
|
||||
y.post()
|
||||
self.jar_task.run_after.update(y.tasks)
|
||||
|
||||
class jar_create(Task.Task):
|
||||
"""
|
||||
Create a jar file
|
||||
"""
|
||||
color = 'GREEN'
|
||||
run_str = '${JAR} ${JARCREATE} ${TGT} ${JAROPTS}'
|
||||
|
||||
def runnable_status(self):
|
||||
"""
|
||||
Wait for dependent tasks to be executed, then read the
|
||||
files to update the list of inputs.
|
||||
"""
|
||||
for t in self.run_after:
|
||||
if not t.hasrun:
|
||||
return Task.ASK_LATER
|
||||
if not self.inputs:
|
||||
global JAR_RE
|
||||
try:
|
||||
self.inputs = [x for x in self.basedir.ant_glob(JAR_RE, remove=False) if id(x) != id(self.outputs[0])]
|
||||
except Exception:
|
||||
raise Errors.WafError('Could not find the basedir %r for %r' % (self.basedir, self))
|
||||
return super(jar_create, self).runnable_status()
|
||||
|
||||
class javac(Task.Task):
|
||||
"""
|
||||
Compile java files
|
||||
"""
|
||||
color = 'BLUE'
|
||||
|
||||
nocache = True
|
||||
"""
|
||||
The .class files cannot be put into a cache at the moment
|
||||
"""
|
||||
|
||||
vars = ['CLASSPATH', 'JAVACFLAGS', 'JAVAC', 'OUTDIR']
|
||||
"""
|
||||
The javac task will be executed again if the variables CLASSPATH, JAVACFLAGS, JAVAC or OUTDIR change.
|
||||
"""
|
||||
|
||||
def runnable_status(self):
|
||||
"""
|
||||
Wait for dependent tasks to be complete, then read the file system to find the input nodes.
|
||||
"""
|
||||
for t in self.run_after:
|
||||
if not t.hasrun:
|
||||
return Task.ASK_LATER
|
||||
|
||||
if not self.inputs:
|
||||
global SOURCE_RE
|
||||
self.inputs = []
|
||||
for x in self.srcdir:
|
||||
self.inputs.extend(x.ant_glob(SOURCE_RE, remove=False))
|
||||
return super(javac, self).runnable_status()
|
||||
|
||||
def run(self):
|
||||
"""
|
||||
Execute the javac compiler
|
||||
"""
|
||||
env = self.env
|
||||
gen = self.generator
|
||||
bld = gen.bld
|
||||
wd = bld.bldnode.abspath()
|
||||
def to_list(xx):
|
||||
if isinstance(xx, str): return [xx]
|
||||
return xx
|
||||
cmd = []
|
||||
cmd.extend(to_list(env['JAVAC']))
|
||||
cmd.extend(['-classpath'])
|
||||
cmd.extend(to_list(env['CLASSPATH']))
|
||||
cmd.extend(['-d'])
|
||||
cmd.extend(to_list(env['OUTDIR']))
|
||||
cmd.extend(to_list(env['JAVACFLAGS']))
|
||||
|
||||
files = [a.path_from(bld.bldnode) for a in self.inputs]
|
||||
|
||||
# workaround for command line length limit:
|
||||
# http://support.microsoft.com/kb/830473
|
||||
tmp = None
|
||||
try:
|
||||
if len(str(files)) + len(str(cmd)) > 8192:
|
||||
(fd, tmp) = tempfile.mkstemp(dir=bld.bldnode.abspath())
|
||||
try:
|
||||
os.write(fd, '\n'.join(files).encode())
|
||||
finally:
|
||||
if tmp:
|
||||
os.close(fd)
|
||||
if Logs.verbose:
|
||||
Logs.debug('runner: %r' % (cmd + files))
|
||||
cmd.append('@' + tmp)
|
||||
else:
|
||||
cmd += files
|
||||
|
||||
ret = self.exec_command(cmd, cwd=wd, env=env.env or None)
|
||||
finally:
|
||||
if tmp:
|
||||
os.remove(tmp)
|
||||
return ret
|
||||
|
||||
def post_run(self):
|
||||
"""
|
||||
"""
|
||||
for n in self.generator.outdir.ant_glob('**/*.class'):
|
||||
n.sig = Utils.h_file(n.abspath()) # careful with this
|
||||
self.generator.bld.task_sigs[self.uid()] = self.cache_sig
|
||||
|
||||
@feature('javadoc')
|
||||
@after_method('process_rule')
|
||||
def create_javadoc(self):
|
||||
tsk = self.create_task('javadoc')
|
||||
tsk.classpath = getattr(self, 'classpath', [])
|
||||
self.javadoc_package = Utils.to_list(self.javadoc_package)
|
||||
if not isinstance(self.javadoc_output, Node.Node):
|
||||
self.javadoc_output = self.bld.path.find_or_declare(self.javadoc_output)
|
||||
|
||||
class javadoc(Task.Task):
|
||||
color = 'BLUE'
|
||||
|
||||
def __str__(self):
|
||||
return '%s: %s -> %s\n' % (self.__class__.__name__, self.generator.srcdir, self.generator.javadoc_output)
|
||||
|
||||
def run(self):
|
||||
env = self.env
|
||||
bld = self.generator.bld
|
||||
wd = bld.bldnode.abspath()
|
||||
|
||||
#add src node + bld node (for generated java code)
|
||||
srcpath = self.generator.path.abspath() + os.sep + self.generator.srcdir
|
||||
srcpath += os.pathsep
|
||||
srcpath += self.generator.path.get_bld().abspath() + os.sep + self.generator.srcdir
|
||||
|
||||
classpath = env.CLASSPATH
|
||||
classpath += os.pathsep
|
||||
classpath += os.pathsep.join(self.classpath)
|
||||
classpath = "".join(classpath)
|
||||
|
||||
self.last_cmd = lst = []
|
||||
lst.extend(Utils.to_list(env['JAVADOC']))
|
||||
lst.extend(['-d', self.generator.javadoc_output.abspath()])
|
||||
lst.extend(['-sourcepath', srcpath])
|
||||
lst.extend(['-classpath', classpath])
|
||||
lst.extend(['-subpackages'])
|
||||
lst.extend(self.generator.javadoc_package)
|
||||
lst = [x for x in lst if x]
|
||||
|
||||
self.generator.bld.cmd_and_log(lst, cwd=wd, env=env.env or None, quiet=0)
|
||||
|
||||
def post_run(self):
|
||||
nodes = self.generator.javadoc_output.ant_glob('**')
|
||||
for x in nodes:
|
||||
x.sig = Utils.h_file(x.abspath())
|
||||
self.generator.bld.task_sigs[self.uid()] = self.cache_sig
|
||||
|
||||
def configure(self):
|
||||
"""
|
||||
Detect the javac, java and jar programs
|
||||
"""
|
||||
# If JAVA_PATH is set, we prepend it to the path list
|
||||
java_path = self.environ['PATH'].split(os.pathsep)
|
||||
v = self.env
|
||||
|
||||
if 'JAVA_HOME' in self.environ:
|
||||
java_path = [os.path.join(self.environ['JAVA_HOME'], 'bin')] + java_path
|
||||
self.env['JAVA_HOME'] = [self.environ['JAVA_HOME']]
|
||||
|
||||
for x in 'javac java jar javadoc'.split():
|
||||
self.find_program(x, var=x.upper(), path_list=java_path)
|
||||
self.env[x.upper()] = self.cmd_to_list(self.env[x.upper()])
|
||||
|
||||
if 'CLASSPATH' in self.environ:
|
||||
v['CLASSPATH'] = self.environ['CLASSPATH']
|
||||
|
||||
if not v['JAR']: self.fatal('jar is required for making java packages')
|
||||
if not v['JAVAC']: self.fatal('javac is required for compiling java classes')
|
||||
|
||||
v['JARCREATE'] = 'cf' # can use cvf
|
||||
v['JAVACFLAGS'] = []
|
||||
|
||||
@conf
|
||||
def check_java_class(self, classname, with_classpath=None):
|
||||
"""
|
||||
Check if the specified java class exists
|
||||
|
||||
:param classname: class to check, like java.util.HashMap
|
||||
:type classname: string
|
||||
:param with_classpath: additional classpath to give
|
||||
:type with_classpath: string
|
||||
"""
|
||||
|
||||
javatestdir = '.waf-javatest'
|
||||
|
||||
classpath = javatestdir
|
||||
if self.env['CLASSPATH']:
|
||||
classpath += os.pathsep + self.env['CLASSPATH']
|
||||
if isinstance(with_classpath, str):
|
||||
classpath += os.pathsep + with_classpath
|
||||
|
||||
shutil.rmtree(javatestdir, True)
|
||||
os.mkdir(javatestdir)
|
||||
|
||||
Utils.writef(os.path.join(javatestdir, 'Test.java'), class_check_source)
|
||||
|
||||
# Compile the source
|
||||
self.exec_command(self.env['JAVAC'] + [os.path.join(javatestdir, 'Test.java')], shell=False)
|
||||
|
||||
# Try to run the app
|
||||
cmd = self.env['JAVA'] + ['-cp', classpath, 'Test', classname]
|
||||
self.to_log("%s\n" % str(cmd))
|
||||
found = self.exec_command(cmd, shell=False)
|
||||
|
||||
self.msg('Checking for java class %s' % classname, not found)
|
||||
|
||||
shutil.rmtree(javatestdir, True)
|
||||
|
||||
return found
|
||||
|
||||
@conf
def check_jni_headers(conf):
	"""
	Check for jni headers and libraries. On success the conf.env variables xxx_JAVA are added for use in C/C++ targets::

		def options(opt):
			opt.load('compiler_c')

		def configure(conf):
			conf.load('compiler_c java')
			conf.check_jni_headers()

		def build(bld):
			bld.shlib(source='a.c', target='app', use='JAVA')
	"""
	if not conf.env.CC_NAME and not conf.env.CXX_NAME:
		conf.fatal('load a compiler first (gcc, g++, ..)')

	if not conf.env.JAVA_HOME:
		conf.fatal('set JAVA_HOME in the system environment')

	# jni requires the jvm
	javaHome = conf.env['JAVA_HOME'][0]

	# locate the directory containing jni.h / jni_md.h
	inc_node = conf.root.find_dir(conf.env.JAVA_HOME[0] + '/include')
	if inc_node is None:
		inc_node = conf.root.find_dir(conf.env.JAVA_HOME[0] + '/../Headers') # think different?!
	if inc_node is None:
		conf.fatal('JAVA_HOME does not seem to be set properly')

	headers = inc_node.ant_glob('**/(jni|jni_md).h')
	incDirs = [h.parent.abspath() for h in headers]

	# locate the jvm runtime library somewhere below JAVA_HOME
	home_node = conf.root.find_dir(conf.env.JAVA_HOME[0])
	runtime_libs = home_node.ant_glob('**/*jvm.(so|dll|dylib)')
	libDirs = [l.parent.abspath() for l in runtime_libs] or [javaHome]

	# On windows, we need both the .dll and .lib to link. On my JDK, they are
	# in different directories...
	import_libs = home_node.ant_glob('**/*jvm.(lib)')
	if import_libs:
		libDirs = [[x, y.parent.abspath()] for x in libDirs for y in import_libs]

	# try each candidate directory until one links; for/else hits fatal()
	# only when every candidate failed
	for d in libDirs:
		try:
			conf.check(header_name='jni.h', define_name='HAVE_JNI_H', lib='jvm',
					libpath=d, includes=incDirs, uselib_store='JAVA', uselib='JAVA')
		except Exception:
			pass
		else:
			break
	else:
		conf.fatal('could not find lib jvm in %r (see config.log)' % libDirs)
|
||||
|
||||
|
90
third_party/waf/waf-light/waflib/Tools/kde4.py
vendored
Normal file
90
third_party/waf/waf-light/waflib/Tools/kde4.py
vendored
Normal file
|
@ -0,0 +1,90 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2006-2010 (ita)
|
||||
|
||||
"""
|
||||
Support for the KDE4 libraries and msgfmt
|
||||
"""
|
||||
|
||||
import os, sys, re
|
||||
from waflib import Options, TaskGen, Task, Utils
|
||||
from waflib.TaskGen import feature, after_method
|
||||
|
||||
@feature('msgfmt')
def apply_msgfmt(self):
	"""
	Process all languages to create .mo files and to install them::

		def build(bld):
			bld(features='msgfmt', langs='es de fr', appname='myapp', install_path='${KDE4_LOCALE_INSTALL_DIR}')
	"""
	# attributes shared by all languages can be resolved once
	inst = getattr(self, 'install_path', '${KDE4_LOCALE_INSTALL_DIR}')
	appname = getattr(self, 'appname', 'set_your_appname')
	mode = getattr(self, 'chmod', Utils.O644)

	for lang in self.to_list(self.langs):
		po = self.path.find_resource(lang + '.po')
		tsk = self.create_task('msgfmt', po, po.change_ext('.mo'))

		# only the last path component names the language
		langname = lang.split('/')[-1]

		dest = os.sep.join([inst, langname, 'LC_MESSAGES', appname + '.mo'])
		self.bld.install_as(dest, tsk.outputs[0], chmod=mode)
|
||||
|
||||
class msgfmt(Task.Task):
	"""
	Compile a gettext catalog (.po -> .mo) with msgfmt
	"""
	run_str = '${MSGFMT} ${SRC} -o ${TGT}'
	color = 'BLUE'
|
||||
|
||||
def configure(self):
	"""
	Detect kde4-config and set various variables for the *use* system::

		def options(opt):
			opt.load('compiler_cxx kde4')
		def configure(conf):
			conf.load('compiler_cxx kde4')
		def build(bld):
			bld.program(source='main.c', target='app', use='KDECORE KIO KHTML')
	"""
	kdeconfig = self.find_program('kde4-config')
	prefix = self.cmd_and_log('%s --prefix' % kdeconfig).strip()

	# the cmake module moved between KDE releases; probe both locations
	fname = '%s/share/apps/cmake/modules/KDELibsDependencies.cmake' % prefix
	try: os.stat(fname)
	except OSError:
		fname = '%s/share/kde4/apps/cmake/modules/KDELibsDependencies.cmake' % prefix
		try: os.stat(fname)
		except OSError: self.fatal('could not open %s' % fname)

	try:
		txt = Utils.readf(fname)
	except (OSError, IOError):
		self.fatal('could not read %s' % fname)

	# unfold line continuations and strip cmake comments
	txt = txt.replace('\\\n', '\n')
	fu = re.compile('#(.*)\n')
	txt = fu.sub('', txt)

	# raw string: '\s' and '\(' are invalid escape sequences in an ordinary
	# string literal and trigger SyntaxWarning/DeprecationWarning on modern
	# Python; the pattern itself is unchanged
	setregexp = re.compile(r'([sS][eE][tT]\s*\()\s*([^\s]+)\s+"([^"]+)"\)')
	found = setregexp.findall(txt)

	# import every set(KEY "value") pair into the environment
	for (_, key, val) in found:
		#print key, val
		self.env[key] = val

	# well well, i could just write an interpreter for cmake files
	self.env['LIB_KDECORE']= ['kdecore']
	self.env['LIB_KDEUI'] = ['kdeui']
	self.env['LIB_KIO'] = ['kio']
	self.env['LIB_KHTML'] = ['khtml']
	self.env['LIB_KPARTS'] = ['kparts']

	self.env['LIBPATH_KDECORE'] = [os.path.join(self.env.KDE4_LIB_INSTALL_DIR, 'kde4', 'devel'), self.env.KDE4_LIB_INSTALL_DIR]
	self.env['INCLUDES_KDECORE'] = [self.env['KDE4_INCLUDE_INSTALL_DIR']]
	self.env.append_value('INCLUDES_KDECORE', [self.env['KDE4_INCLUDE_INSTALL_DIR']+ os.sep + 'KDE'])

	# msgfmt is required by the 'msgfmt' feature above
	self.find_program('msgfmt', var='MSGFMT')
|
||||
|
59
third_party/waf/waf-light/waflib/Tools/ldc2.py
vendored
Normal file
59
third_party/waf/waf-light/waflib/Tools/ldc2.py
vendored
Normal file
|
@ -0,0 +1,59 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Alex Rønne Petersen, 2012 (alexrp/Zor)
|
||||
|
||||
import sys
|
||||
from waflib.Tools import ar, d
|
||||
from waflib.Configure import conf
|
||||
|
||||
@conf
def find_ldc2(conf):
	"""
	Find the program *ldc2* and set the variable *D*
	"""
	conf.find_program(['ldc2'], var='D')

	# guard against another D compiler being picked up under the same name
	version_banner = conf.cmd_and_log([conf.env.D, '-version'])
	if version_banner.find("based on DMD v2.") == -1:
		conf.fatal("detected compiler is not ldc2")
|
||||
|
||||
@conf
def common_flags_ldc2(conf):
	"""
	Set the D flags required by *ldc2*
	"""
	env = conf.env

	# compilation
	env['D_SRC_F'] = ['-c']
	env['D_TGT_F'] = '-of%s'
	env['DINC_ST'] = '-I%s'

	# linking: ldc2 forwards -L-prefixed options to the system linker
	env['D_LINKER'] = env['D']
	env['DLNK_SRC_F'] = ''
	env['DLNK_TGT_F'] = '-of%s'
	env['DSHLIB_MARKER'] = env['DSTLIB_MARKER'] = ''
	env['DSTLIB_ST'] = env['DSHLIB_ST'] = '-L-l%s'
	env['DSTLIBPATH_ST'] = env['DLIBPATH_ST'] = '-L-L%s'
	env['LINKFLAGS_dshlib'] = ['-L-shared']

	# D interface ("header") file generation
	env['DHEADER_ext'] = '.di'
	env['DFLAGS_d_with_header'] = ['-H', '-Hf']
	env['D_HDR_F'] = '%s'

	env['LINKFLAGS'] = []
	env['DFLAGS_dshlib'] = ['-relocation-model=pic']
|
||||
|
||||
def configure(conf):
	"""
	Configuration for *ldc2*
	"""
	conf.find_ldc2()
	# static libraries and generic D support
	for tool in ('ar', 'd'):
		conf.load(tool)
	conf.common_flags_ldc2()
	conf.d_platform_flags()
|
38
third_party/waf/waf-light/waflib/Tools/lua.py
vendored
Normal file
38
third_party/waf/waf-light/waflib/Tools/lua.py
vendored
Normal file
|
@ -0,0 +1,38 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Sebastian Schlingmann, 2008
|
||||
# Thomas Nagy, 2008-2010 (ita)
|
||||
|
||||
"""
|
||||
Lua support.
|
||||
|
||||
Compile *.lua* files into *.luac*::
|
||||
|
||||
def configure(conf):
|
||||
conf.load('lua')
|
||||
conf.env.LUADIR = '/usr/local/share/myapp/scripts/'
|
||||
def build(bld):
|
||||
bld(source='foo.lua')
|
||||
"""
|
||||
|
||||
from waflib.TaskGen import extension
|
||||
from waflib import Task, Utils
|
||||
|
||||
@extension('.lua')
def add_lua(self, node):
	"""Create a luac task for *node* and schedule installation if requested."""
	task = self.create_task('luac', node, node.change_ext('.luac'))
	# default destination is LUADIR when configured, otherwise no install
	default_dest = '${LUADIR}' if self.env.LUADIR else None
	dest = getattr(self, 'install_path', default_dest)
	if dest:
		self.bld.install_files(dest, task.outputs)
	return task
|
||||
|
||||
class luac(Task.Task):
	"""Byte-compile a lua source into a stripped .luac file."""
	color = 'PINK'
	run_str = '${LUAC} -s -o ${TGT} ${SRC}'
|
||||
|
||||
def configure(conf):
	"""
	Detect the luac compiler and set *conf.env.LUAC*
	"""
	# luac is the only program required; LUADIR is set by the user wscript
	# (see the module docstring) to enable installation of compiled files
	conf.find_program('luac', var='LUAC')
|
||||
|
1060
third_party/waf/waf-light/waflib/Tools/msvc.py
vendored
Normal file
1060
third_party/waf/waf-light/waflib/Tools/msvc.py
vendored
Normal file
File diff suppressed because it is too large
Load diff
24
third_party/waf/waf-light/waflib/Tools/nasm.py
vendored
Normal file
24
third_party/waf/waf-light/waflib/Tools/nasm.py
vendored
Normal file
|
@ -0,0 +1,24 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2008-2010 (ita)
|
||||
|
||||
"""
|
||||
Nasm tool (asm processing)
|
||||
"""
|
||||
|
||||
import waflib.Tools.asm # leave this
|
||||
from waflib.TaskGen import feature
|
||||
|
||||
@feature('asm')
def apply_nasm_vars(self):
	"""provided for compatibility: forward the *nasm_flags* attribute into ASFLAGS"""
	extra = self.to_list(getattr(self, 'nasm_flags', []))
	self.env.append_value('ASFLAGS', extra)
|
||||
|
||||
def configure(conf):
	"""
	Detect nasm/yasm and set the variable *AS*
	"""
	# find_program already stores the result in conf.env.AS; the previous
	# local binding of the return value was unused
	conf.find_program(['nasm', 'yasm'], var='AS')
	conf.env.AS_TGT_F = ['-o']
	conf.env.ASLNK_TGT_F = ['-o']
	conf.load('asm')
|
157
third_party/waf/waf-light/waflib/Tools/perl.py
vendored
Normal file
157
third_party/waf/waf-light/waflib/Tools/perl.py
vendored
Normal file
|
@ -0,0 +1,157 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# andersg at 0x63.nu 2007
|
||||
# Thomas Nagy 2010 (ita)
|
||||
|
||||
"""
|
||||
Support for Perl extensions. A C/C++ compiler is required::
|
||||
|
||||
def options(opt):
|
||||
opt.load('compiler_c perl')
|
||||
def configure(conf):
|
||||
conf.load('compiler_c perl')
|
||||
conf.check_perl_version((5,6,0))
|
||||
conf.check_perl_ext_devel()
|
||||
conf.check_perl_module('Cairo')
|
||||
conf.check_perl_module('Devel::PPPort 4.89')
|
||||
def build(bld):
|
||||
bld(
|
||||
features = 'c cshlib perlext',
|
||||
source = 'Mytest.xs',
|
||||
target = 'Mytest',
|
||||
install_path = '${ARCHDIR_PERL}/auto')
|
||||
bld.install_files('${ARCHDIR_PERL}', 'Mytest.pm')
|
||||
"""
|
||||
|
||||
import os
|
||||
from waflib import Task, Options, Utils
|
||||
from waflib.Configure import conf
|
||||
from waflib.TaskGen import extension, feature, before_method
|
||||
|
||||
@before_method('apply_incpaths', 'apply_link', 'propagate_uselib_vars')
@feature('perlext')
def init_perlext(self):
	"""
	Prepare a perl-extension task generator: apply the PERLEXT
	use-variables and name the shared object the way perl expects
	(no *lib* prefix, perl-specific suffix).
	"""
	uselib = self.to_list(getattr(self, 'uselib', []))
	if 'PERLEXT' not in uselib:
		uselib.append('PERLEXT')
	self.uselib = uselib
	pattern = self.env['perlext_PATTERN']
	self.env['cshlib_PATTERN'] = pattern
	self.env['cxxshlib_PATTERN'] = pattern
|
||||
|
||||
@extension('.xs')
def xsubpp_file(self, node):
	"""
	Create :py:class:`waflib.Tools.perl.xsubpp` tasks to process *.xs* files
	"""
	c_node = node.change_ext('.c')
	self.create_task('xsubpp', node, c_node)
	# feed the generated C file back into the build sources
	self.source.append(c_node)
|
||||
|
||||
class xsubpp(Task.Task):
	"""
	Process *.xs* files
	"""
	color = 'BLUE'
	ext_out = ['.h']
	run_str = '${PERL} ${XSUBPP} -noprototypes -typemap ${EXTUTILS_TYPEMAP} ${SRC} > ${TGT}'
|
||||
|
||||
@conf
def check_perl_version(self, minver=None):
	"""
	Check if Perl is installed, and set the variable PERL.
	minver is supposed to be a tuple
	"""
	wanted = '.'.join(map(str, minver)) if minver else ''
	self.start_msg('Checking for minimum perl version %s' % wanted)

	# a perl given on the command line wins over PATH lookup
	perl = getattr(Options.options, 'perlbinary', None)
	if not perl:
		perl = self.find_program('perl', var='PERL')
	if not perl:
		self.end_msg("Perl not found", color="YELLOW")
		return False

	self.env['PERL'] = perl

	ok = True
	# $^V printed with %vd gives a dotted version such as 5.16.3
	version = self.cmd_and_log([perl, "-e", 'printf \"%vd\", $^V'])
	if not version:
		ok = False
		version = "Unknown"
	elif minver is not None:
		found = tuple(map(int, version.split(".")))
		if found < minver:
			ok = False

	self.end_msg(version, color=ok and "GREEN" or "YELLOW")
	return ok
|
||||
|
||||
@conf
def check_perl_module(self, module):
	"""
	Check if specified perlmodule is installed.

	The minimum version can be specified by specifying it after modulename
	like this::

		def configure(conf):
			conf.check_perl_module("Some::Module 2.92")
	"""
	self.start_msg('perl module %s' % module)
	try:
		out = self.cmd_and_log([self.env['PERL'], '-e', 'use %s' % module])
	except Exception:
		# 'use' failed: the module (or version) is not available
		self.end_msg(False)
		return None
	self.end_msg(out or True)
	return out
|
||||
|
||||
@conf
def check_perl_ext_devel(self):
	"""
	Check for configuration needed to build perl extensions.

	Sets different xxx_PERLEXT variables in the environment.

	Also sets the ARCHDIR_PERL variable useful as installation path,
	which can be overridden by ``--with-perl-archdir`` option.
	"""
	env = self.env
	perl = env.PERL
	if not perl:
		self.fatal('find perl first')

	def ask_perl(snippet):
		# run a perl one-liner and return its raw output
		return self.cmd_and_log(perl + snippet)

	def ask_perl_list(snippet):
		return Utils.to_list(ask_perl(snippet))

	env['LINKFLAGS_PERLEXT'] = ask_perl_list(" -MConfig -e'print $Config{lddlflags}'")
	env['INCLUDES_PERLEXT'] = ask_perl_list(" -MConfig -e'print \"$Config{archlib}/CORE\"'")
	env['CFLAGS_PERLEXT'] = ask_perl_list(" -MConfig -e'print \"$Config{ccflags} $Config{cccdlflags}\"'")

	env['XSUBPP'] = ask_perl_list(" -MConfig -e'print \"$Config{privlib}/ExtUtils/xsubpp$Config{exe_ext}\"'")
	env['EXTUTILS_TYPEMAP'] = ask_perl_list(" -MConfig -e'print \"$Config{privlib}/ExtUtils/typemap\"'")

	archdir = getattr(Options.options, 'perlarchdir', None)
	if archdir:
		env['ARCHDIR_PERL'] = archdir
	else:
		env['ARCHDIR_PERL'] = ask_perl(" -MConfig -e'print $Config{sitearch}'")

	env['perlext_PATTERN'] = '%s.' + ask_perl(" -MConfig -e'print $Config{dlext}'")
|
||||
|
||||
def options(opt):
	"""
	Add the ``--with-perl-archdir`` and ``--with-perl-binary`` command-line options.
	"""
	# --with-perl-binary: alternate interpreter, read by check_perl_version()
	opt.add_option('--with-perl-binary', type='string', dest='perlbinary', help = 'Specify alternate perl binary', default=None)
	# --with-perl-archdir: overrides $Config{sitearch}, read by check_perl_ext_devel()
	opt.add_option('--with-perl-archdir', type='string', dest='perlarchdir', help = 'Specify directory where to install arch specific files', default=None)
|
||||
|
540
third_party/waf/waf-light/waflib/Tools/python.py
vendored
Normal file
540
third_party/waf/waf-light/waflib/Tools/python.py
vendored
Normal file
|
@ -0,0 +1,540 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2007-2010 (ita)
|
||||
# Gustavo Carneiro (gjc), 2007
|
||||
|
||||
"""
|
||||
Support for Python, detect the headers and libraries and provide
|
||||
*use* variables to link C/C++ programs against them::
|
||||
|
||||
def options(opt):
|
||||
opt.load('compiler_c python')
|
||||
def configure(conf):
|
||||
conf.load('compiler_c python')
|
||||
conf.check_python_version((2,4,2))
|
||||
conf.check_python_headers()
|
||||
def build(bld):
|
||||
bld.program(features='pyembed', source='a.c', target='myprog')
|
||||
bld.shlib(features='pyext', source='b.c', target='mylib')
|
||||
"""
|
||||
|
||||
import os, sys
|
||||
from waflib import Utils, Options, Errors, Logs
|
||||
from waflib.TaskGen import extension, before_method, after_method, feature
|
||||
from waflib.Configure import conf
|
||||
|
||||
FRAG = '''
|
||||
#include <Python.h>
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
void Py_Initialize(void);
|
||||
void Py_Finalize(void);
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif
|
||||
int main(int argc, char **argv)
|
||||
{
|
||||
(void)argc; (void)argv;
|
||||
Py_Initialize();
|
||||
Py_Finalize();
|
||||
return 0;
|
||||
}
|
||||
'''
|
||||
"""
|
||||
Piece of C/C++ code used in :py:func:`waflib.Tools.python.check_python_headers`
|
||||
"""
|
||||
|
||||
INST = '''
|
||||
import sys, py_compile
|
||||
py_compile.compile(sys.argv[1], sys.argv[2], sys.argv[3])
|
||||
'''
|
||||
"""
|
||||
Piece of Python code used in :py:func:`waflib.Tools.python.install_pyfile` for installing python files
|
||||
"""
|
||||
|
||||
DISTUTILS_IMP = ['from distutils.sysconfig import get_config_var, get_python_lib']
|
||||
|
||||
@extension('.py')
def process_py(self, node):
	"""
	Add a callback using :py:func:`waflib.Tools.python.install_pyfile` to install a python file
	"""
	# nothing to do unless we are in an install/uninstall run
	if not getattr(getattr(self, 'bld', None), 'is_install', None):
		return

	if not hasattr(self, 'install_path'):
		self.install_path = '${PYTHONDIR}'
	elif not self.install_path:
		# installation explicitly disabled by the user
		return

	# i wonder now why we wanted to do this after the build is over
	# issue #901: people want to preserve the structure of installed files
	def do_install(ctx):
		root = getattr(self, 'install_from', None)
		if root:
			root = self.path.find_dir(root)
		install_pyfile(self, node, root)
	self.bld.add_post_fun(do_install)
|
||||
|
||||
def install_pyfile(self, node, install_from=None):
	"""
	Execute the installation of a python file, and byte-compile the
	installed copy (.pyc/.pyo) or remove the compiled files on uninstall.

	:param node: python file
	:type node: :py:class:`waflib.Node.Node`
	:param install_from: directory the relative install path is computed from (defaults to the node's folder)
	"""

	from_node = install_from or node.parent
	tsk = self.bld.install_as(self.install_path + '/' + node.path_from(from_node), node, postpone=False)
	path = tsk.get_install_path()

	if self.bld.is_install < 0:
		# uninstall run: also drop the byte-compiled companions
		Logs.info("+ removing byte compiled python files")
		for x in 'co':
			try:
				os.remove(path + x)
			except OSError:
				pass

	if self.bld.is_install > 0:
		try:
			st1 = os.stat(path)
		except OSError:
			# NOTE(review): st1 stays unbound here, so the mtime comparison
			# below would raise NameError if a stale .pyc/.pyo exists —
			# confirm this path is really unreachable in practice
			Logs.error('The python file is missing, this should not happen')

		for x in ['c', 'o']:
			# PYC/PYO env flags globally enable/disable byte-compilation
			do_inst = self.env['PY' + x.upper()]
			try:
				st2 = os.stat(path + x)
			except OSError:
				pass
			else:
				# skip when the compiled file is already up to date
				if st1.st_mtime <= st2.st_mtime:
					do_inst = False

			if do_inst:
				# .pyo compilation needs the optimization flag (PYFLAGS_OPT)
				lst = (x == 'o') and [self.env['PYFLAGS_OPT']] or []
				(a, b, c) = (path, path + x, tsk.get_install_path(destdir=False) + x)
				argv = self.env['PYTHON'] + lst + ['-c', INST, a, b, c]
				Logs.info('+ byte compiling %r' % (path + x))
				env = self.env.env or None
				ret = Utils.subprocess.Popen(argv, env=env).wait()
				if ret:
					raise Errors.WafError('py%s compilation failed %r' % (x, path))
|
||||
|
||||
@feature('py')
def feature_py(self):
	"""
	Dummy feature which does nothing
	"""
	# declaring features='py' is enough: the real work happens in the
	# '.py' extension handler (process_py)
	pass
|
||||
|
||||
@feature('pyext')
@before_method('propagate_uselib_vars', 'apply_link')
@after_method('apply_bundle')
def init_pyext(self):
	"""
	Prepare a task generator building a python extension: force the PYEXT
	use-variables and name the shared object the way the interpreter
	expects (no *lib* prefix, interpreter-specific suffix).
	"""
	uselib = self.to_list(getattr(self, 'uselib', []))
	if 'PYEXT' not in uselib:
		uselib.append('PYEXT')
	self.uselib = uselib

	# override shlib_PATTERN set by the osx module
	pattern = self.env.pyext_PATTERN
	self.env.cshlib_PATTERN = self.env.cxxshlib_PATTERN = self.env.macbundle_PATTERN = pattern
	self.env.fcshlib_PATTERN = self.env.dshlib_PATTERN = pattern

	if not hasattr(self, 'install_path'):
		self.install_path = '${PYTHONARCHDIR}'
|
||||
|
||||
@feature('pyext')
@before_method('apply_link', 'apply_bundle')
def set_bundle(self):
	# on OSX, python extensions are built as mac bundles
	if Utils.unversioned_sys_platform() == 'darwin':
		self.mac_bundle = True
|
||||
|
||||
@before_method('propagate_uselib_vars')
@feature('pyembed')
def init_pyembed(self):
	"""
	Add the PYEMBED variable.
	"""
	uselib = self.to_list(getattr(self, 'uselib', []))
	if 'PYEMBED' not in uselib:
		uselib.append('PYEMBED')
	self.uselib = uselib
|
||||
|
||||
@conf
def get_python_variables(self, variables, imports=None):
	"""
	Spawn a new python process to dump configuration variables

	:param variables: variables to print
	:type variables: list of string
	:param imports: one import by element
	:type imports: list of string
	:return: the variable values
	:rtype: list of string
	"""
	if not imports:
		try:
			imports = self.python_imports
		except AttributeError:
			# default to the distutils helpers (see DISTUTILS_IMP)
			imports = DISTUTILS_IMP

	program = list(imports) # copy
	program.append('')
	for v in variables:
		# each expression is printed repr()'d so it can be parsed back below
		program.append("print(repr(%s))" % v)
	os_env = dict(os.environ)
	try:
		del os_env['MACOSX_DEPLOYMENT_TARGET'] # see comments in the OSX tool
	except KeyError:
		pass

	try:
		out = self.cmd_and_log(self.env.PYTHON + ['-c', '\n'.join(program)], env=os_env)
	except Errors.WafError:
		self.fatal('The distutils module is unusable: install "python-devel"?')
	self.to_log(out)
	return_values = []
	# parse one repr() per line: None, quoted strings or integers
	for s in out.split('\n'):
		s = s.strip()
		if not s:
			continue
		if s == 'None':
			return_values.append(None)
		elif (s[0] == "'" and s[-1] == "'") or (s[0] == '"' and s[-1] == '"'):
			# quoted repr of a string; eval'ing it is acceptable since the
			# data comes from the interpreter we just spawned ourselves
			return_values.append(eval(s))
		elif s[0].isdigit():
			return_values.append(int(s))
		else: break
	return return_values
|
||||
|
||||
@conf
def check_python_headers(conf):
	"""
	Check for headers and libraries necessary to extend or embed python by using the module *distutils*.
	On success the environment variables xxx_PYEXT and xxx_PYEMBED are added:

	* PYEXT: for compiling python extensions
	* PYEMBED: for embedding a python interpreter
	"""

	# FIXME rewrite

	env = conf.env
	if not env['CC_NAME'] and not env['CXX_NAME']:
		conf.fatal('load a compiler first (gcc, g++, ..)')

	if not env['PYTHON_VERSION']:
		conf.check_python_version()

	pybin = conf.env.PYTHON
	if not pybin:
		conf.fatal('Could not find the python executable')

	# ask distutils for the relevant sysconfig variables in one subprocess
	v = 'prefix SO LDFLAGS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED MACOSX_DEPLOYMENT_TARGET LDSHARED CFLAGS'.split()
	try:
		lst = conf.get_python_variables(["get_config_var('%s') or ''" % x for x in v])
	except RuntimeError:
		conf.fatal("Python development headers not found (-v for details).")

	vals = ['%s = %r' % (x, y) for (x, y) in zip(v, lst)]
	conf.to_log("Configuration returned from %r:\n%r\n" % (pybin, '\n'.join(vals)))

	dct = dict(zip(v, lst))
	x = 'MACOSX_DEPLOYMENT_TARGET'
	if dct[x]:
		conf.env[x] = conf.environ[x] = dct[x]

	env['pyext_PATTERN'] = '%s' + dct['SO'] # not a mistake

	# Check for python libraries for embedding

	all_flags = dct['LDFLAGS'] + ' ' + dct['CFLAGS']
	conf.parse_flags(all_flags, 'PYEMBED')

	all_flags = dct['LDFLAGS'] + ' ' + dct['LDSHARED'] + ' ' + dct['CFLAGS']
	conf.parse_flags(all_flags, 'PYEXT')

	result = None
	#name = 'python' + env['PYTHON_VERSION']

	# TODO simplify this
	# try all the known library naming schemes (pythonX.Y, pythonX.Ym, pythonXY)
	for name in ('python' + env['PYTHON_VERSION'], 'python' + env['PYTHON_VERSION'] + 'm', 'python' + env['PYTHON_VERSION'].replace('.', '')):

		# LIBPATH_PYEMBED is already set; see if it works.
		if not result and env['LIBPATH_PYEMBED']:
			path = env['LIBPATH_PYEMBED']
			conf.to_log("\n\n# Trying default LIBPATH_PYEMBED: %r\n" % path)
			result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in LIBPATH_PYEMBED' % name)

		if not result and dct['LIBDIR']:
			path = [dct['LIBDIR']]
			conf.to_log("\n\n# try again with -L$python_LIBDIR: %r\n" % path)
			result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in LIBDIR' % name)

		if not result and dct['LIBPL']:
			path = [dct['LIBPL']]
			conf.to_log("\n\n# try again with -L$python_LIBPL (some systems don't install the python library in $prefix/lib)\n")
			result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in python_LIBPL' % name)

		if not result:
			path = [os.path.join(dct['prefix'], "libs")]
			conf.to_log("\n\n# try again with -L$prefix/libs, and pythonXY name rather than pythonX.Y (win32)\n")
			result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in $prefix/libs' % name)

		if result:
			break # do not forget to set LIBPATH_PYEMBED

	if result:
		# 'path' and 'name' are left over from the successful iteration above
		env['LIBPATH_PYEMBED'] = path
		env.append_value('LIB_PYEMBED', [name])
	else:
		conf.to_log("\n\n### LIB NOT FOUND\n")

	# under certain conditions, python extensions must link to
	# python libraries, not just python embedding programs.
	if (Utils.is_win32 or sys.platform.startswith('os2')
		or dct['Py_ENABLE_SHARED']):
		env['LIBPATH_PYEXT'] = env['LIBPATH_PYEMBED']
		env['LIB_PYEXT'] = env['LIB_PYEMBED']

	# We check that pythonX.Y-config exists, and if it exists we
	# use it to get only the includes, else fall back to distutils.
	num = '.'.join(env['PYTHON_VERSION'].split('.')[:2])
	conf.find_program([''.join(pybin) + '-config', 'python%s-config' % num, 'python-config-%s' % num, 'python%sm-config' % num], var='PYTHON_CONFIG', mandatory=False)

	includes = []
	if conf.env.PYTHON_CONFIG:
		for incstr in conf.cmd_and_log([ conf.env.PYTHON_CONFIG, '--includes']).strip().split():
			# strip the -I or /I
			if (incstr.startswith('-I') or incstr.startswith('/I')):
				incstr = incstr[2:]
			# append include path, unless already given
			if incstr not in includes:
				includes.append(incstr)
		conf.to_log("Include path for Python extensions (found via python-config --includes): %r\n" % (includes,))
		env['INCLUDES_PYEXT'] = includes
		env['INCLUDES_PYEMBED'] = includes
	else:
		conf.to_log("Include path for Python extensions "
			"(found via distutils module): %r\n" % (dct['INCLUDEPY'],))
		env['INCLUDES_PYEXT'] = [dct['INCLUDEPY']]
		env['INCLUDES_PYEMBED'] = [dct['INCLUDEPY']]

	# Code using the Python API needs to be compiled with -fno-strict-aliasing
	if env['CC_NAME'] == 'gcc':
		env.append_value('CFLAGS_PYEMBED', ['-fno-strict-aliasing'])
		env.append_value('CFLAGS_PYEXT', ['-fno-strict-aliasing'])
	if env['CXX_NAME'] == 'gcc':
		env.append_value('CXXFLAGS_PYEMBED', ['-fno-strict-aliasing'])
		env.append_value('CXXFLAGS_PYEXT', ['-fno-strict-aliasing'])

	if env.CC_NAME == "msvc":
		# reuse the flags distutils would pass to the MSVC compiler
		from distutils.msvccompiler import MSVCCompiler
		dist_compiler = MSVCCompiler()
		dist_compiler.initialize()
		env.append_value('CFLAGS_PYEXT', dist_compiler.compile_options)
		env.append_value('CXXFLAGS_PYEXT', dist_compiler.compile_options)
		env.append_value('LINKFLAGS_PYEXT', dist_compiler.ldflags_shared)

	# See if it compiles
	try:
		conf.check(header_name='Python.h', define_name='HAVE_PYTHON_H',
			uselib='PYEMBED', fragment=FRAG,
			errmsg=':-(')
	except conf.errors.ConfigurationError:
		# python3.2, oh yeah
		# fall back to python-config for both pyembed and pyext flags
		xx = conf.env.CXX_NAME and 'cxx' or 'c'

		flags = ['--cflags', '--libs', '--ldflags']

		for f in flags:
			conf.check_cfg(msg='Asking python-config for pyembed %s flags' % f,
				path=conf.env.PYTHON_CONFIG, package='', uselib_store='PYEMBED', args=[f])
		conf.check(header_name='Python.h', define_name='HAVE_PYTHON_H', msg='Getting pyembed flags from python-config',
			fragment=FRAG, errmsg='Could not build a python embedded interpreter',
			features='%s %sprogram pyembed' % (xx, xx))

		for f in flags:
			conf.check_cfg(msg='Asking python-config for pyext %s flags' % f,
				path=conf.env.PYTHON_CONFIG, package='', uselib_store='PYEXT', args=[f])
		conf.check(header_name='Python.h', define_name='HAVE_PYTHON_H', msg='Getting pyext flags from python-config',
			features='%s %sshlib pyext' % (xx, xx), fragment=FRAG, errmsg='Could not build python extensions')
|
||||
|
||||
@conf
def check_python_version(conf, minver=None):
	"""
	Check if the python interpreter is found matching a given minimum version.
	minver should be a tuple, eg. to check for python >= 2.4.2 pass (2,4,2) as minver.

	If successful, PYTHON_VERSION is defined as 'MAJOR.MINOR'
	(eg. '2.4') of the actual python version found, and PYTHONDIR is
	defined, pointing to the site-packages directory appropriate for
	this python version, where modules/packages/extensions should be
	installed.

	:param minver: minimum version
	:type minver: tuple of int
	"""
	assert minver is None or isinstance(minver, tuple)
	pybin = conf.env['PYTHON']
	if not pybin:
		conf.fatal('could not find the python executable')

	# Get python version string
	cmd = pybin + ['-c', 'import sys\nfor x in sys.version_info: print(str(x))']
	Logs.debug('python: Running python command %r' % cmd)
	lines = conf.cmd_and_log(cmd).split()
	assert len(lines) == 5, "found %i lines, expected 5: %r" % (len(lines), lines)
	# mirrors sys.version_info: (major, minor, micro, releaselevel, serial)
	pyver_tuple = (int(lines[0]), int(lines[1]), int(lines[2]), lines[3], int(lines[4]))

	# compare python version with the minimum required
	result = (minver is None) or (pyver_tuple >= minver)

	if result:
		# define useful environment variables
		pyver = '.'.join([str(x) for x in pyver_tuple[:2]])
		conf.env['PYTHON_VERSION'] = pyver

		# an explicit PYTHONDIR in the environment overrides distutils
		if 'PYTHONDIR' in conf.environ:
			pydir = conf.environ['PYTHONDIR']
		else:
			if Utils.is_win32:
				(python_LIBDEST, pydir) = conf.get_python_variables(
					["get_config_var('LIBDEST') or ''",
					"get_python_lib(standard_lib=0, prefix=%r) or ''" % conf.env['PREFIX']])
			else:
				python_LIBDEST = None
				(pydir,) = conf.get_python_variables( ["get_python_lib(standard_lib=0, prefix=%r) or ''" % conf.env['PREFIX']])
			if python_LIBDEST is None:
				if conf.env['LIBDIR']:
					python_LIBDEST = os.path.join(conf.env['LIBDIR'], "python" + pyver)
				else:
					python_LIBDEST = os.path.join(conf.env['PREFIX'], "lib", "python" + pyver)


		if 'PYTHONARCHDIR' in conf.environ:
			pyarchdir = conf.environ['PYTHONARCHDIR']
		else:
			(pyarchdir, ) = conf.get_python_variables( ["get_python_lib(plat_specific=1, standard_lib=0, prefix=%r) or ''" % conf.env['PREFIX']])
			if not pyarchdir:
				pyarchdir = pydir

		if hasattr(conf, 'define'): # conf.define is added by the C tool, so may not exist
			conf.define('PYTHONDIR', pydir)
			conf.define('PYTHONARCHDIR', pyarchdir)

		conf.env['PYTHONDIR'] = pydir
		conf.env['PYTHONARCHDIR'] = pyarchdir

	# Feedback
	pyver_full = '.'.join(map(str, pyver_tuple[:3]))
	if minver is None:
		conf.msg('Checking for python version', pyver_full)
	else:
		minver_str = '.'.join(map(str, minver))
		# NOTE(review): '">= %s" % (minver_str,) and ...' is always truthy, so
		# this always yields 'GREEN' regardless of the comparison — the intent
		# was probably "result and 'GREEN' or 'YELLOW'"; confirm before changing
		conf.msg('Checking for python version', pyver_tuple, ">= %s" % (minver_str,) and 'GREEN' or 'YELLOW')

	if not result:
		conf.fatal('The python version is too old, expecting %r' % (minver,))
|
||||
|
||||
PYTHON_MODULE_TEMPLATE = '''
|
||||
import %s as current_module
|
||||
version = getattr(current_module, '__version__', None)
|
||||
if version is not None:
|
||||
print(str(version))
|
||||
else:
|
||||
print('unknown version')
|
||||
'''
|
||||
|
||||
@conf
def check_python_module(conf, module_name, condition=''):
	"""
	Check if the selected python interpreter can import the given python module::

		def configure(conf):
			conf.check_python_module('pygccxml')
			conf.check_python_module('re', condition="ver > num(2, 0, 4) and ver <= num(3, 0, 0)")

	:param module_name: module
	:type module_name: string
	:param condition: optional expression evaluated against the module version;
		the names ``ver`` (the detected version) and ``num`` (version constructor)
		are available in it
	:type condition: string
	"""
	msg = 'Python module %s' % module_name
	if condition:
		msg = '%s (%s)' % (msg, condition)
	conf.start_msg(msg)
	try:
		# run the interpreter; PYTHON_MODULE_TEMPLATE prints the version or 'unknown version'
		ret = conf.cmd_and_log(conf.env['PYTHON'] + ['-c', PYTHON_MODULE_TEMPLATE % module_name])
	except Exception:
		conf.end_msg(False)
		conf.fatal('Could not find the python module %r' % module_name)

	ret = ret.strip()
	if condition:
		# the message must be closed before a possible conf.fatal below
		conf.end_msg(ret)
		if ret == 'unknown version':
			conf.fatal('Could not check the %s version' % module_name)

		from distutils.version import LooseVersion
		def num(*k):
			# accept either num(2, 0, 4) or num("2.0.4")
			if isinstance(k[0], int):
				return LooseVersion('.'.join([str(x) for x in k]))
			else:
				return LooseVersion(k[0])
		# the condition comes from the project's wscript, i.e. trusted build
		# code - eval is acceptable here, never feed it external input
		d = {'num': num, 'ver': LooseVersion(ret)}
		ev = eval(condition, {}, d)
		if not ev:
			conf.fatal('The %s version does not satisfy the requirements' % module_name)
	else:
		if ret == 'unknown version':
			conf.end_msg(True)
		else:
			conf.end_msg(ret)
|
||||
|
||||
def configure(conf):
	"""
	Detect the python interpreter and set the default byte-compilation flags.
	"""
	try:
		conf.find_program('python', var='PYTHON')
	except conf.errors.ConfigurationError:
		# fall back on the interpreter running waf itself
		Logs.warn("could not find a python executable, setting to sys.executable '%s'" % sys.executable)
		conf.env.PYTHON = sys.executable

	if conf.env.PYTHON != sys.executable:
		Logs.warn("python executable %r differs from system %r" % (conf.env.PYTHON, sys.executable))
	conf.env.PYTHON = conf.cmd_to_list(conf.env.PYTHON)

	env = conf.env
	# command used to byte-compile one file (source in argv[1], target in argv[2])
	env['PYCMD'] = '"import sys, py_compile;py_compile.compile(sys.argv[1], sys.argv[2])"'
	env['PYFLAGS'] = ''
	env['PYFLAGS_OPT'] = '-O'

	# install .pyc/.pyo files unless --nopyc/--nopyo were given (see options below)
	env['PYC'] = getattr(Options.options, 'pyc', 1)
	env['PYO'] = getattr(Options.options, 'pyo', 1)
|
||||
|
||||
def options(opt):
	"""
	Add the options ``--nopyc`` and ``--nopyo``
	"""
	# both default to "install" (1) and are switched off by the flag
	for flag, dest, kind in (
			('--nopyc', 'pyc', 'bytecode compiled .pyc'),
			('--nopyo', 'pyo', 'optimised compiled .pyo')):
		opt.add_option(flag,
			action='store_false',
			default=1,
			help='Do not install %s files (configuration) [Default:install]' % kind,
			dest=dest)
|
||||
|
715
third_party/waf/waf-light/waflib/Tools/qt4.py
vendored
Normal file
715
third_party/waf/waf-light/waflib/Tools/qt4.py
vendored
Normal file
|
@ -0,0 +1,715 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2006-2010 (ita)
|
||||
|
||||
"""
|
||||
|
||||
Tool Description
|
||||
================
|
||||
|
||||
This tool helps with finding Qt4 tools and libraries,
|
||||
and also provides syntactic sugar for using Qt4 tools.
|
||||
|
||||
The following snippet illustrates the tool usage::
|
||||
|
||||
def options(opt):
|
||||
opt.load('compiler_cxx qt4')
|
||||
|
||||
def configure(conf):
|
||||
conf.load('compiler_cxx qt4')
|
||||
|
||||
def build(bld):
|
||||
bld(
|
||||
features = 'qt4 cxx cxxprogram',
|
||||
uselib = 'QTCORE QTGUI QTOPENGL QTSVG',
|
||||
source = 'main.cpp textures.qrc aboutDialog.ui',
|
||||
target = 'window',
|
||||
)
|
||||
|
||||
Here, the UI description and resource files will be processed
|
||||
to generate code.
|
||||
|
||||
Usage
|
||||
=====
|
||||
|
||||
Load the "qt4" tool.
|
||||
|
||||
You also need to edit your sources accordingly:
|
||||
|
||||
- the normal way of doing things is to have your C++ files
|
||||
include the .moc file.
|
||||
This is regarded as the best practice (and provides much faster
|
||||
compilations).
|
||||
It also implies that the include paths have been set properly.
|
||||
|
||||
- to have the include paths added automatically, use the following::
|
||||
|
||||
from waflib.TaskGen import feature, before_method, after_method
|
||||
@feature('cxx')
|
||||
@after_method('process_source')
|
||||
@before_method('apply_incpaths')
|
||||
def add_includes_paths(self):
|
||||
incs = set(self.to_list(getattr(self, 'includes', '')))
|
||||
for x in self.compiled_tasks:
|
||||
incs.add(x.inputs[0].parent.path_from(self.path))
|
||||
self.includes = list(incs)
|
||||
|
||||
Note: another tool provides Qt processing that does not require
|
||||
.moc includes, see 'playground/slow_qt/'.
|
||||
|
||||
A few options (--qt{dir,bin,...}) and environment variables
|
||||
(QT4_{ROOT,DIR,MOC,UIC,XCOMPILE}) allow finer tuning of the tool,
|
||||
tool path selection, etc; please read the source for more info.
|
||||
|
||||
"""
|
||||
|
||||
# SAX is used to parse *.qrc* resource files; if the xml module is missing,
# the rcc dependency scanner degrades gracefully (has_xml is checked there)
try:
	from xml.sax import make_parser
	from xml.sax.handler import ContentHandler
except ImportError:
	has_xml = False
	ContentHandler = object
else:
	has_xml = True
|
||||
|
||||
import os, sys
|
||||
from waflib.Tools import c_preproc, cxx
|
||||
from waflib import Task, Utils, Options, Errors
|
||||
from waflib.TaskGen import feature, after_method, extension
|
||||
from waflib.Configure import conf
|
||||
from waflib import Logs
|
||||
|
||||
MOC_H = ['.h', '.hpp', '.hxx', '.hh']
"""
File extensions associated to the .moc files
"""

EXT_RCC = ['.qrc']
"""
File extension for the resource (.qrc) files
"""

EXT_UI = ['.ui']
"""
File extension for the user interface (.ui) files
"""

EXT_QT4 = ['.cpp', '.cc', '.cxx', '.C']
"""
File extensions of C++ files that may require a .moc processing
"""

# Qt4 modules probed during configuration (see find_qt4_libraries below)
QT4_LIBS = "QtCore QtGui QtUiTools QtNetwork QtOpenGL QtSql QtSvg QtTest QtXml QtXmlPatterns QtWebKit Qt3Support QtHelp QtScript QtDeclarative QtDesigner"
|
||||
|
||||
class qxx(Task.classes['cxx']):
	"""
	Each C++ file can have zero or several .moc files to create.
	They are known only when the files are scanned (preprocessor)
	To avoid scanning the c++ files each time (parsing C/C++), the results
	are retrieved from the task cache (bld.node_deps/bld.raw_deps).
	The moc tasks are also created *dynamically* during the build.
	"""

	def __init__(self, *k, **kw):
		Task.Task.__init__(self, *k, **kw)
		# set once the moc tasks for this file have been created
		self.moc_done = 0

	def scan(self):
		"""
		Re-use the C/C++ scanner, but remove the moc files from the dependencies
		since the .cpp file already depends on all the headers
		"""
		(nodes, names) = c_preproc.scan(self)
		lst = []
		for x in nodes:
			# short lists, no need to use sets
			if x.name.endswith('.moc'):
				# record the moc file as an unresolved name instead of a node
				s = x.path_from(self.inputs[0].parent.get_bld())
				if s not in names:
					names.append(s)
			else:
				lst.append(x)
		return (lst, names)

	def runnable_status(self):
		"""
		Compute the task signature to make sure the scanner was executed. Create the
		moc tasks by using :py:meth:`waflib.Tools.qt4.qxx.add_moc_tasks` (if necessary),
		then postpone the task execution (there is no need to recompute the task signature).
		"""
		if self.moc_done:
			return Task.Task.runnable_status(self)
		else:
			# wait until all predecessors ran before creating the moc tasks
			for t in self.run_after:
				if not t.hasrun:
					return Task.ASK_LATER
			self.add_moc_tasks()
			return Task.Task.runnable_status(self)

	def create_moc_task(self, h_node, m_node):
		"""
		If several libraries use the same classes, it is possible that moc will run several times (Issue 1318)
		It is not possible to change the file names, but we can assume that the moc transformation will be identical,
		and the moc tasks can be shared in a global cache.

		The defines passed to moc will then depend on task generator order. If this is not acceptable, then
		use the tool slow_qt4 instead (and enjoy the slow builds... :-( )

		:param h_node: header node to process
		:param m_node: destination .moc node
		:return: the (possibly cached) moc task
		"""
		try:
			moc_cache = self.generator.bld.moc_cache
		except AttributeError:
			# first moc task of this build: create the cache lazily
			moc_cache = self.generator.bld.moc_cache = {}

		try:
			return moc_cache[h_node]
		except KeyError:
			tsk = moc_cache[h_node] = Task.classes['moc'](env=self.env, generator=self.generator)
			tsk.set_inputs(h_node)
			tsk.set_outputs(m_node)

			# direct injection in the build phase (safe because called from the main thread)
			gen = self.generator.bld.producer
			gen.outstanding.insert(0, tsk)
			gen.total += 1

			return tsk

	def add_moc_tasks(self):
		"""
		Create the moc tasks by looking in ``bld.raw_deps[self.uid()]``
		"""
		node = self.inputs[0]
		bld = self.generator.bld

		try:
			# compute the signature once to know if there is a moc file to create
			self.signature()
		except KeyError:
			# the moc file may be referenced somewhere else
			pass
		else:
			# remove the signature, it must be recomputed with the moc task
			delattr(self, 'cache_sig')

		moctasks=[]
		mocfiles=[]
		try:
			# consume the raw deps; they are re-stored (moc files only) below
			tmp_lst = bld.raw_deps[self.uid()]
			bld.raw_deps[self.uid()] = []
		except KeyError:
			tmp_lst = []
		for d in tmp_lst:
			if not d.endswith('.moc'):
				continue
			# paranoid check
			if d in mocfiles:
				Logs.error("paranoia owns")
				continue
			# process that base.moc only once
			mocfiles.append(d)

			# find the extension - this search is done only once

			h_node = None
			# NOTE(review): if the qt4 options were not loaded, the except path
			# leaves `ext` unbound and the next line raises NameError - confirm
			try: ext = Options.options.qt_header_ext.split()
			except AttributeError: pass
			if not ext: ext = MOC_H

			base2 = d[:-4]
			for x in [node.parent] + self.generator.includes_nodes:
				for e in ext:
					h_node = x.find_node(base2 + e)
					if h_node:
						break
				if h_node:
					m_node = h_node.change_ext('.moc')
					break
			else:
				# no plain header found: look for a .cpp.moc-style companion file
				for k in EXT_QT4:
					if base2.endswith(k):
						for x in [node.parent] + self.generator.includes_nodes:
							h_node = x.find_node(base2)
							if h_node:
								break
					if h_node:
						m_node = h_node.change_ext(k + '.moc')
						break
				if not h_node:
					raise Errors.WafError('no header found for %r which is a moc file' % d)

			# next time we will not search for the extension (look at the 'for' loop below)
			bld.node_deps[(self.inputs[0].parent.abspath(), m_node.name)] = h_node

			# create the task
			task = self.create_moc_task(h_node, m_node)
			moctasks.append(task)

		# remove raw deps except the moc files to save space (optimization)
		tmp_lst = bld.raw_deps[self.uid()] = mocfiles

		# look at the file inputs, it is set right above
		lst = bld.node_deps.get(self.uid(), ())
		for d in lst:
			name = d.name
			if name.endswith('.moc'):
				task = self.create_moc_task(bld.node_deps[(self.inputs[0].parent.abspath(), name)], d)
				moctasks.append(task)

		# simple scheduler dependency: run the moc task before others
		self.run_after.update(set(moctasks))
		self.moc_done = 1

	# reuse the plain cxx run method (unbound, bypassing any subclass override)
	run = Task.classes['cxx'].__dict__['run']
|
||||
|
||||
class trans_update(Task.Task):
	"""Update a .ts files from a list of C++ files"""
	run_str = '${QT_LUPDATE} ${SRC} -ts ${TGT}'
	color = 'BLUE'
# the .ts files are modified in the source tree, so mark the outputs as updated
Task.update_outputs(trans_update)
|
||||
|
||||
class XMLHandler(ContentHandler):
	"""
	Minimal SAX handler that extracts the ``<file>`` entries of a *.qrc* document.

	The file names found are accumulated in the ``files`` attribute.
	"""
	def __init__(self):
		# character data accumulated for the element currently being read
		self._chunks = []
		# file names collected from <file> elements
		self.files = []

	def startElement(self, name, attrs):
		if name != 'file':
			return
		self._chunks = []

	def endElement(self, name):
		if name != 'file':
			return
		self.files.append(str(''.join(self._chunks)))

	def characters(self, cars):
		self._chunks.append(cars)
|
||||
|
||||
@extension(*EXT_RCC)
def create_rcc_task(self, node):
	"Create rcc and cxx tasks for *.qrc* files"
	# rcc emits C++ code which is then compiled like any other source file
	cpp_node = node.change_ext('_rc.cpp')
	self.create_task('rcc', node, cpp_node)
	obj_task = self.create_task('cxx', cpp_node, cpp_node.change_ext('.o'))
	tasks = getattr(self, 'compiled_tasks', None)
	if tasks is None:
		self.compiled_tasks = [obj_task]
	else:
		tasks.append(obj_task)
	return obj_task
|
||||
|
||||
@extension(*EXT_UI)
def create_uic_task(self, node):
	"hook for uic tasks"
	tsk = self.create_task('ui4', node)
	# the generated header name follows ui_PATTERN (e.g. ui_%s.h)
	header = self.path.find_or_declare(self.env['ui_PATTERN'] % node.name[:-3])
	tsk.outputs = [header]
|
||||
|
||||
@extension('.ts')
def add_lang(self, node):
	"""add all the .ts file into self.lang"""
	existing = self.to_list(getattr(self, 'lang', []))
	self.lang = existing + [node]
|
||||
|
||||
@feature('qt4')
@after_method('apply_link')
def apply_qt4(self):
	"""
	Add MOC_FLAGS which may be necessary for moc::

		def build(bld):
			bld.program(features='qt4', source='main.cpp', target='app', use='QTCORE')

	The additional parameters are:

	:param lang: list of translation files (\*.ts) to process
	:type lang: list of :py:class:`waflib.Node.Node` or string without the .ts extension
	:param update: whether to process the C++ files to update the \*.ts files (use **waf --translate**)
	:type update: bool
	:param langname: if given, transform the \*.ts files into a .qrc files to include in the binary file
	:type langname: :py:class:`waflib.Node.Node` or string without the .qrc extension
	"""
	if getattr(self, 'lang', None):
		# one ts2qm task per translation file
		qmtasks = []
		for x in self.to_list(self.lang):
			if isinstance(x, str):
				x = self.path.find_resource(x + '.ts')
			qmtasks.append(self.create_task('ts2qm', x, x.change_ext('.qm')))

		if getattr(self, 'update', None) and Options.options.trans_qt4:
			# lupdate scans the C++ sources and the .ui files
			cxxnodes = [a.inputs[0] for a in self.compiled_tasks] + [
				a.inputs[0] for a in self.tasks if getattr(a, 'inputs', None) and a.inputs[0].name.endswith('.ui')]
			for x in qmtasks:
				self.create_task('trans_update', cxxnodes, x.inputs)

		if getattr(self, 'langname', None):
			# bundle the .qm files into a resource compiled into the binary
			qmnodes = [x.outputs[0] for x in qmtasks]
			rcnode = self.langname
			if isinstance(rcnode, str):
				rcnode = self.path.find_or_declare(rcnode + '.qrc')
			t = self.create_task('qm2rcc', qmnodes, rcnode)
			k = create_rcc_task(self, t.outputs[0])
			self.link_task.inputs.append(k.outputs[0])

	# forward the -D/-I compiler flags to moc (MSVC-style /D and /I are
	# rewritten with a leading dash)
	lst = []
	for flag in self.to_list(self.env['CXXFLAGS']):
		if len(flag) < 2: continue
		f = flag[0:2]
		if f in ['-D', '-I', '/D', '/I']:
			if (f[0] == '/'):
				lst.append('-' + flag[1:])
			else:
				lst.append(flag)
	self.env.append_value('MOC_FLAGS', lst)
|
||||
|
||||
@extension(*EXT_QT4)
def cxx_hook(self, node):
	"""
	Re-map C++ file extensions to the :py:class:`waflib.Tools.qt4.qxx` task.
	"""
	# qxx behaves like cxx but also creates the moc tasks dynamically
	return self.create_compiled_task('qxx', node)
|
||||
|
||||
class rcc(Task.Task):
	"""
	Process *.qrc* files
	"""
	color = 'BLUE'
	run_str = '${QT_RCC} -name ${SRC[0].name} ${SRC[0].abspath()} ${RCC_ST} -o ${TGT}'
	ext_out = ['.h']

	def scan(self):
		"""Parse the *.qrc* files"""
		node = self.inputs[0]

		if not has_xml:
			# best effort: build proceeds but dependencies may be stale
			Logs.error('no xml support was found, the rcc dependencies will be incomplete!')
			return ([], [])

		parser = make_parser()
		curHandler = XMLHandler()
		parser.setContentHandler(curHandler)
		fi = open(self.inputs[0].abspath(), 'r')
		try:
			parser.parse(fi)
		finally:
			fi.close()

		# resolve the <file> entries relative to the qrc file location;
		# unresolvable entries are returned as raw names
		nodes = []
		names = []
		root = self.inputs[0].parent
		for x in curHandler.files:
			nd = root.find_resource(x)
			if nd: nodes.append(nd)
			else: names.append(x)
		return (nodes, names)
|
||||
|
||||
class moc(Task.Task):
	"""
	Create *.moc* files
	"""
	color = 'BLUE'
	# MOC_FLAGS is filled by apply_qt4 from the -D/-I compiler flags
	run_str = '${QT_MOC} ${MOC_FLAGS} ${MOCCPPPATH_ST:INCPATHS} ${MOCDEFINES_ST:DEFINES} ${SRC} ${MOC_ST} ${TGT}'
|
||||
|
||||
class ui4(Task.Task):
	"""
	Process *.ui* files
	"""
	color = 'BLUE'
	run_str = '${QT_UIC} ${SRC} -o ${TGT}'
	# the generated ui_*.h header may be included by other compilation units
	ext_out = ['.h']
|
||||
|
||||
class ts2qm(Task.Task):
	"""
	Create *.qm* files from *.ts* files
	"""
	color = 'BLUE'
	run_str = '${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}'
|
||||
|
||||
class qm2rcc(Task.Task):
	"""
	Transform *.qm* files into *.rc* files
	"""
	color = 'BLUE'
	# the .qm inputs must exist before the resource file can reference them
	after = 'ts2qm'

	def run(self):
		"""Create a qrc file including the inputs"""
		# reference each .qm file relative to the generated qrc location
		txt = '\n'.join(['<file>%s</file>' % k.path_from(self.outputs[0].parent) for k in self.inputs])
		code = '<!DOCTYPE RCC><RCC version="1.0">\n<qresource>\n%s\n</qresource>\n</RCC>' % txt
		self.outputs[0].write(code)
|
||||
|
||||
def configure(self):
	"""
	Besides the configuration options, the environment variable QT4_ROOT may be used
	to give the location of the qt4 libraries (absolute path).

	The detection will use the program *pkg-config* through :py:func:`waflib.Tools.config_c.check_cfg`
	"""
	# the call order matters: the module lists must be set before the library
	# detection, and rpath/path simplification operate on the detected values
	self.find_qt4_binaries()
	self.set_qt4_libs_to_check()
	self.set_qt4_defines()
	self.find_qt4_libraries()
	self.add_qt4_rpath()
	self.simplify_qt4_libs()
|
||||
|
||||
@conf
def find_qt4_binaries(self):
	"""
	Detect qmake and the other Qt4 tools (uic, moc, rcc, lrelease, lupdate)
	and set the corresponding QT_* configuration variables.
	"""
	env = self.env
	opt = Options.options

	qtdir = getattr(opt, 'qtdir', '')
	qtbin = getattr(opt, 'qtbin', '')

	paths = []

	if qtdir:
		qtbin = os.path.join(qtdir, 'bin')

	# the qt directory has been given from QT4_ROOT - deduce the qt binary path
	if not qtdir:
		qtdir = os.environ.get('QT4_ROOT', '')
		qtbin = os.environ.get('QT4_BIN', None) or os.path.join(qtdir, 'bin')

	if qtbin:
		paths = [qtbin]

	# no qtdir, look in the path and in /usr/local/Trolltech
	if not qtdir:
		paths = os.environ.get('PATH', '').split(os.pathsep)
		paths.append('/usr/share/qt4/bin/')
		try:
			lst = Utils.listdir('/usr/local/Trolltech/')
		except OSError:
			pass
		else:
			if lst:
				lst.sort()
				lst.reverse()

				# keep the highest version
				qtdir = '/usr/local/Trolltech/%s/' % lst[0]
				qtbin = os.path.join(qtdir, 'bin')
				paths.append(qtbin)

	# at the end, try to find qmake in the paths given
	# keep the one with the highest version
	cand = None
	prev_ver = ['4', '0', '0']
	for qmk in ['qmake-qt4', 'qmake4', 'qmake']:
		try:
			qmake = self.find_program(qmk, path_list=paths)
		except self.errors.ConfigurationError:
			pass
		else:
			try:
				version = self.cmd_and_log([qmake, '-query', 'QT_VERSION']).strip()
			except self.errors.WafError:
				pass
			else:
				if version:
					# NOTE(review): lexicographic comparison of version string
					# lists - '10' sorts before '9'; fine for 4.x but confirm
					new_ver = version.split('.')
					if new_ver > prev_ver:
						cand = qmake
						prev_ver = new_ver
	if cand:
		self.env.QMAKE = cand
	else:
		self.fatal('Could not find qmake for qt4')

	# NOTE(review): qtbin is recomputed from qmake here but not appended to
	# `paths` used by find_bin below - looks unused; confirm against upstream
	qtbin = self.cmd_and_log([self.env.QMAKE, '-query', 'QT_INSTALL_BINS']).strip() + os.sep

	def find_bin(lst, var):
		# set env[var] to the first program of lst found on the search paths
		if var in env:
			return
		for f in lst:
			try:
				ret = self.find_program(f, path_list=paths)
			except self.errors.ConfigurationError:
				pass
			else:
				env[var]=ret
				break

	find_bin(['uic-qt3', 'uic3'], 'QT_UIC3')
	find_bin(['uic-qt4', 'uic'], 'QT_UIC')
	if not env['QT_UIC']:
		self.fatal('cannot find the uic compiler for qt4')

	try:
		# shell redirection: some uic versions print the version on stderr
		uicver = self.cmd_and_log(env['QT_UIC'] + " -version 2>&1").strip()
	except self.errors.ConfigurationError:
		self.fatal('this uic compiler is for qt3, add uic for qt4 to your path')
	uicver = uicver.replace('Qt User Interface Compiler ','').replace('User Interface Compiler for Qt', '')
	self.msg('Checking for uic version', '%s' % uicver)
	if uicver.find(' 3.') != -1:
		self.fatal('this uic compiler is for qt3, add uic for qt4 to your path')

	find_bin(['moc-qt4', 'moc'], 'QT_MOC')
	find_bin(['rcc'], 'QT_RCC')
	find_bin(['lrelease-qt4', 'lrelease'], 'QT_LRELEASE')
	find_bin(['lupdate-qt4', 'lupdate'], 'QT_LUPDATE')

	env['UIC3_ST']= '%s -o %s'
	env['UIC_ST'] = '%s -o %s'
	env['MOC_ST'] = '-o'
	env['ui_PATTERN'] = 'ui_%s.h'
	env['QT_LRELEASE_FLAGS'] = ['-silent']
	env.MOCCPPPATH_ST = '-I%s'
	env.MOCDEFINES_ST = '-D%s'
|
||||
|
||||
@conf
def find_qt4_libraries(self):
	"""
	Detect the Qt4 libraries, preferring pkg-config and falling back on a
	filesystem search (frameworks on darwin, lib*.so/.a on unix, *.lib on win32).
	"""
	qtlibs = getattr(Options.options, 'qtlibs', None) or os.environ.get("QT4_LIBDIR", None)
	if not qtlibs:
		try:
			qtlibs = self.cmd_and_log([self.env.QMAKE, '-query', 'QT_INSTALL_LIBS']).strip()
		except Errors.WafError:
			# qmake could not answer: derive the lib dir from the prefix
			qtdir = self.cmd_and_log([self.env.QMAKE, '-query', 'QT_INSTALL_PREFIX']).strip() + os.sep
			qtlibs = os.path.join(qtdir, 'lib')
	self.msg('Found the Qt4 libraries in', qtlibs)

	qtincludes = os.environ.get("QT4_INCLUDES", None) or self.cmd_and_log([self.env.QMAKE, '-query', 'QT_INSTALL_HEADERS']).strip()
	env = self.env
	if not 'PKG_CONFIG_PATH' in os.environ:
		os.environ['PKG_CONFIG_PATH'] = '%s:%s/pkgconfig:/usr/lib/qt4/lib/pkgconfig:/opt/qt4/lib/pkgconfig:/usr/lib/qt4/lib:/opt/qt4/lib' % (qtlibs, qtlibs)

	try:
		# QT4_XCOMPILE forces the manual (non pkg-config) detection below
		if os.environ.get("QT4_XCOMPILE", None):
			raise self.errors.ConfigurationError()
		self.check_cfg(atleast_pkgconfig_version='0.1')
	except self.errors.ConfigurationError:
		# manual detection, one branch per platform
		for i in self.qt4_vars:
			uselib = i.upper()
			if Utils.unversioned_sys_platform() == "darwin":
				# Since at least qt 4.7.3 each library locates in separate directory
				frameworkName = i + ".framework"
				qtDynamicLib = os.path.join(qtlibs, frameworkName, i)
				if os.path.exists(qtDynamicLib):
					env.append_unique('FRAMEWORK_' + uselib, i)
					self.msg('Checking for %s' % i, qtDynamicLib, 'GREEN')
				else:
					self.msg('Checking for %s' % i, False, 'YELLOW')
				env.append_unique('INCLUDES_' + uselib, os.path.join(qtlibs, frameworkName, 'Headers'))
			elif env.DEST_OS != "win32":
				qtDynamicLib = os.path.join(qtlibs, "lib" + i + ".so")
				qtStaticLib = os.path.join(qtlibs, "lib" + i + ".a")
				if os.path.exists(qtDynamicLib):
					env.append_unique('LIB_' + uselib, i)
					self.msg('Checking for %s' % i, qtDynamicLib, 'GREEN')
				elif os.path.exists(qtStaticLib):
					env.append_unique('LIB_' + uselib, i)
					self.msg('Checking for %s' % i, qtStaticLib, 'GREEN')
				else:
					self.msg('Checking for %s' % i, False, 'YELLOW')

				env.append_unique('LIBPATH_' + uselib, qtlibs)
				env.append_unique('INCLUDES_' + uselib, qtincludes)
				env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, i))
			else:
				# Release library names are like QtCore4
				for k in ("lib%s.a", "lib%s4.a", "%s.lib", "%s4.lib"):
					lib = os.path.join(qtlibs, k % i)
					if os.path.exists(lib):
						# keep the version suffix of the matching pattern (e.g. '4')
						env.append_unique('LIB_' + uselib, i + k[k.find("%s") + 2 : k.find('.')])
						self.msg('Checking for %s' % i, lib, 'GREEN')
						break
				else:
					self.msg('Checking for %s' % i, False, 'YELLOW')

				env.append_unique('LIBPATH_' + uselib, qtlibs)
				env.append_unique('INCLUDES_' + uselib, qtincludes)
				env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, i))

				# Debug library names are like QtCore4d
				uselib = i.upper() + "_debug"
				for k in ("lib%sd.a", "lib%sd4.a", "%sd.lib", "%sd4.lib"):
					lib = os.path.join(qtlibs, k % i)
					if os.path.exists(lib):
						env.append_unique('LIB_' + uselib, i + k[k.find("%s") + 2 : k.find('.')])
						self.msg('Checking for %s' % i, lib, 'GREEN')
						break
				else:
					self.msg('Checking for %s' % i, False, 'YELLOW')

				env.append_unique('LIBPATH_' + uselib, qtlibs)
				env.append_unique('INCLUDES_' + uselib, qtincludes)
				env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, i))
	else:
		# pkg-config is available: let it provide the flags for each module
		for i in self.qt4_vars_debug + self.qt4_vars:
			self.check_cfg(package=i, args='--cflags --libs', mandatory=False)
|
||||
|
||||
@conf
def simplify_qt4_libs(self):
	"""
	Shorten the command-lines: drop the library paths already carried by
	QtCore from every other Qt module.
	"""
	env = self.env
	def trim(module_names, core_var):
		for name in module_names:
			uselib = name.upper()
			if uselib == 'QTCORE':
				continue

			lib_paths = env['LIBPATH_' + uselib]
			if lib_paths:
				core_paths = env[core_var]
				env['LIBPATH_' + uselib] = [p for p in lib_paths if p not in core_paths]

	trim(self.qt4_vars, 'LIBPATH_QTCORE')
	trim(self.qt4_vars_debug, 'LIBPATH_QTCORE_DEBUG')
|
||||
|
||||
@conf
def add_qt4_rpath(self):
	"""
	Define rpath entries for the qt4 libraries when --want-rpath was given.
	"""
	env = self.env
	if not getattr(Options.options, 'want_rpath', False):
		return

	def set_rpath(module_names, core_var):
		for name in module_names:
			uselib = name.upper()
			lib_paths = env['LIBPATH_' + uselib]
			if lib_paths:
				core_paths = env[core_var]
				# QtCore keeps all of its paths; other modules skip the ones
				# already covered by QtCore
				env['RPATH_' + uselib] = ['-Wl,--rpath=' + p for p in lib_paths
					if uselib == 'QTCORE' or p not in core_paths]

	set_rpath(self.qt4_vars, 'LIBPATH_QTCORE')
	set_rpath(self.qt4_vars_debug, 'LIBPATH_QTCORE_DEBUG')
|
||||
|
||||
@conf
def set_qt4_libs_to_check(self):
	"""
	Initialize the lists of Qt4 modules to probe (release and debug variants),
	defaulting to QT4_LIBS.
	"""
	if not hasattr(self, 'qt4_vars'):
		self.qt4_vars = QT4_LIBS
	self.qt4_vars = Utils.to_list(self.qt4_vars)
	if not hasattr(self, 'qt4_vars_debug'):
		self.qt4_vars_debug = ['%s_debug' % name for name in self.qt4_vars]
	self.qt4_vars_debug = Utils.to_list(self.qt4_vars_debug)
|
||||
|
||||
@conf
def set_qt4_defines(self):
	"""
	On win32 only, add the QT_*_LIB defines for every Qt4 module
	(release and debug variants share the same define).
	"""
	if sys.platform != 'win32':
		return
	for module in self.qt4_vars:
		upper = module.upper()
		define = 'QT_%s_LIB' % module[2:].upper()
		self.env.append_unique('DEFINES_%s' % upper, define)
		self.env.append_unique('DEFINES_%s_DEBUG' % upper, define)
|
||||
|
||||
def options(opt):
	"""
	Add the Qt4 command-line options (rpath, tool locations, translations).
	"""
	opt.add_option('--want-rpath', action='store_true', default=False,
		dest='want_rpath', help='enable the rpath for qt libraries')
	opt.add_option('--header-ext', type='string', default='',
		help='header extension for moc files', dest='qt_header_ext')
	# tool/library location overrides
	for name in ('qtdir', 'qtbin', 'qtlibs'):
		opt.add_option('--' + name, type='string', default='', dest=name)
	opt.add_option('--translate', action='store_true', default=False,
		help='collect translation strings', dest='trans_qt4')
|
||||
|
193
third_party/waf/waf-light/waflib/Tools/ruby.py
vendored
Normal file
193
third_party/waf/waf-light/waflib/Tools/ruby.py
vendored
Normal file
|
@ -0,0 +1,193 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# daniel.svensson at purplescout.se 2008
|
||||
# Thomas Nagy 2010 (ita)
|
||||
|
||||
"""
|
||||
Support for Ruby extensions. A C/C++ compiler is required::
|
||||
|
||||
def options(opt):
|
||||
opt.load('compiler_c ruby')
|
||||
def configure(conf):
|
||||
conf.load('compiler_c ruby')
|
||||
conf.check_ruby_version((1,8,0))
|
||||
conf.check_ruby_ext_devel()
|
||||
conf.check_ruby_module('libxml')
|
||||
def build(bld):
|
||||
bld(
|
||||
features = 'c cshlib rubyext',
|
||||
source = 'rb_mytest.c',
|
||||
target = 'mytest_ext',
|
||||
install_path = '${ARCHDIR_RUBY}')
|
||||
bld.install_files('${LIBDIR_RUBY}', 'Mytest.rb')
|
||||
"""
|
||||
|
||||
import os
|
||||
from waflib import Task, Options, Utils
|
||||
from waflib.TaskGen import before_method, feature, after_method, Task, extension
|
||||
from waflib.Configure import conf
|
||||
|
||||
@feature('rubyext')
@before_method('apply_incpaths', 'apply_lib_vars', 'apply_bundle', 'apply_link')
def init_rubyext(self):
	"""
	Set the default install path and the RUBY/RUBYEXT use variables
	required by ruby extensions.
	"""
	self.install_path = '${ARCHDIR_RUBY}'
	self.uselib = self.to_list(getattr(self, 'uselib', ''))
	for name in ('RUBY', 'RUBYEXT'):
		if name not in self.uselib:
			self.uselib.append(name)
|
||||
|
||||
@feature('rubyext')
@before_method('apply_link', 'propagate_uselib')
def apply_ruby_so_name(self):
	"""
	Strip the *lib* prefix from ruby extensions
	"""
	# ruby wants e.g. mytest_ext.so, not libmytest_ext.so
	pattern = self.env['rubyext_PATTERN']
	self.env['cshlib_PATTERN'] = pattern
	self.env['cxxshlib_PATTERN'] = pattern
|
||||
|
||||
@conf
def check_ruby_version(self, minver=()):
	"""
	Checks if ruby is installed.
	If installed the variable RUBY will be set in environment.
	The ruby binary can be overridden by ``--with-ruby-binary`` command-line option.

	:param minver: minimum acceptable version as a tuple of ints, e.g. (1, 8, 0)
	:type minver: tuple
	"""

	if Options.options.rubybinary:
		self.env.RUBY = Options.options.rubybinary
	else:
		self.find_program('ruby', var='RUBY')

	ruby = self.env.RUBY

	try:
		# old rubies expose VERSION, newer ones RUBY_VERSION
		version = self.cmd_and_log([ruby, '-e', 'puts defined?(VERSION) ? VERSION : RUBY_VERSION']).strip()
	except Exception:
		self.fatal('could not determine ruby version')
	self.env.RUBY_VERSION = version

	try:
		ver = tuple(map(int, version.split(".")))
	except Exception:
		self.fatal('unsupported ruby version %r' % version)

	cver = ''
	if minver:
		if ver < minver:
			self.fatal('ruby is too old %r' % ver)
		cver = '.'.join([str(x) for x in minver])
	else:
		# no constraint: report the detected version tuple
		cver = ver

	self.msg('Checking for ruby version %s' % str(minver or ''), cver)
|
||||
|
||||
@conf
def check_ruby_ext_devel(self):
	"""
	Check if a ruby extension can be created

	Requires a prior :py:func:`check_ruby_version` and a loaded C/C++ compiler;
	fills the RUBYEXT use variables (include paths, link flags, install dirs).
	"""
	if not self.env.RUBY:
		self.fatal('ruby detection is required first')

	if not self.env.CC_NAME and not self.env.CXX_NAME:
		self.fatal('load a c/c++ compiler first')

	version = tuple(map(int, self.env.RUBY_VERSION.split(".")))

	def read_out(cmd):
		# run a one-liner with the detected ruby and split its output
		return Utils.to_list(self.cmd_and_log([self.env.RUBY, '-rrbconfig', '-e', cmd]))

	def read_config(key):
		return read_out('puts Config::CONFIG[%r]' % key)

	# NOTE(review): `ruby` is assigned but never used below - confirm upstream
	ruby = self.env['RUBY']
	archdir = read_config('archdir')
	cpppath = archdir

	if version >= (1, 9, 0):
		# ruby 1.9+ moved the headers to rubyhdrdir/<arch>
		ruby_hdrdir = read_config('rubyhdrdir')
		cpppath += ruby_hdrdir
		cpppath += [os.path.join(ruby_hdrdir[0], read_config('arch')[0])]

	self.check(header_name='ruby.h', includes=cpppath, errmsg='could not find ruby header file')

	self.env.LIBPATH_RUBYEXT = read_config('libdir')
	self.env.LIBPATH_RUBYEXT += archdir
	self.env.INCLUDES_RUBYEXT = cpppath
	self.env.CFLAGS_RUBYEXT = read_config('CCDLFLAGS')
	self.env.rubyext_PATTERN = '%s.' + read_config('DLEXT')[0]

	# ok this is really stupid, but the command and flags are combined.
	# so we try to find the first argument...
	flags = read_config('LDSHARED')
	while flags and flags[0][0] != '-':
		flags = flags[1:]

	# we also want to strip out the deprecated ppc flags
	if len(flags) > 1 and flags[1] == "ppc":
		flags = flags[2:]

	self.env.LINKFLAGS_RUBYEXT = flags
	self.env.LINKFLAGS_RUBYEXT += read_config('LIBS')
	self.env.LINKFLAGS_RUBYEXT += read_config('LIBRUBYARG_SHARED')

	# command-line overrides win over the values reported by rbconfig
	if Options.options.rubyarchdir:
		self.env.ARCHDIR_RUBY = Options.options.rubyarchdir
	else:
		self.env.ARCHDIR_RUBY = read_config('sitearchdir')[0]

	if Options.options.rubylibdir:
		self.env.LIBDIR_RUBY = Options.options.rubylibdir
	else:
		self.env.LIBDIR_RUBY = read_config('sitelibdir')[0]
|
||||
|
||||
@conf
def check_ruby_module(self, module_name):
	"""
	Check if the selected ruby interpreter can require the given ruby module::

		def configure(conf):
			conf.check_ruby_module('libxml')

	:param module_name: module
	:type module_name: string
	"""
	interpreter = self.env['RUBY']
	snippet = 'require \'%s\';puts 1' % module_name
	self.start_msg('Ruby module %s' % module_name)
	try:
		self.cmd_and_log([interpreter, '-e', snippet])
	except Exception:
		self.end_msg(False)
		self.fatal('Could not find the ruby module %r' % module_name)
	else:
		self.end_msg(True)
|
||||
|
||||
@extension('.rb')
def process(self, node):
	# schedule one run_ruby task per .rb source file
	self.create_task('run_ruby', node)
|
||||
|
||||
class run_ruby(Task.Task):
	"""
	Task to run ruby files detected by file extension .rb::

		def options(opt):
			opt.load('ruby')

		def configure(ctx):
			ctx.check_ruby_version()

		def build(bld):
			bld.env['RBFLAGS'] = '-e puts "hello world"'
			bld(source='a_ruby_file.rb')
	"""
	# -I adds the script's own directory to ruby's load path
	run_str = '${RUBY} ${RBFLAGS} -I ${SRC[0].parent.abspath()} ${SRC}'
|
||||
|
||||
def options(opt):
	"""
	Add the ``--with-ruby-archdir``, ``--with-ruby-libdir`` and ``--with-ruby-binary`` options
	"""
	ruby_opts = (
		('--with-ruby-archdir', 'rubyarchdir', 'Specify directory where to install arch specific files'),
		('--with-ruby-libdir', 'rubylibdir', 'Specify alternate ruby library path'),
		('--with-ruby-binary', 'rubybinary', 'Specify alternate ruby binary'),
	)
	for flag, dest, help_text in ruby_opts:
		opt.add_option(flag, type='string', dest=dest, help=help_text)
|
||||
|
77
third_party/waf/waf-light/waflib/Tools/suncc.py
vendored
Normal file
77
third_party/waf/waf-light/waflib/Tools/suncc.py
vendored
Normal file
|
@ -0,0 +1,77 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2006-2010 (ita)
|
||||
# Ralf Habacker, 2006 (rh)
|
||||
|
||||
import os
|
||||
from waflib import Utils
|
||||
from waflib.Tools import ccroot, ar
|
||||
from waflib.Configure import conf
|
||||
|
||||
@conf
def find_scc(conf):
	"""
	Detect the Sun C compiler
	"""
	v = conf.env
	cc = None
	# precedence: pre-set conf.env.CC, then the CC environment variable,
	# then a 'cc' program found on the PATH
	if v['CC']: cc = v['CC']
	elif 'CC' in conf.environ: cc = conf.environ['CC']
	if not cc: cc = conf.find_program('cc', var='CC')
	if not cc: conf.fatal('Could not find a Sun C compiler')
	cc = conf.cmd_to_list(cc)

	# '-flags' is understood by the Sun compiler driver; other compilers
	# found as 'cc' (e.g. gcc) are expected to fail this probe
	try:
		conf.cmd_and_log(cc + ['-flags'])
	except Exception:
		conf.fatal('%r is not a Sun compiler' % cc)

	v['CC'] = cc
	v['CC_NAME'] = 'sun'
|
||||
|
||||
@conf
def scc_common_flags(conf):
	"""
	Flags required for executing the sun C compiler
	"""
	v = conf.env

	# compiler: no prefix for source files, '-c -o' for the object target
	v['CC_SRC_F'] = []
	v['CC_TGT_F'] = ['-c', '-o']

	# linker
	if not v['LINK_CC']: v['LINK_CC'] = v['CC']
	v['CCLNK_SRC_F'] = ''
	v['CCLNK_TGT_F'] = ['-o']
	v['CPPPATH_ST'] = '-I%s'
	v['DEFINES_ST'] = '-D%s'

	v['LIB_ST'] = '-l%s' # template for adding libs
	v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
	v['STLIB_ST'] = '-l%s'
	v['STLIBPATH_ST'] = '-L%s'

	# Sun ld flags: -h sets the soname; -Bdynamic/-Bstatic toggle link mode
	v['SONAME_ST'] = '-Wl,-h,%s'
	v['SHLIB_MARKER'] = '-Bdynamic'
	v['STLIB_MARKER'] = '-Bstatic'

	# program
	v['cprogram_PATTERN'] = '%s'

	# shared library (-Kpic is the Sun spelling of -fPIC)
	v['CFLAGS_cshlib'] = ['-Kpic', '-DPIC']
	v['LINKFLAGS_cshlib'] = ['-G']
	v['cshlib_PATTERN'] = 'lib%s.so'

	# static lib
	v['LINKFLAGS_cstlib'] = ['-Bstatic']
	v['cstlib_PATTERN'] = 'lib%s.a'
|
||||
|
||||
def configure(conf):
	"""
	Configuration for the Sun C compiler: detect the compiler, the
	archiver, then load the generic C support and flag handling.
	"""
	conf.find_scc()
	conf.find_ar()
	conf.scc_common_flags()
	conf.cc_load_tools()
	conf.cc_add_flags()
	conf.link_add_flags()
|
||||
|
78
third_party/waf/waf-light/waflib/Tools/suncxx.py
vendored
Normal file
78
third_party/waf/waf-light/waflib/Tools/suncxx.py
vendored
Normal file
|
@ -0,0 +1,78 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2006-2010 (ita)
|
||||
# Ralf Habacker, 2006 (rh)
|
||||
|
||||
import os
|
||||
from waflib import Utils
|
||||
from waflib.Tools import ccroot, ar
|
||||
from waflib.Configure import conf
|
||||
|
||||
@conf
def find_sxx(conf):
	"""
	Detect the sun C++ compiler
	"""
	v = conf.env
	cc = None
	# precedence: pre-set conf.env.CXX, then the CXX environment variable,
	# then 'CC' (the Sun Studio C++ driver name), then 'c++'
	if v['CXX']: cc = v['CXX']
	elif 'CXX' in conf.environ: cc = conf.environ['CXX']
	if not cc: cc = conf.find_program('CC', var='CXX') #studio
	if not cc: cc = conf.find_program('c++', var='CXX')
	if not cc: conf.fatal('Could not find a Sun C++ compiler')
	cc = conf.cmd_to_list(cc)

	# '-flags' is understood by the Sun compiler driver; other compilers
	# are expected to fail this probe
	try:
		conf.cmd_and_log(cc + ['-flags'])
	except Exception:
		conf.fatal('%r is not a Sun compiler' % cc)

	v['CXX'] = cc
	v['CXX_NAME'] = 'sun'
|
||||
|
||||
@conf
def sxx_common_flags(conf):
	"""
	Flags required for executing the sun C++ compiler
	"""
	v = conf.env

	# compiler: no prefix for source files, '-c -o' for the object target
	v['CXX_SRC_F'] = []
	v['CXX_TGT_F'] = ['-c', '-o']

	# linker
	if not v['LINK_CXX']: v['LINK_CXX'] = v['CXX']
	v['CXXLNK_SRC_F'] = []
	v['CXXLNK_TGT_F'] = ['-o']
	v['CPPPATH_ST'] = '-I%s'
	v['DEFINES_ST'] = '-D%s'

	v['LIB_ST'] = '-l%s' # template for adding libs
	v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
	v['STLIB_ST'] = '-l%s'
	v['STLIBPATH_ST'] = '-L%s'

	# Sun ld flags: -h sets the soname; -Bdynamic/-Bstatic toggle link mode
	v['SONAME_ST'] = '-Wl,-h,%s'
	v['SHLIB_MARKER'] = '-Bdynamic'
	v['STLIB_MARKER'] = '-Bstatic'

	# program
	v['cxxprogram_PATTERN'] = '%s'

	# shared library (-Kpic is the Sun spelling of -fPIC)
	v['CXXFLAGS_cxxshlib'] = ['-Kpic', '-DPIC']
	v['LINKFLAGS_cxxshlib'] = ['-G']
	v['cxxshlib_PATTERN'] = 'lib%s.so'

	# static lib
	v['LINKFLAGS_cxxstlib'] = ['-Bstatic']
	v['cxxstlib_PATTERN'] = 'lib%s.a'
|
||||
|
||||
def configure(conf):
	"""
	Configuration for the Sun C++ compiler: detect the compiler, the
	archiver, then load the generic C++ support and flag handling.
	"""
	conf.find_sxx()
	conf.find_ar()
	conf.sxx_common_flags()
	conf.cxx_load_tools()
	conf.cxx_add_flags()
	conf.link_add_flags()
|
||||
|
431
third_party/waf/waf-light/waflib/Tools/tex.py
vendored
Normal file
431
third_party/waf/waf-light/waflib/Tools/tex.py
vendored
Normal file
|
@ -0,0 +1,431 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2006-2010 (ita)
|
||||
|
||||
"""
|
||||
TeX/LaTeX/PDFLaTeX/XeLaTeX support
|
||||
|
||||
Example::
|
||||
|
||||
def configure(conf):
|
||||
conf.load('tex')
|
||||
if not conf.env.LATEX:
|
||||
conf.fatal('The program LaTex is required')
|
||||
|
||||
def build(bld):
|
||||
bld(
|
||||
features = 'tex',
|
||||
type = 'latex', # pdflatex or xelatex
|
||||
source = 'document.ltx', # mandatory, the source
|
||||
outs = 'ps', # 'pdf' or 'ps pdf'
|
||||
deps = 'crossreferencing.lst', # to give dependencies directly
|
||||
prompt = 1, # 0 for the batch mode
|
||||
)
|
||||
|
||||
To configure with a special program use::
|
||||
|
||||
$ PDFLATEX=luatex waf configure
|
||||
"""
|
||||
|
||||
import os, re
|
||||
from waflib import Utils, Task, Errors, Logs
|
||||
from waflib.TaskGen import feature, before_method
|
||||
|
||||
# matches \putbib[somefile] occurrences
re_bibunit = re.compile(r'\\(?P<type>putbib)\[(?P<file>[^\[\]]*)\]',re.M)
def bibunitscan(self):
	"""
	Parse the inputs and try to find the *bibunit* dependencies

	:return: list of bibunit files
	:rtype: list of :py:class:`waflib.Node.Node`
	"""
	node = self.inputs[0]

	nodes = []
	if not node: return nodes

	code = node.read()

	for match in re_bibunit.finditer(code):
		path = match.group('file')
		if path:
			# try the path verbatim, then with a .bib extension
			for k in ['', '.bib']:
				# add another loop for the tex include paths?
				Logs.debug('tex: trying %s%s' % (path, k))
				fi = node.parent.find_resource(path + k)
				if fi:
					nodes.append(fi)
					# no break, people are crazy
				else:
					Logs.debug('tex: could not find %s' % path)

	Logs.debug("tex: found the following bibunit files: %s" % nodes)
	return nodes
|
||||
|
||||
exts_deps_tex = ['', '.ltx', '.tex', '.bib', '.pdf', '.png', '.eps', '.ps']
"""List of typical file extensions included in latex files"""

exts_tex = ['.ltx', '.tex']
"""List of typical file extensions that contain latex"""

# captures the command name ('type') and its brace argument ('file');
# the optional [...] group skips things like \includegraphics[width=...]
re_tex = re.compile(r'\\(?P<type>include|bibliography|putbib|includegraphics|input|import|bringin|lstinputlisting)(\[[^\[\]]*\])?{(?P<file>[^{}]*)}',re.M)
"""Regexp for expressions that may include latex files"""

g_bibtex_re = re.compile('bibdata', re.M)
"""Regexp for bibtex files"""
|
||||
|
||||
class tex(Task.Task):
	"""
	Compile a tex/latex file.

	.. inheritance-diagram:: waflib.Tools.tex.latex waflib.Tools.tex.xelatex waflib.Tools.tex.pdflatex
	"""

	bibtex_fun, _ = Task.compile_fun('${BIBTEX} ${BIBTEXFLAGS} ${SRCFILE}', shell=False)
	bibtex_fun.__doc__ = """
	Execute the program **bibtex**
	"""

	makeindex_fun, _ = Task.compile_fun('${MAKEINDEX} ${MAKEINDEXFLAGS} ${SRCFILE}', shell=False)
	makeindex_fun.__doc__ = """
	Execute the program **makeindex**
	"""

	def exec_command(self, cmd, **kw):
		"""
		Override :py:meth:`waflib.Task.Task.exec_command` to execute the command without buffering (latex may prompt for inputs)

		:return: the return code
		:rtype: int
		"""
		bld = self.generator.bld
		try:
			if not kw.get('cwd', None):
				kw['cwd'] = bld.cwd
		except AttributeError:
			# first task to run: cache the variant dir as the default cwd
			bld.cwd = kw['cwd'] = bld.variant_dir
		return Utils.subprocess.Popen(cmd, **kw).wait()

	def scan_aux(self, node):
		"""
		A recursive regex-based scanner that finds included auxiliary files
		(``\\@input{...}`` statements emitted by latex).

		:return: the aux file and everything it transitively includes
		:rtype: list of :py:class:`waflib.Node.Node`
		"""
		nodes = [node]
		re_aux = re.compile(r'\\@input{(?P<file>[^{}]*)}', re.M)

		def parse_node(node):
			code = node.read()
			for match in re_aux.finditer(code):
				path = match.group('file')
				found = node.parent.find_or_declare(path)
				if found and found not in nodes:
					Logs.debug('tex: found aux node ' + found.abspath())
					nodes.append(found)
					parse_node(found)

		parse_node(node)
		return nodes

	def scan(self):
		"""
		A recursive regex-based scanner that finds latex dependencies. It uses :py:attr:`waflib.Tools.tex.re_tex`

		Depending on your needs you might want:

		* to change re_tex::

			from waflib.Tools import tex
			tex.re_tex = myregex

		* or to change the method scan from the latex tasks::

			from waflib.Task import classes
			classes['latex'].scan = myscanfunction
		"""
		node = self.inputs[0]

		nodes = []
		names = []
		seen = []
		if not node: return (nodes, names)

		def parse_node(node):
			if node in seen:
				return
			seen.append(node)
			code = node.read()
			global re_tex
			for match in re_tex.finditer(code):
				# one \include{a,b,c} may list several files
				for path in match.group('file').split(','):
					if path:
						add_name = True
						found = None
						for k in exts_deps_tex:
							Logs.debug('tex: trying %s%s' % (path, k))
							found = node.parent.find_resource(path + k)

							# accept 'found' only if it is not produced by
							# one of our own tasks (for-else: the else runs
							# when no task claimed it as an output)
							# NOTE(review): with an empty generator.tasks list
							# the else branch would run even for found=None —
							# presumably the current task is always present;
							# confirm before relying on this
							for tsk in self.generator.tasks:
								if not found or found in tsk.outputs:
									break
							else:
								nodes.append(found)
								add_name = False
								for ext in exts_tex:
									if found.name.endswith(ext):
										parse_node(found)
										break
							# no break, people are crazy
						if add_name:
							names.append(path)
		parse_node(node)

		for x in nodes:
			x.parent.get_bld().mkdir()

		Logs.debug("tex: found the following : %s and names %s" % (nodes, names))
		return (nodes, names)

	def check_status(self, msg, retcode):
		"""
		Check an exit status and raise an error with a particular message

		:param msg: message to display if the code is non-zero
		:type msg: string
		:param retcode: condition
		:type retcode: boolean
		"""
		if retcode != 0:
			raise Errors.WafError("%r command exit status %r" % (msg, retcode))

	def bibfile(self):
		"""
		Parse the *.aux* files to find bibfiles to process.
		If yes, execute :py:meth:`waflib.Tools.tex.tex.bibtex_fun`
		"""
		for aux_node in self.aux_nodes:
			try:
				ct = aux_node.read()
			except (OSError, IOError) as exc:
				# bugfix: the original passed a single value to a
				# two-placeholder format string, raising TypeError here
				Logs.error('Error reading %s: %r' % (aux_node.abspath(), exc))
				continue

			if g_bibtex_re.findall(ct):
				Logs.warn('calling bibtex')

				self.env.env = {}
				self.env.env.update(os.environ)
				self.env.env.update({'BIBINPUTS': self.TEXINPUTS, 'BSTINPUTS': self.TEXINPUTS})
				# strip the '.aux' extension for bibtex
				self.env.SRCFILE = aux_node.name[:-4]
				self.check_status('error when calling bibtex', self.bibtex_fun())

	def bibunits(self):
		"""
		Parse the *.aux* file to find bibunit files. If there are bibunit files,
		execute :py:meth:`waflib.Tools.tex.tex.bibtex_fun`.
		"""
		try:
			bibunits = bibunitscan(self)
		except OSError:
			Logs.error('error bibunitscan')
		else:
			if bibunits:
				# bugfix: 'xrange' is a NameError on python 3; 'range' is
				# equivalent for this small sequence on both versions
				fn = ['bu' + str(i) for i in range(1, len(bibunits) + 1)]
				if fn:
					Logs.warn('calling bibtex on bibunits')

				for f in fn:
					self.env.env = {'BIBINPUTS': self.TEXINPUTS, 'BSTINPUTS': self.TEXINPUTS}
					self.env.SRCFILE = f
					self.check_status('error when calling bibtex', self.bibtex_fun())

	def makeindex(self):
		"""
		Look on the filesystem if there is a *.idx* file to process. If yes, execute
		:py:meth:`waflib.Tools.tex.tex.makeindex_fun`
		"""
		try:
			idx_path = self.idx_node.abspath()
			os.stat(idx_path)
		except OSError:
			Logs.warn('index file %s absent, not calling makeindex' % idx_path)
		else:
			Logs.warn('calling makeindex')

			self.env.SRCFILE = self.idx_node.name
			self.env.env = {}
			self.check_status('error when calling makeindex %s' % idx_path, self.makeindex_fun())

	def bibtopic(self):
		"""
		Additional .aux files from the bibtopic package
		"""
		p = self.inputs[0].parent.get_bld()
		if os.path.exists(os.path.join(p.abspath(), 'btaux.aux')):
			self.aux_nodes += p.ant_glob('*[0-9].aux')

	def run(self):
		"""
		Runs the TeX build process.

		It may require multiple passes, depending on the usage of cross-references,
		bibliographies, content susceptible of needing such passes.
		The appropriate TeX compiler is called until the *.aux* files stop changing.

		Makeindex and bibtex are called if necessary.
		"""
		env = self.env

		if not env['PROMPT_LATEX']:
			env.append_value('LATEXFLAGS', '-interaction=batchmode')
			env.append_value('PDFLATEXFLAGS', '-interaction=batchmode')
			env.append_value('XELATEXFLAGS', '-interaction=batchmode')

		fun = self.texfun

		node = self.inputs[0]
		srcfile = node.abspath()

		texinputs = self.env.TEXINPUTS or ''
		self.TEXINPUTS = node.parent.get_bld().abspath() + os.pathsep + node.parent.get_src().abspath() + os.pathsep + texinputs + os.pathsep

		# important, set the cwd for everybody
		self.cwd = self.inputs[0].parent.get_bld().abspath()

		Logs.warn('first pass on %s' % self.__class__.__name__)

		self.env.env = {}
		self.env.env.update(os.environ)
		self.env.env.update({'TEXINPUTS': self.TEXINPUTS})
		self.env.SRCFILE = srcfile
		self.check_status('error when calling latex', fun())

		self.aux_nodes = self.scan_aux(node.change_ext('.aux'))
		self.idx_node = node.change_ext('.idx')

		self.bibtopic()
		self.bibfile()
		self.bibunits()
		self.makeindex()

		cur_hash = ''
		for _ in range(10):
			# prevent against infinite loops - one never knows

			# watch the contents of file.aux and stop if file.aux does not change anymore
			prev_hash = cur_hash
			try:
				hashes = [Utils.h_file(x.abspath()) for x in self.aux_nodes]
				cur_hash = Utils.h_list(hashes)
			except (OSError, IOError):
				Logs.error('could not read aux.h')
			if cur_hash and cur_hash == prev_hash:
				break

			# run the command
			Logs.warn('calling %s' % self.__class__.__name__)

			self.env.env = {}
			self.env.env.update(os.environ)
			self.env.env.update({'TEXINPUTS': self.TEXINPUTS})
			self.env.SRCFILE = srcfile
			self.check_status('error when calling %s' % self.__class__.__name__, fun())
|
||||
|
||||
class latex(tex):
	# produces .dvi output via ${LATEX}
	texfun, vars = Task.compile_fun('${LATEX} ${LATEXFLAGS} ${SRCFILE}', shell=False)
|
||||
class pdflatex(tex):
	# produces .pdf output via ${PDFLATEX}
	texfun, vars = Task.compile_fun('${PDFLATEX} ${PDFLATEXFLAGS} ${SRCFILE}', shell=False)
|
||||
class xelatex(tex):
	# produces .pdf output via ${XELATEX}
	texfun, vars = Task.compile_fun('${XELATEX} ${XELATEXFLAGS} ${SRCFILE}', shell=False)
|
||||
|
||||
class dvips(Task.Task):
	"""Convert a .dvi file to .ps; runs after any of the tex tasks."""
	run_str = '${DVIPS} ${DVIPSFLAGS} ${SRC} -o ${TGT}'
	color = 'BLUE'
	after = ['latex', 'pdflatex', 'xelatex']
|
||||
|
||||
class dvipdf(Task.Task):
	"""Convert a .dvi file to .pdf; runs after any of the tex tasks."""
	run_str = '${DVIPDF} ${DVIPDFFLAGS} ${SRC} ${TGT}'
	color = 'BLUE'
	after = ['latex', 'pdflatex', 'xelatex']
|
||||
|
||||
class pdf2ps(Task.Task):
	"""Convert a .pdf file to .ps; runs after any of the tex tasks."""
	run_str = '${PDF2PS} ${PDF2PSFLAGS} ${SRC} ${TGT}'
	color = 'BLUE'
	after = ['latex', 'pdflatex', 'xelatex']
|
||||
|
||||
@feature('tex')
@before_method('process_source')
def apply_tex(self):
	"""
	Create :py:class:`waflib.Tools.tex.tex` objects, and dvips/dvipdf/pdf2ps tasks if necessary (outs='ps', etc).
	"""
	# default to pdflatex when no (or an unknown) type is given
	if not getattr(self, 'type', None) in ['latex', 'pdflatex', 'xelatex']:
		self.type = 'pdflatex'

	tree = self.bld
	outs = Utils.to_list(getattr(self, 'outs', []))

	# prompt for incomplete files (else the batchmode is used)
	self.env['PROMPT_LATEX'] = getattr(self, 'prompt', 1)

	deps_lst = []

	# explicit dependencies declared with deps='...'
	if getattr(self, 'deps', None):
		deps = self.to_list(self.deps)
		for filename in deps:
			n = self.path.find_resource(filename)
			if not n:
				self.bld.fatal('Could not find %r for %r' % (filename, self))
			if not n in deps_lst:
				deps_lst.append(n)

	for node in self.to_nodes(self.source):

		# latex emits .dvi; pdflatex and xelatex emit .pdf
		if self.type == 'latex':
			task = self.create_task('latex', node, node.change_ext('.dvi'))
		elif self.type == 'pdflatex':
			task = self.create_task('pdflatex', node, node.change_ext('.pdf'))
		elif self.type == 'xelatex':
			task = self.create_task('xelatex', node, node.change_ext('.pdf'))

		task.env = self.env

		# add the manual dependencies
		if deps_lst:
			try:
				lst = tree.node_deps[task.uid()]
				for n in deps_lst:
					if not n in lst:
						lst.append(n)
			except KeyError:
				tree.node_deps[task.uid()] = deps_lst

		# TEXINPUTS for the conversion tasks: source dir, build dir, then
		# whatever the user's environment already had
		v = dict(os.environ)
		p = node.parent.abspath() + os.pathsep + self.path.abspath() + os.pathsep + self.path.get_bld().abspath() + os.pathsep + v.get('TEXINPUTS', '') + os.pathsep
		v['TEXINPUTS'] = p

		if self.type == 'latex':
			if 'ps' in outs:
				tsk = self.create_task('dvips', task.outputs, node.change_ext('.ps'))
				tsk.env.env = dict(v)
			if 'pdf' in outs:
				tsk = self.create_task('dvipdf', task.outputs, node.change_ext('.pdf'))
				tsk.env.env = dict(v)
		elif self.type == 'pdflatex':
			if 'ps' in outs:
				# NOTE(review): unlike the dvips/dvipdf branches, the pdf2ps
				# task does not receive the TEXINPUTS environment — presumably
				# intentional since pdf2ps reads only the .pdf; confirm
				self.create_task('pdf2ps', task.outputs, node.change_ext('.ps'))
	# consumed all sources; prevent process_source from double-handling them
	self.source = []
|
||||
|
||||
def configure(self):
	"""
	Try to find the programs tex, latex and others. Do not raise any error if they
	are not found.
	"""
	v = self.env
	for p in 'tex latex pdflatex xelatex bibtex dvips dvipdf ps2pdf makeindex pdf2ps'.split():
		try:
			self.find_program(p, var=p.upper())
		except self.errors.ConfigurationError:
			# optional: missing programs simply leave their env var unset
			pass
	v['DVIPSFLAGS'] = '-Ppdf'
|
||||
|
332
third_party/waf/waf-light/waflib/Tools/vala.py
vendored
Normal file
332
third_party/waf/waf-light/waflib/Tools/vala.py
vendored
Normal file
|
@ -0,0 +1,332 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Ali Sabil, 2007
|
||||
# Radosław Szkodziński, 2010
|
||||
|
||||
"""
|
||||
At this point, vala is still unstable, so do not expect
|
||||
this tool to be too stable either (apis, etc)
|
||||
"""
|
||||
|
||||
import os.path, shutil, re
|
||||
from waflib import Context, Task, Utils, Logs, Options, Errors
|
||||
from waflib.TaskGen import extension, taskgen_method
|
||||
from waflib.Configure import conf
|
||||
|
||||
class valac(Task.Task):
	"""
	Task to compile vala files.
	"""
	#run_str = "${VALAC} ${VALAFLAGS}" # ideally
	#vars = ['VALAC_VERSION']
	vars = ["VALAC", "VALAC_VERSION", "VALAFLAGS"]
	ext_out = ['.h']

	def run(self):
		# valac is executed in the directory of the first output; all inputs
		# are passed on one command line
		cmd = [self.env['VALAC']] + self.env['VALAFLAGS']
		cmd.extend([a.abspath() for a in self.inputs])
		ret = self.exec_command(cmd, cwd=self.outputs[0].parent.abspath())

		if ret:
			return ret

		# valac writes everything next to the first output; move any outputs
		# declared in other directories into place
		for x in self.outputs:
			if id(x.parent) != id(self.outputs[0].parent):
				shutil.move(self.outputs[0].parent.abspath() + os.sep + x.name, x.abspath())

		# record the package list for installed libraries (.deps file)
		if self.generator.dump_deps_node:
			self.generator.dump_deps_node.write('\n'.join(self.generator.packages))

		return ret

valac = Task.update_outputs(valac) # no decorators for python2 classes
|
||||
|
||||
@taskgen_method
def init_vala_task(self):
	"""
	Initialize the vala task (flags, outputs, package/vapi discovery and
	installation entries); called once when the first vala file is seen.
	"""
	self.profile = getattr(self, 'profile', 'gobject')

	if self.profile == 'gobject':
		self.uselib = Utils.to_list(getattr(self, 'uselib', []))
		if not 'GOBJECT' in self.uselib:
			self.uselib.append('GOBJECT')

	def addflags(flags):
		# accumulate valac command-line flags on the task environment
		self.env.append_value('VALAFLAGS', flags)

	if self.profile:
		addflags('--profile=%s' % self.profile)

	if hasattr(self, 'threading'):
		if self.profile == 'gobject':
			if not 'GTHREAD' in self.uselib:
				self.uselib.append('GTHREAD')
		else:
			#Vala doesn't have threading support for dova nor posix
			Logs.warn("Profile %s means no threading support" % self.profile)
			self.threading = False

		if self.threading:
			addflags('--threading')

	valatask = self.valatask

	# libraries additionally produce a header, a .vapi, and optionally a .gir
	self.is_lib = 'cprogram' not in self.features
	if self.is_lib:
		addflags('--library=%s' % self.target)

		h_node = self.path.find_or_declare('%s.h' % self.target)
		valatask.outputs.append(h_node)
		addflags('--header=%s' % h_node.name)

		valatask.outputs.append(self.path.find_or_declare('%s.vapi' % self.target))

		if getattr(self, 'gir', None):
			gir_node = self.path.find_or_declare('%s.gir' % self.gir)
			addflags('--gir=%s' % gir_node.name)
			valatask.outputs.append(gir_node)

	self.vala_target_glib = getattr(self, 'vala_target_glib', getattr(Options.options, 'vala_target_glib', None))
	if self.vala_target_glib:
		addflags('--target-glib=%s' % self.vala_target_glib)

	addflags(['--define=%s' % x for x in getattr(self, 'vala_defines', [])])

	packages_private = Utils.to_list(getattr(self, 'packages_private', []))
	addflags(['--pkg=%s' % x for x in packages_private])

	def _get_api_version():
		# derive the API version ("0.x" or "x.0") from the wscript's
		# API_VERSION, defaulting to 1.0
		api_version = '1.0'
		if hasattr(Context.g_module, 'API_VERSION'):
			version = Context.g_module.API_VERSION.split(".")
			if version[0] == "0":
				api_version = "0." + version[1]
			else:
				api_version = version[0] + ".0"
		return api_version

	self.includes = Utils.to_list(getattr(self, 'includes', []))
	self.uselib = self.to_list(getattr(self, 'uselib', []))
	valatask.install_path = getattr(self, 'install_path', '')

	valatask.vapi_path = getattr(self, 'vapi_path', '${DATAROOTDIR}/vala/vapi')
	valatask.pkg_name = getattr(self, 'pkg_name', self.env['PACKAGE'])
	valatask.header_path = getattr(self, 'header_path', '${INCLUDEDIR}/%s-%s' % (valatask.pkg_name, _get_api_version()))
	valatask.install_binding = getattr(self, 'install_binding', True)

	self.packages = packages = Utils.to_list(getattr(self, 'packages', []))
	self.vapi_dirs = vapi_dirs = Utils.to_list(getattr(self, 'vapi_dirs', []))
	includes = []

	# walk 'use' dependencies transitively, collecting sibling vala
	# libraries whose .vapi output we must wait for
	if hasattr(self, 'use'):
		local_packages = Utils.to_list(self.use)[:] # make sure to have a copy
		seen = []
		while len(local_packages) > 0:
			package = local_packages.pop()
			if package in seen:
				continue
			seen.append(package)

			# check if the package exists
			try:
				package_obj = self.bld.get_tgen_by_name(package)
			except Errors.WafError:
				continue
			package_name = package_obj.target
			package_node = package_obj.path
			package_dir = package_node.path_from(self.path)

			for task in package_obj.tasks:
				for output in task.outputs:
					if output.name == package_name + ".vapi":
						valatask.set_run_after(task)
						if package_name not in packages:
							packages.append(package_name)
						if package_dir not in vapi_dirs:
							vapi_dirs.append(package_dir)
						if package_dir not in includes:
							includes.append(package_dir)

			if hasattr(package_obj, 'use'):
				lst = self.to_list(package_obj.use)
				lst.reverse()
				local_packages = [pkg for pkg in lst if pkg not in seen] + local_packages

	addflags(['--pkg=%s' % p for p in packages])

	# each vapi dir is declared twice: source tree and build tree
	for vapi_dir in vapi_dirs:
		v_node = self.path.find_dir(vapi_dir)
		if not v_node:
			Logs.warn('Unable to locate Vala API directory: %r' % vapi_dir)
		else:
			addflags('--vapidir=%s' % v_node.abspath())
			addflags('--vapidir=%s' % v_node.get_bld().abspath())

	self.dump_deps_node = None
	if self.is_lib and self.packages:
		self.dump_deps_node = self.path.find_or_declare('%s.deps' % self.target)
		valatask.outputs.append(self.dump_deps_node)

	self.includes.append(self.bld.srcnode.abspath())
	self.includes.append(self.bld.bldnode.abspath())
	for include in includes:
		try:
			self.includes.append(self.path.find_dir(include).abspath())
			self.includes.append(self.path.find_dir(include).get_bld().abspath())
		except AttributeError:
			# find_dir returned None — the directory does not exist
			Logs.warn("Unable to locate include directory: '%s'" % include)

	# installation entries for the generated header/vapi/gir; reuse the
	# install tasks if this function somehow runs twice
	if self.is_lib and valatask.install_binding:
		headers_list = [o for o in valatask.outputs if o.suffix() == ".h"]
		try:
			self.install_vheader.source = headers_list
		except AttributeError:
			self.install_vheader = self.bld.install_files(valatask.header_path, headers_list, self.env)

		vapi_list = [o for o in valatask.outputs if (o.suffix() in (".vapi", ".deps"))]
		try:
			self.install_vapi.source = vapi_list
		except AttributeError:
			self.install_vapi = self.bld.install_files(valatask.vapi_path, vapi_list, self.env)

		gir_list = [o for o in valatask.outputs if o.suffix() == '.gir']
		try:
			self.install_gir.source = gir_list
		except AttributeError:
			self.install_gir = self.bld.install_files(getattr(self, 'gir_path', '${DATAROOTDIR}/gir-1.0'), gir_list, self.env)
|
||||
|
||||
@extension('.vala', '.gs')
def vala_file(self, node):
	"""
	Compile a vala file and bind the task to *self.valatask*. If an existing vala task is already set, add the node
	to its inputs. The typical example is::

		def build(bld):
			bld.program(
				packages      = 'gtk+-2.0',
				target        = 'vala-gtk-example',
				uselib        = 'GTK GLIB',
				source        = 'vala-gtk-example.vala foo.vala',
				vala_defines  = ['DEBUG'] # adds --define=<xyz> values to the command-line

				# the following arguments are for libraries
				#gir          = 'hello-1.0',
				#gir_path     = '/tmp',
				#vapi_path = '/tmp',
				#pkg_name = 'hello'
				# disable installing of gir, vapi and header
				#install_binding = False

				# profile     = 'xyz' # adds --profile=<xyz> to enable profiling
				# threading   = True, # add --threading, except if profile is on or not on 'gobject'
				# vala_target_glib = 'xyz' # adds --target-glib=<xyz>, can be given through the command-line option --vala-target-glib=<xyz>
			)


	:param node: vala file
	:type node: :py:class:`waflib.Node.Node`
	"""

	# one shared valac task per task generator; created lazily on the
	# first vala source encountered
	try:
		valatask = self.valatask
	except AttributeError:
		valatask = self.valatask = self.create_task('valac')
		self.init_vala_task()

	valatask.inputs.append(node)
	# the generated .c file is handed to the c compiler via self.source
	c_node = node.change_ext('.c')
	valatask.outputs.append(c_node)
	self.source.append(c_node)
|
||||
|
||||
@conf
def find_valac(self, valac_name, min_version):
	"""
	Find the valac program, and execute it to store the version
	number in *conf.env.VALAC_VERSION*

	:param valac_name: program name
	:type valac_name: string or list of string
	:param min_version: minimum version acceptable
	:type min_version: tuple of int
	"""
	valac = self.find_program(valac_name, var='VALAC')
	try:
		output = self.cmd_and_log(valac + ' --version')
	except Exception:
		# version probe failed; treated as "too old" below
		valac_version = None
	else:
		# bugfix: escape the dots so e.g. '1x2y3' cannot match as a version
		ver = re.search(r'\d+\.\d+\.\d+', output).group(0).split('.')
		valac_version = tuple([int(x) for x in ver])

	self.msg('Checking for %s version >= %r' % (valac_name, min_version),
	         valac_version, valac_version and valac_version >= min_version)
	# bugfix: 'None < tuple' raises TypeError on python 3; an unknown
	# version is rejected explicitly instead
	if valac and (valac_version is None or valac_version < min_version):
		self.fatal("%s version %r is too old, need >= %r" % (valac_name, valac_version, min_version))

	self.env['VALAC_VERSION'] = valac_version
	return valac
|
||||
|
||||
@conf
def check_vala(self, min_version=(0,8,0), branch=None):
	"""
	Check that a vala compiler from the given branch exists, of at least
	the given version. Tries the branch-suffixed binary first (e.g.
	``valac-0.8``) and falls back to plain ``valac``.

	:param min_version: minimum version acceptable (0.8.0)
	:type min_version: tuple
	:param branch: first part of the version number, in case a snapshot is used (0, 8)
	:type branch: tuple of int
	"""
	if not branch:
		branch = min_version[:2]
	suffixed = 'valac-%d.%d' % (branch[0], branch[1])
	try:
		find_valac(self, suffixed, min_version)
	except self.errors.ConfigurationError:
		# no versioned binary available, fall back to the default name
		find_valac(self, 'valac', min_version)
|
||||
|
||||
@conf
def check_vala_deps(self):
	"""
	Load the gobject and gthread packages if they are missing.

	The two package checks were duplicated verbatim; they are now driven
	by a single loop over (pkg-config name, uselib store, env flag).
	"""
	for package, store, flag in (
			('gobject-2.0', 'GOBJECT', 'HAVE_GOBJECT'),
			('gthread-2.0', 'GTHREAD', 'HAVE_GTHREAD')):
		if self.env[flag]:
			continue
		pkg_args = {'package': package,
			'uselib_store': store,
			'args': '--cflags --libs'}
		# honour --vala-target-glib if it was given on the command line
		if getattr(Options.options, 'vala_target_glib', None):
			pkg_args['atleast_version'] = Options.options.vala_target_glib
		self.check_cfg(**pkg_args)
|
||||
|
||||
def configure(self):
	"""
	Configure the vala tool: load gnu_dirs, verify the dependencies and
	the compiler, and set the default compiler flags.

	Use the following to enforce a minimum vala version::

		def configure(conf):
			conf.load('vala', funs='')
			conf.check_vala(min_version=(0,10,0))
	"""
	self.load('gnu_dirs')
	self.check_vala_deps()
	self.check_vala()
	# -C: compile to C only; --quiet: suppress valac chatter
	self.env.VALAFLAGS = ['-C', '--quiet']
|
||||
|
||||
def options(opt):
	"""
	Load the :py:mod:`waflib.Tools.gnu_dirs` tool and add the ``--vala-target-glib`` command-line option
	"""
	opt.load('gnu_dirs')
	group = opt.add_option_group('Vala Compiler Options')
	group.add_option('--vala-target-glib', default=None,
		dest='vala_target_glib', metavar='MAJOR.MINOR',
		help='Target version of glib for Vala GObject code generation')
|
||||
|
190
third_party/waf/waf-light/waflib/Tools/waf_unit_test.py
vendored
Normal file
190
third_party/waf/waf-light/waflib/Tools/waf_unit_test.py
vendored
Normal file
|
@ -0,0 +1,190 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Carlos Rafael Giani, 2006
|
||||
# Thomas Nagy, 2010
|
||||
|
||||
"""
|
||||
Unit testing system for C/C++/D providing test execution:
|
||||
|
||||
* in parallel, by using ``waf -j``
|
||||
* partial (only the tests that have changed) or full (by using ``waf --alltests``)
|
||||
|
||||
The tests are declared by adding the **test** feature to programs::
|
||||
|
||||
def options(opt):
|
||||
opt.load('compiler_cxx waf_unit_test')
|
||||
def configure(conf):
|
||||
conf.load('compiler_cxx waf_unit_test')
|
||||
def build(bld):
|
||||
bld(features='cxx cxxprogram test', source='main.cpp', target='app')
|
||||
# or
|
||||
bld.program(features='test', source='main2.cpp', target='app2')
|
||||
|
||||
When the build is executed, the program 'test' will be built and executed without arguments.
|
||||
The success/failure is detected by looking at the return code. The status and the standard output/error
|
||||
are stored on the build context.
|
||||
|
||||
The results can be displayed by registering a callback function. Here is how to call
|
||||
the predefined callback::
|
||||
|
||||
def build(bld):
|
||||
bld(features='cxx cxxprogram test', source='main.c', target='app')
|
||||
from waflib.Tools import waf_unit_test
|
||||
bld.add_post_fun(waf_unit_test.summary)
|
||||
"""
|
||||
|
||||
import os, sys
|
||||
from waflib.TaskGen import feature, after_method
|
||||
from waflib import Utils, Task, Logs, Options
|
||||
testlock = Utils.threading.Lock()
|
||||
|
||||
@feature('test')
@after_method('apply_link')
def make_test(self):
	"""Create the unit test task; there can be at most one per task generator."""
	link = getattr(self, 'link_task', None)
	if link:
		# the test executable is the output of the link task
		self.create_task('utest', link.outputs)
|
||||
|
||||
class utest(Task.Task):
	"""
	Execute a unit test.

	The task runs the linked binary, records (filename, returncode,
	stdout, stderr) on the build context, and never fails the build by
	itself — use :py:func:`summary` or :py:func:`set_exit_code` to react
	to the results.
	"""
	color = 'PINK'
	# run after installation/versioning tasks so the binary is in place
	after = ['vnum', 'inst']
	vars = []
	def runnable_status(self):
		"""
		Always execute the task if `waf --alltests` was used or no
		tests if ``waf --notests`` was used
		"""
		if getattr(Options.options, 'no_tests', False):
			return Task.SKIP_ME

		ret = super(utest, self).runnable_status()
		if ret == Task.SKIP_ME:
			# up-to-date, but --alltests forces a re-run anyway
			if getattr(Options.options, 'all_tests', False):
				return Task.RUN_ME
		return ret

	def run(self):
		"""
		Execute the test. The execution is always successful, but the results
		are stored on ``self.generator.bld.utest_results`` for postprocessing.
		"""

		filename = self.inputs[0].abspath()
		# the command line may be overridden per task generator via ut_exec/ut_fun
		self.ut_exec = getattr(self.generator, 'ut_exec', [filename])
		if getattr(self.generator, 'ut_fun', None):
			# FIXME waf 1.8 - add a return statement here?
			self.generator.ut_fun(self)

		try:
			fu = getattr(self.generator.bld, 'all_test_paths')
		except AttributeError:
			# first test to run builds the shared environment once and
			# caches it on the build context for the other test tasks
			# this operation may be performed by at most #maxjobs
			fu = os.environ.copy()

			# collect the directories containing the built libraries so the
			# dynamic loader can find them when running the test binaries
			lst = []
			for g in self.generator.bld.groups:
				for tg in g:
					if getattr(tg, 'link_task', None):
						s = tg.link_task.outputs[0].parent.abspath()
						if s not in lst:
							lst.append(s)

			def add_path(dct, path, var):
				# prepend the build paths to the inherited environment value
				dct[var] = os.pathsep.join(Utils.to_list(path) + [os.environ.get(var, '')])

			if Utils.is_win32:
				add_path(fu, lst, 'PATH')
			elif Utils.unversioned_sys_platform() == 'darwin':
				add_path(fu, lst, 'DYLD_LIBRARY_PATH')
				add_path(fu, lst, 'LD_LIBRARY_PATH')
			else:
				add_path(fu, lst, 'LD_LIBRARY_PATH')
			self.generator.bld.all_test_paths = fu


		cwd = getattr(self.generator, 'ut_cwd', '') or self.inputs[0].parent.abspath()

		# --testcmd wraps the binary in another command, e.g. valgrind
		testcmd = getattr(Options.options, 'testcmd', False)
		if testcmd:
			self.ut_exec = (testcmd % self.ut_exec[0]).split(' ')

		proc = Utils.subprocess.Popen(self.ut_exec, cwd=cwd, env=fu, stderr=Utils.subprocess.PIPE, stdout=Utils.subprocess.PIPE)
		(stdout, stderr) = proc.communicate()

		tup = (filename, proc.returncode, stdout, stderr)
		self.generator.utest_result = tup

		# utest_results is shared between parallel test tasks, hence the lock
		testlock.acquire()
		try:
			bld = self.generator.bld
			Logs.debug("ut: %r", tup)
			try:
				bld.utest_results.append(tup)
			except AttributeError:
				bld.utest_results = [tup]
		finally:
			testlock.release()
|
||||
|
||||
def summary(bld):
	"""
	Display an execution summary::

		def build(bld):
			bld(features='cxx cxxprogram test', source='main.c', target='app')
			from waflib.Tools import waf_unit_test
			bld.add_post_fun(waf_unit_test.summary)
	"""
	results = getattr(bld, 'utest_results', [])
	if not results:
		return
	Logs.pprint('CYAN', 'execution summary')

	total = len(results)
	# a non-zero return code (element 1 of the tuple) marks a failure
	tfail = sum(1 for x in results if x[1])

	Logs.pprint('CYAN', ' tests that pass %d/%d' % (total - tfail, total))
	for (f, code, out, err) in results:
		if not code:
			Logs.pprint('CYAN', ' %s' % f)

	Logs.pprint('CYAN', ' tests that fail %d/%d' % (tfail, total))
	for (f, code, out, err) in results:
		if code:
			Logs.pprint('CYAN', ' %s' % f)
|
||||
|
||||
def set_exit_code(bld):
	"""
	If any of the tests fail waf will exit with that exit code.
	This is useful if you have an automated build system which need
	to report on errors from the tests.
	You may use it like this:

		def build(bld):
			bld(features='cxx cxxprogram test', source='main.c', target='app')
			from waflib.Tools import waf_unit_test
			bld.add_post_fun(waf_unit_test.set_exit_code)
	"""
	for (f, code, out, err) in getattr(bld, 'utest_results', []):
		if not code:
			continue
		# build a readable message from the captured output, then abort
		msg = []
		if out:
			msg.append('stdout:%s%s' % (os.linesep, out.decode('utf-8')))
		if err:
			msg.append('stderr:%s%s' % (os.linesep, err.decode('utf-8')))
		bld.fatal(os.linesep.join(msg))
|
||||
|
||||
|
||||
def options(opt):
	"""
	Provide the ``--alltests``, ``--notests`` and ``--testcmd`` command-line options.
	"""
	opt.add_option('--notests', action='store_true', default=False, help='Exec no unit tests', dest='no_tests')
	opt.add_option('--alltests', action='store_true', default=False, help='Exec all unit tests', dest='all_tests')
	# the help text used to advertise a non-existent "--test-cmd" flag;
	# the option is actually spelled --testcmd
	opt.add_option('--testcmd', action='store', default=False,
		help='Run the unit tests using the testcmd string'
		' example "--testcmd="valgrind --error-exitcode=1'
		' %s" to run under valgrind', dest='testcmd')
|
||||
|
114
third_party/waf/waf-light/waflib/Tools/winres.py
vendored
Normal file
114
third_party/waf/waf-light/waflib/Tools/winres.py
vendored
Normal file
|
@ -0,0 +1,114 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Brant Young, 2007
|
||||
|
||||
"Process *.rc* files for C/C++: X{.rc -> [.res|.rc.o]}"
|
||||
|
||||
import re, traceback
|
||||
from waflib import Task, Logs, Utils
|
||||
from waflib.TaskGen import extension
|
||||
from waflib.Tools import c_preproc
|
||||
|
||||
@extension('.rc')
def rc_file(self, node):
	"""
	Bind the .rc extension to a winrc task
	"""
	# MSVC's rc.exe (/fo) produces .res files; windres produces .rc.o objects
	if self.env['WINRC_TGT_F'] == '/fo':
		obj_ext = '.res'
	else:
		obj_ext = '.rc.o'
	rctask = self.create_task('winrc', node, node.change_ext(obj_ext))
	tasks = getattr(self, 'compiled_tasks', None)
	if tasks is None:
		tasks = self.compiled_tasks = []
	tasks.append(rctask)
|
||||
|
||||
# Matches preprocessor directives (#include, #define, ...) and resource
# statements referencing external files (ICON, BITMAP, ...).
# The patterns must be raw strings: '\s' and '\w' in plain string literals
# are invalid escape sequences (DeprecationWarning today, a SyntaxError in
# future Python versions).
re_lines = re.compile(
	r'(?:^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*?)\s*$)|'
	r'(?:^\w+[ \t]*(ICON|BITMAP|CURSOR|HTML|FONT|MESSAGETABLE|TYPELIB|REGISTRY|D3DFX)[ \t]*(.*?)\s*$)',
	re.IGNORECASE | re.MULTILINE)
|
||||
|
||||
class rc_parser(c_preproc.c_parser):
	"""
	C preprocessor subclass that additionally treats resource statements
	(ICON, BITMAP, CURSOR, ...) as file dependencies, so that .rc targets
	are rebuilt when the referenced resources change.
	"""
	def filter_comments(self, filepath):
		"""
		Return the list of (directive, argument) pairs found in *filepath*,
		using :py:data:`re_lines` instead of the plain C directive scan.
		"""
		code = Utils.readf(filepath)
		if c_preproc.use_trigraphs:
			# expand trigraph sequences before scanning
			for (a, b) in c_preproc.trig_def: code = code.split(a).join(b)
		# strip escaped newlines and comments
		code = c_preproc.re_nl.sub('', code)
		code = c_preproc.re_cpp.sub(c_preproc.repl, code)
		ret = []
		for m in re.finditer(re_lines, code):
			if m.group(2):
				# a real preprocessor directive: (keyword, rest-of-line)
				ret.append((m.group(2), m.group(3)))
			else:
				# a resource statement: treat the referenced file as an include
				ret.append(('include', m.group(5)))
		return ret

	def addlines(self, node):
		"""
		Parse *node* and push its (directive, argument) pairs onto
		``self.lines``, using ``self.parse_cache`` to avoid re-reading files.
		"""
		self.currentnode_stack.append(node.parent)
		filepath = node.abspath()

		self.count_files += 1
		if self.count_files > c_preproc.recursion_limit:
			# guard against include cycles
			raise c_preproc.PreprocError("recursion limit exceeded")
		pc = self.parse_cache
		Logs.debug('preproc: reading file %r', filepath)
		try:
			lns = pc[filepath]
		except KeyError:
			pass
		else:
			# cache hit: reuse the lines parsed earlier
			self.lines.extend(lns)
			return

		try:
			lines = self.filter_comments(filepath)
			# POPFILE marks the end of this file for the parser state machine;
			# lines are reversed because the parser pops from the end
			lines.append((c_preproc.POPFILE, ''))
			lines.reverse()
			pc[filepath] = lines
			self.lines.extend(lines)
		except IOError:
			raise c_preproc.PreprocError("could not read the file %s" % filepath)
		except Exception:
			# scanning is best-effort: log and continue rather than break the build
			if Logs.verbose > 0:
				Logs.error("parsing %s failed" % filepath)
				traceback.print_exc()
|
||||
|
||||
class winrc(Task.Task):
	"""
	Task for compiling resource files
	"""
	run_str = '${WINRC} ${WINRCFLAGS} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${WINRC_TGT_F} ${TGT} ${WINRC_SRC_F} ${SRC}'
	color = 'BLUE'

	def scan(self):
		"""Scan the .rc input with the resource-aware preprocessor and return its dependencies."""
		parser = rc_parser(self.generator.includes_nodes)
		parser.start(self.inputs[0], self.env)

		if Logs.verbose:
			Logs.debug('deps: deps for %s: %r; unresolved %r' % (str(self), parser.nodes, parser.names))

		return (parser.nodes, parser.names)
|
||||
|
||||
def configure(conf):
	"""
	Detect the programs RC or windres, depending on the C/C++ compiler in use
	"""
	env = conf.env
	env['WINRC_TGT_F'] = '-o'
	env['WINRC_SRC_F'] = '-i'

	# find rc.exe (msvc) or windres (gcc-like toolchains)
	if not conf.env.WINRC:
		if env.CC_NAME == 'msvc':
			conf.find_program('RC', var='WINRC', path_list=env['PATH'])
			# rc.exe uses MSVC-style flags
			env['WINRC_TGT_F'] = '/fo'
			env['WINRC_SRC_F'] = ''
		else:
			conf.find_program('windres', var='WINRC', path_list=env['PATH'])
	if not conf.env.WINRC:
		conf.fatal('winrc was not found!')

	env['WINRCFLAGS'] = []
|
69
third_party/waf/waf-light/waflib/Tools/xlc.py
vendored
Normal file
69
third_party/waf/waf-light/waflib/Tools/xlc.py
vendored
Normal file
|
@ -0,0 +1,69 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2006-2010 (ita)
|
||||
# Ralf Habacker, 2006 (rh)
|
||||
# Yinon Ehrlich, 2009
|
||||
# Michael Kuhn, 2009
|
||||
|
||||
from waflib.Tools import ccroot, ar
|
||||
from waflib.Configure import conf
|
||||
|
||||
@conf
def find_xlc(conf):
	"""
	Detect the Aix C compiler
	"""
	# prefer the reentrant variant xlc_r when available
	cc = conf.cmd_to_list(conf.find_program(['xlc_r', 'xlc'], var='CC'))
	conf.get_xlc_version(cc)
	conf.env.CC_NAME = 'xlc'
	conf.env.CC = cc
|
||||
|
||||
@conf
def xlc_common_flags(conf):
	"""
	Flags required for executing the Aix C compiler
	"""
	env = conf.env

	# compilation
	env['CC_SRC_F'] = []
	env['CC_TGT_F'] = ['-c', '-o']

	# linker: default to the compiler driver
	if not env['LINK_CC']:
		env['LINK_CC'] = env['CC']
	env['CCLNK_SRC_F'] = []
	env['CCLNK_TGT_F'] = ['-o']
	env['CPPPATH_ST'] = '-I%s'
	env['DEFINES_ST'] = '-D%s'

	env['LIB_ST'] = '-l%s'       # template for adding libs
	env['LIBPATH_ST'] = '-L%s'   # template for adding libpaths
	env['STLIB_ST'] = '-l%s'
	env['STLIBPATH_ST'] = '-L%s'
	env['RPATH_ST'] = '-Wl,-rpath,%s'

	env['SONAME_ST'] = []
	env['SHLIB_MARKER'] = []
	env['STLIB_MARKER'] = []

	# program
	env['LINKFLAGS_cprogram'] = ['-Wl,-brtl']
	env['cprogram_PATTERN'] = '%s'

	# shared library
	env['CFLAGS_cshlib'] = ['-fPIC']
	env['LINKFLAGS_cshlib'] = ['-G', '-Wl,-brtl,-bexpfull']
	env['cshlib_PATTERN'] = 'lib%s.so'

	# static lib
	env['LINKFLAGS_cstlib'] = []
	env['cstlib_PATTERN'] = 'lib%s.a'
|
||||
|
||||
def configure(conf):
	"""
	Configuration for the xlc tool: find the compiler and archiver,
	then set the common flags. The call order matters: the compiler
	must be detected before the flags are derived from it.
	"""
	conf.find_xlc()
	conf.find_ar()
	conf.xlc_common_flags()
	conf.cc_load_tools()
	conf.cc_add_flags()
	conf.link_add_flags()
|
||||
|
69
third_party/waf/waf-light/waflib/Tools/xlcxx.py
vendored
Normal file
69
third_party/waf/waf-light/waflib/Tools/xlcxx.py
vendored
Normal file
|
@ -0,0 +1,69 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2006-2010 (ita)
|
||||
# Ralf Habacker, 2006 (rh)
|
||||
# Yinon Ehrlich, 2009
|
||||
# Michael Kuhn, 2009
|
||||
|
||||
from waflib.Tools import ccroot, ar
|
||||
from waflib.Configure import conf
|
||||
|
||||
@conf
def find_xlcxx(conf):
	"""
	Detect the Aix C++ compiler
	"""
	# prefer the reentrant variant xlc++_r when available
	cxx = conf.cmd_to_list(conf.find_program(['xlc++_r', 'xlc++'], var='CXX'))
	conf.get_xlc_version(cxx)
	conf.env.CXX_NAME = 'xlc++'
	conf.env.CXX = cxx
|
||||
|
||||
@conf
def xlcxx_common_flags(conf):
	"""
	Flags required for executing the Aix C++ compiler
	"""
	env = conf.env

	# compilation
	env['CXX_SRC_F'] = []
	env['CXX_TGT_F'] = ['-c', '-o']

	# linker: default to the compiler driver
	if not env['LINK_CXX']:
		env['LINK_CXX'] = env['CXX']
	env['CXXLNK_SRC_F'] = []
	env['CXXLNK_TGT_F'] = ['-o']
	env['CPPPATH_ST'] = '-I%s'
	env['DEFINES_ST'] = '-D%s'

	env['LIB_ST'] = '-l%s'       # template for adding libs
	env['LIBPATH_ST'] = '-L%s'   # template for adding libpaths
	env['STLIB_ST'] = '-l%s'
	env['STLIBPATH_ST'] = '-L%s'
	env['RPATH_ST'] = '-Wl,-rpath,%s'

	env['SONAME_ST'] = []
	env['SHLIB_MARKER'] = []
	env['STLIB_MARKER'] = []

	# program
	env['LINKFLAGS_cxxprogram'] = ['-Wl,-brtl']
	env['cxxprogram_PATTERN'] = '%s'

	# shared library
	env['CXXFLAGS_cxxshlib'] = ['-fPIC']
	env['LINKFLAGS_cxxshlib'] = ['-G', '-Wl,-brtl,-bexpfull']
	env['cxxshlib_PATTERN'] = 'lib%s.so'

	# static lib
	env['LINKFLAGS_cxxstlib'] = []
	env['cxxstlib_PATTERN'] = 'lib%s.a'
|
||||
|
||||
def configure(conf):
	"""
	Configuration for the xlc++ tool: find the compiler and archiver,
	then set the common flags. The call order matters: the compiler
	must be detected before the flags are derived from it.
	"""
	conf.find_xlcxx()
	conf.find_ar()
	conf.xlcxx_common_flags()
	conf.cxx_load_tools()
	conf.cxx_add_flags()
	conf.link_add_flags()
|
||||
|
Loading…
Add table
Add a link
Reference in a new issue