diff --git a/.gitignore b/.gitignore index 5f7dc13726..f54a7ef593 100644 --- a/.gitignore +++ b/.gitignore @@ -8,4 +8,32 @@ build demos/*/build* playground/*/build* .waf-* +.waf3-* *.log +.DS_Store +.AppleDouble +.LSOverride + +# Icon must end with two \r +Icon + +# Thumbnails +._* + +# Files that might appear in the root of a volume +.DocumentRevisions-V100 +.fseventsd +.Spotlight-V100 +.TemporaryItems +.Trashes +.VolumeIcon.icns + +# Directories potentially created on remote AFP share +.AppleDB +.AppleDesktop +Network Trash Folder +Temporary Items +.apdisk + +# Windows build output +waf.bat diff --git a/.pipelines/Jenkinsfile b/.pipelines/Jenkinsfile new file mode 100644 index 0000000000..33d30e12c8 --- /dev/null +++ b/.pipelines/Jenkinsfile @@ -0,0 +1,291 @@ + +pipeline { + agent none + stages { + stage('Build and Lint') { + parallel { + stage('Python 2.7') { + agent { + label "ubuntu" + } + steps { + sh 'python2.7 ./waf-light' + stash includes: 'waf', name: 'waf' + } + } + stage('Python 3.6') { + agent { + label "freebsd" + } + steps { + sh 'python3.6 ./waf-light' + } + } + stage('Deprecation warnings') { + agent { + label "ubuntu" + } + steps { + sh ''' +cd waflib +find . -iname "*.pyc" -exec rm -f {} \\; || true +! (((PYTHONWARNINGS=all python3.6 -m compileall . > /dev/null) 2>&1 ) 2>&1) | grep -i DeprecationWarning +''' + } + } + stage('Pyflakes') { + agent { + label "freebsd" + } + steps { + sh ''' +cd waflib +var=`(/usr/local/bin/pyflakes3.py *.py Tools/*.py extras/*.py 2>&1) | egrep "undefined name|invalid syntax|inconsistent use|unindent does not match any outer indentation level" | wc -l` +if [ "0" -eq "$var" ] +then + /usr/local/bin/pyflakes3.py *.py Tools/*.py extras/*.py || true +else + # just do it again and fail + /usr/local/bin/pyflakes3.py *.py Tools/*.py extras/*.py + exit 1 +fi +''' + } + } + stage('Unit tests') { + agent { + label "fedora" + } + steps { + sh ''' +./waf-light +cd tests/preproc/ +../../waf distclean +../../waf configure build +cd ../..''' + sh ''' +cd tests/install/ +../../waf distclean +../../waf configure build +cd ../..''' + sh ''' +cd tests/general/ +../../waf distclean +../../waf configure build +cd ../..''' + sh ''' +export PATH=$PATH:$PWD +cd tests/init/ +../../waf distclean +../../waf configure build +cd ../..''' + sh ''' +export WAF_TEST_GROUP=waftest +cd tests/install_group/ +../../waf distclean +../../waf configure build +''' + } + } + } + } + stage('Integration') { + parallel { + stage('Ubuntu') { + stages { + stage('py25') { + agent { + label "ubuntu" + } + steps { + dir('demos') { + unstash 'waf' + } + sh '''cd demos; LD_LIBRARY_PATH=/opt/lib ./waf distclean configure clean build --top=c''' + sh '''cd demos; LD_LIBRARY_PATH=/opt/lib ./waf distclean configure clean build --top=c++''' + sh '''cd demos; LD_LIBRARY_PATH=/opt/lib ./waf distclean configure clean build --top=java''' + sh '''cd demos; LD_LIBRARY_PATH=/opt/lib ./waf distclean configure clean build --top=perl''' + sh '''cd demos; LD_LIBRARY_PATH=/opt/lib ./waf distclean configure clean build --top=python''' + sh '''cd demos; LD_LIBRARY_PATH=/opt/lib ./waf distclean configure clean build --top=qt5''' + sh '''cd demos; LD_LIBRARY_PATH=/opt/lib ./waf distclean configure clean build --top=subst''' + } + } + stage('py36') { + agent { + label "ubuntu" + } + steps { + dir('demos') { + unstash 'waf' + } + sh '''cd demos; python3 ./waf configure clean build --top=c''' + sh '''cd demos; python3 ./waf configure clean build --top=c++''' + sh '''cd demos; python3 ./waf configure clean build 
--top=java''' + sh '''cd demos; python3 ./waf configure clean build --top=perl''' + sh '''cd demos; python3 ./waf configure clean build --top=python''' + sh '''cd demos; python3 ./waf configure clean build --top=qt5''' + sh '''cd demos; python3 ./waf configure clean build --top=subst''' + } + } + stage('py27') { + agent { + label "ubuntu" + } + steps { + dir('demos') { + unstash 'waf' + } + sh '''cd demos; ./waf configure clean build --top=c''' + sh '''cd demos; ./waf configure clean build --top=c++''' + sh '''cd demos; ./waf configure clean build --top=java''' + sh '''cd demos; ./waf configure clean build --top=perl''' + sh '''cd demos; ./waf configure clean build --top=python''' + sh '''cd demos; ./waf configure clean build --top=qt5''' + sh '''cd demos; ./waf configure clean build --top=subst''' + } + } + } + } + stage('OpenBSD') { + stages { + stage('Jython') { + agent { + label "openbsd" + } + steps { + sh ''' + export WAF_NO_PREFORK=1 + /home/jenkins/jython/bin/jython ./waf-light + cp waf demos/c + cd demos/c + /home/jenkins/jython/bin/jython ./waf distclean configure clean build + ''' + } + } + stage('py38') { + agent { + label "openbsd" + } + steps { + dir('demos') { + unstash 'waf' + } + sh '''cd demos/asm; python3 ../waf configure clean build''' + sh '''cd demos/c; python3 ../waf configure clean build''' + sh '''cd demos/c++; python3 ../waf configure clean build''' + sh '''cd demos/glib2; python3 ../waf configure clean build''' + sh '''cd demos/perl; python3 ../waf configure clean build''' + sh '''cd demos/python; python3 ../waf configure clean build''' + sh '''cd demos/subst; python3 ../waf configure clean build''' + } + } + } + } + stage('Windows') { + stages { + stage('C/py34') { + agent { + label "windows" + } + steps { + bat ''' C:/Python34/python.exe waf-light --tools=msvs ''' + bat ''' + copy waf demos\\c /Y + cd demos\\c + C:/Python34/python.exe waf distclean + C:/Python34/python.exe waf configure --no-msvc-lazy build -v + ''' + bat ''' + copy waf demos\\qt5 /Y + cd demos\\qt5 + C:/Python34/python.exe waf distclean + C:/Python34/python.exe waf configure --no-msvc-lazy build -v + ''' + bat ''' + copy waf playground\\msvs /Y + cd playground\\msvs + C:/Python34/python.exe waf distclean + C:/Python34/python.exe waf configure + C:/Python34/python.exe waf msvs + ''' + } + } + stage('C/Msys2/py27') { + agent { + label "windows" + } + steps { + unstash 'waf' + bat ''' + copy waf demos\\c /Y + cd demos\\c + set MSYSTEM=MINGW64 + set WD=C:\\msys64\\usr\\bin + set CHERE_INVOKING=1 + C:\\msys64\\usr\\bin\\sh --login -c 'exec /bin/bash -c "python waf configure clean build && python waf distclean"' + ''' + } + } + stage('C/Msys2/py35') { + agent { + label "windows" + } + steps { + unstash 'waf' + bat ''' + copy waf demos\\c /Y + cd demos\\c + set MSYSTEM=MINGW64 + set WD=C:\\msys64\\usr\\bin + set CHERE_INVOKING=1 + C:\\msys64\\usr\\bin\\sh --login -c 'exec /bin/bash -c "python3 waf configure clean build && python3 waf distclean"' + ''' + } + } + } + } + stage('FreeBSD') { + stages { + stage('py36') { + agent { + label "freebsd" + } + steps { + dir('demos') { + unstash 'waf' + } + sh '''cd demos/c; python3.6 ../waf distclean configure clean build''' + sh '''cd demos/c++; python3.6 ../waf distclean configure clean build''' + sh '''cd demos/java; python3.6 ../waf distclean configure clean build''' + sh '''cd demos/jni; python3.6 ../waf distclean configure clean build''' + sh '''cd demos/perl; python3.6 ../waf distclean configure clean build''' + sh '''cd demos/python; python3.6 ../waf 
distclean configure clean build''' + sh '''cd demos/ruby; python3.6 ../waf distclean configure clean build''' + sh '''cd demos/glib2; python3.6 ../waf distclean configure clean build''' + sh '''cd demos/qt5; python3.6 ../waf distclean configure clean build''' + sh '''cd demos/dbus; python3.6 ../waf distclean configure clean build''' + } + } + } + } + stage('MacOS') { + stages { + stage('py27') { + agent { + label "macos" + } + steps { + dir('demos') { + unstash 'waf' + } + sh '''cd demos/c; ../waf distclean configure clean build''' + sh '''cd demos/python; ../waf distclean configure clean build''' + sh '''cd demos/mac_app; ../waf distclean configure clean build''' + } + } + } + } + } + } + } +} diff --git a/ChangeLog b/ChangeLog index 851ea06a0c..3d915b377e 100644 --- a/ChangeLog +++ b/ChangeLog @@ -1,79 +1,267 @@ -NEW IN WAF 1.8.8 +NEW IN WAF 2.0.27 +----------------- +* Improve Qt6 detection on msvc #2423 +* Fix a regression in the detection of QtX3D libraries #2367 +* Avoid coloring all MSVC logs #2366 +* Switch to nonstopmode for latex prompts #2421 +* Restrict executable detection to files having the executable bits #2349 + +NEW IN WAF 2.0.26 +----------------- +* Improve "waf dist" - Support SOURCE_DATE_EPOCH +* Detect Qt6 #2355 +* Haxe toolkit support #2352 +* Updated the project's gpg key + +NEW IN WAF 2.0.25 +----------------- +* Fix invalid characters returned by find_program #2397 +* Prepare for distutils removal (Python 3.12 regression) #2402 +* Improve cp65001 compat in cpython < 3.3 #2346 +* Add Fujitsu Fortran compiler detection on ARM64FX #2348 +* Support multiple Sphinx output formats #2344 +* Improve PyQt5 detection #2343 +* Add asynchronous wafcache uploads + +NEW IN WAF 2.0.24 +----------------- +* Use EXT_SUFFIX config var over the deprecated/removed SO (Python 3.11 regression) #2386 +* When detecting Visual Studio compilers, prefer the oldest version regardless of compiler type #2352 +* Update the MacOS-specific examples #2337 +* Fix Configure.find_program() invalid character handling in default variable names #2397 + +NEW IN WAF 2.0.23 +----------------- +* Fix the Qt3D* libraries detection #2368 +* Fix swig processing when \r is preset in the module name #2350 +* Add RISC-V generic detection #2322 +* Detect gcc first on GNU/kFreeBSD #2336 +* Improve waflib/extras/msvcdeps performance #2323 + +NEW IN WAF 2.0.22 +----------------- +* Fix stdin propagation with faulty vcvarsall scripts #2315 +* Enable mixing Unix-style paths with destdir on Windows platforms #2337 +* Fix shell escaping unit test parameters #2314 +* Improve extras/clang_compilation_database and extras/swig compatibility #2336 +* Propagate C++ flags to the Cuda compiler in extras/cuda #2311 +* Fix detection of Qt 5.0.0 (preparation for Qt6) #2331 +* Enable Haxe processing #2308 +* Fix regression in MACOSX_DEPLOYMENT_TARGET caused by distutils #2330 +* Fix extras/wafcache concurrent trimming issues #2312 +* Fix extras/wafcache symlink handling #2327 + +NEW IN WAF 2.0.21 +----------------- +* Set the default --msvc_version from VSCMD_VER if present #2299 +* Force unit-test reruns on ut_str, ut_cmd or ut_path changes #2302 +* Describe Qt5's library detection #2288 +* Introduce conf.env.ASMDEFINES_ST to enable assembly-specific define flags +* Update extras/xcode6 to Python3 #2290 +* Enable parameter "always" in extras/doxygen #2292 +* Fix extras/c_dumbpreproc as it was previously broken +* Fix extras/gccdeps and extras/msvcdeps on header renaming #2293 +* Improve extras/msvcdeps debug outputs and flags 
#2287 #2291 +* Add add MCST Elbrus CPU detection in c config #2297 +* Add minio object copies to extras/wafcache #2304 + +NEW IN WAF 2.0.20 +----------------- +* Detect -flto and -fno-lto in parse_config #2281 +* Improve custom option processing #2280 +* Enable Clang on cygwin #2279 +* Make distclean target 'no_lock_in_top/run' modifiers compatible with env vars #2271 +* Update irix compiler detection +* Exclude ConfigSet from altering ConfigurationContext.run_build caches #2273 +* Add gas support in extras/gccdeps.py #2278 +* Improve compatibility with custom commands in extras/clang_compilation_database #2274 + +NEW IN WAF 2.0.19 +----------------- +* Enable race-free pdb file generation waflib/extras/msvc_pdb.py #1731 +* Fix negative values for -j #2256 +* Fix Swig example compatibility with Python3 #2259 +* Fix lto settings for endianness configuration tests #2250 +* Tune the application name on --help #2254 +* Improve Qt5's fPIC/fPIE detection +* Propagate LDFLAGS to Fortran tasks (in addition to LINKFLAGS) +* Enable local and remote build object caches waflib/extras/wafcache.py + +NEW IN WAF 2.0.18 +----------------- +* Fix a deadlock with cython and subst tasks #2244 +* Fix rpath processing so that it no longer breaks dependency chains +* Fix fast_partial.py failures on configuration tests +* Fix duplicate -fno-strict-aliasing flags in Python compilation flags detection +* Fix annoying PIE errors in demos/asm/ +* Improve configuration tests cache accuracy #2251 +* Improve extras/fast_partial.py compatibility +* Improve extras/doxygen.py outdir parameter settings #2255 +* Add a dependency scanner for assembly files (Gas/Yasm) +* Add executable arguments for configuration tests / execute=True +* Add a QtTest example to demos/qt5/ #2241 +* Add a cross-compilation option to extras/objcopy.py #2247 + +NEW IN WAF 2.0.17 +----------------- +* Improve build performance on FreeBSD and Python3 #2241 +* Add Python 3.8 flag detection for building embedded interpreters #2239 +* Prevent Qt5 uninstallation errors when c++ files are generated +* Improve installation/uninstallation colors + +NEW IN WAF 2.0.16 +----------------- +* Fix task semaphore errors on stateful tasks #2232 +* Fix encoding errors with UTF-8 paths on Python 2 #2230 +* Fix encoding errors in waf_unit_test #2220 +* Improve dependency between javac task and use-d generated jars nodes +* Install pdb files with /debug:* flags #2224 +* Make javadoc detection optional #2225 +* Improve md5_tstamp documentation #2221 +* Add extras/color_msvc to colorizes MSVC outputs #2221 +* Fix symbol regex on mac-o binaries in extras/syms #2222 +* Maintain order of lines in doxyfile in extras/doxygen #2223 +* Improve extras/msvcdeps path handling +* Add extras/clang_cross for cross-compilation using clang + +NEW IN WAF 2.0.15 +----------------- +* Fix Python path detection under Git-Bash #2217 +* Provide an option to disable args files #2216 +* Improve command line length calculation for args files #2214 +* Add libs/stubs to Cuda library path #2213 +* Suppress ant_glob java warnings #2212 +* Support multiple 'default_cmd' #2211 +* Warn when buildcopy is used without any files to copy #2210 +* Protobuf examples enhancements #2208 #2209 +* Fix all DeprecationWarning: invalid escape sequence #2207 + +NEW IN WAF 2.0.14 +----------------- +* Support Fortran 2008 submodules #2205 +* Possible solution for Msys/Python 3.6 path issues #2217 +* Support NEC SX-Aurora TSUBASA system's Fortran compiler extras/fc_nfort.py #2206 +* Fix ignored configuration flags 
in gccdeps extras/gccdeps.py #2203 +* Fix included protoc search on nested wscripts extras/protoc.py #2202 +* Support extra taskgen and out of project include directories extras/protoc.py #2204 + +NEW IN WAF 2.0.13 +----------------- +* Fix "broken revdeps" extra error message on certain build failures +* Avoid duplicate flags in Python configuration tests +* Find more Swig dependencies #2206 +* Avoid spawning threads with -j1 on AIX systems + +NEW IN WAF 2.0.12 +----------------- +* Fix broken inheritance task trees #2194 + +NEW IN WAF 2.0.11 +----------------- +* Do not raise an exception on check_cfg/mandatory=False/-vv #2193 +* Post past task generators in lazy sub-folder builds #2191 +* Disable warnings on versioned library installation +* Fix cpplint concurrent execution problems + +NEW IN WAF 2.0.10 +----------------- +* Add a task semaphore system +* Fix --help when no wscript is supplied #2184 +* Fix Fortran processing with generated Fortran files + +NEW IN WAF 2.0.9 +---------------- +* Add dependencies on scriptlet outputs +* Made options optional for cython waftool +* Improve doxygen error handling + +NEW IN WAF 2.0.8 +---------------- +* Improve Windows console encoding outputs on Python 3.6 and Japanese code page #2163 +* Improve msvc detection on Python 3.6 and Japanese code page #2155 +* Improve moc/rcc flag parsing with msvc #2169 +* Improve Eclipse project generation #2166 #2165 #2164 #2149 #2145 +* Improve Boost project detection on dpkg-based systems #2146 + +NEW IN WAF 2.0.7 ---------------- -* Fixed a regression in Fortran builds #1546 -* Doxygen tool enhancements +* Apply priorities to dynamically-generated tasks #2137 +* Fix upcoming Python 3.7 incompatibilities #2126 +* Fix Python3 support in extras/xcode6.py #2121 +* Improve priority support in extras/swig.py #2137 +* Improve support extras/protoc.py #2135 +* Improve argument handling in extras/clang_compilation_database.py #2127 +* Add glib DBus bindings in extras/gdbus.py #2134 +* Avoid name collisions for precompiled headers and libraries with similar names in extras/pch.py #2122 -NEW IN WAF 1.8.7 +NEW IN WAF 2.0.6 ---------------- -* Fixed the default libdir installation suffix on Redhat 64-bit systems #1536 -* Fixed the Python 2.6 detection on Redhat systems #1538 -* Enabled gccdeps to work with clang -* Fixed the detection of clang from the gcc tools -* Added orig_run_str to help subclasses (do not use Task.hcode) -* Fixed the detection of older clang compilers versions -* NEC compiler support -* Enabled batched_cc.py to work with msvc too -* Enabled unity.py to process c files -* Faster dependency calculation in c_dumbpreproc -* New stracedeps example (cfg blog entry) -* Added support for xz compression in waf and waf dist (Python 3.5) - -NEW IN WAF 1.8.6 +* Add Task.deep_inputs to enable further dependencies on input file tasks +* Set unit tests to depend on input file tasks instead of timestamps + +NEW IN WAF 2.0.5 ---------------- -* Python file installation fixes -* Fix for classes containing utf-8 names -* Hide the progress bar when running waf -ppp -* Support more header extensions in subst/is_copy -* Handle non-string and non-list values in substitutions (subst) -* Show how to track build files timestamps to force partial rebuilds (build_file_tracker.py) -* Added a new extension to detect blender libraries -* Added extras/prefork.py, extras/preforkunix.py and extras/preforkjava.py -* Updated demos/d -* Updated demos/vala -* Updated playground/protobuf -* Updated playground/cython -* Updated 
playground/dynamic_headers -* Updated playground/netcache -* Qt5 detection improvements - -NEW IN WAF 1.8.5 +* Force unit tests to depend on the executable creation time besides file contents +* Enhance the Eclipse project generator +* Update the cuda examples + +NEW IN WAF 2.0.4 ---------------- -* Enhanced Sun compiler detection #1510 -* Fixed a regression in Emacs shell support #1511 -* Fixed a regression in cross-directory builds (Linux only) #1512 -* Executable wscript files #1517 -* Generic script signing utility #1520 +* Enable more advanced warnings of ant_glob on build folders in verbose mode +* Defer node.ant_glob(..., generator=True) evaluation +* Enable 'waf clean' to get the list of files to remove from bld.clean_files +* Define the environment variable WAF_NO_PREFORK to skip pre-forking +* Fix Can't pickle local object '_createenviron..encode' exceptions (Python 3.6) +* Improve the Erlang module #2095 +* Add task target to parallel_debug outputs #2091 -NEW IN WAF 1.8.4 +NEW IN WAF 2.0.3 ---------------- -* Various improvements in stale.py -* More robust file scanning in Qt processing #1502 -* More robust handling of build order implied by hidden dependencies #1506 -* Run msvs on Python 3 #1503 -* Biber update #1501 -* Fixed the Python detection on Windows #1498 - -NEW IN WAF 1.8.2 +* Enable install_path=None for Python targets #2087 +* Skip empty or duplicates in java user classpath #2084 +* Change the unit test summary color scheme to use green and red colors +* Improve the deadlock error message +* Report sys.path values from Context.load_tool to improve error messages +* Cache qrc uic->h conversions to enable qrc re-use across targets +* Output long-running tasks outputs immediately using bld(rule=..., stdout=None) + +NEW IN WAF 2.0.2 ---------------- -* Removed the modules c_* that were accidentally shipped and break pkg-config tests +* Improve Intel Fortran compiler detection on Windows #2063 +* Ensure that the task count starts at 1 in the build outputs +* Add a --pdb option to start pdb on unexpected exceptions #2052 +* Fix conflicting qm/qrc re-used output files for multiple targets #2065 +* Add java support in protobuf (extras) #2049 +* Add a java test example (extras) #2062 +* Enable symbol processing for empty targets (extras) #2053 -NEW IN WAF 1.8.1 +NEW IN WAF 2.0.1 ---------------- -* Re-added compatible option group names from waf 1.7 #1495 -* Fixed bld(define='') splitting which was disabled by mistake +* Improve the default preprocessor behaviour for gcc/msvc +* Accept task objects in Build.add_to_group for compatibility reasons +* Prevent xcode generator from overwriting existing features #2038 +* Fix self.includes data scope #2035 +* Fix Node.ant_glob case sensitivity regression #2034 +* Fix Logs.verbose options regression #2033 -NEW IN WAF 1.8.0 +NEW IN WAF 2.0.0 ---------------- -* Python 2.5 is now required -* Removed the Waf cache from the task level, use the netcache system -* Console handling improvements -* Properly-sized progress bar on windows -* Generic configuration tests -* Better unicode handling in scripts and in file names -* Shorter build outputs -* Prevent external static libraries propagation through uselib vars #1320 -* LaTeX glossaries #1342 and multibib #1345 -* Rst document processing -* Various improvements in the Python detection and processing -* Tons of bugfixes +* Provide a new priority system to improve scalability on complex builds +* Provide TaskGroup objects to improve scalability on complex builds +* Force new files into the 
build directory by default (use Node objects to bypass) +* Provide built-in support for building over UNC paths +* Simplify the Task class hierarchy; TaskBase is removed +* Display commands as string with "WAF_CMD_FORMAT=string waf build -v" +* Have ant_glob(..., generator=True) return a Python generator +* Accept nested lists and generators in bld(source=...) +* Sort TaskGen methods in alphabetical order by reversing TaskGen.prec order +* Remove 'ut_fun' from waf_unit_test.py +* Remove Node.sig and Node.cache_sig +* Remove the BuildContext.rule decorator +* Remove Task.update_outputs, Task.always_run +* Remove atleast-version, exact-version and max-version from conf.check_cfg +* Remove c_preproc.trimquotes +* Remove field_name, type_name, function_name from conf.check() tests +* Remove extras/mem_reducer.py as a better solution has been merged +* Remove Utils.ex_stack (use traceback.format_exc()) diff --git a/DEVEL b/DEVEL index 1373628580..0ea19fa65e 100644 --- a/DEVEL +++ b/DEVEL @@ -1,4 +1,4 @@ -Waf 1.8 is on https://github.com/waf-project/waf +Waf 2.0 is on https://gitlab.com/ita1024/waf ------------------------------------------------ waflib the core library @@ -15,14 +15,27 @@ docs/sphinx project extracting the docstrings from the source code to crea Documentation ------------------------------------------------- -API documentation https://waf.io/apidocs/ -The Waf Book https://waf.io/book/ +API documentation https://waf.io/apidocs/ +The Waf Book https://waf.io/book/ -Coding guidelines ------------------ +General coding guidelines +------------------------- -* We use tabs, no spaces -* Do not use x.split("\n") but x.splitlines() -* Do not catch all exceptions unless you have a good reason to do so (no "except:") -* File handles are too easy to get wrong, use Node.readf/Node.writef/Utils.readf/Utils.writef +* The code must run in both Python 2.6 to Python 3 +* Use tabs for Python file indentation +* Use x.splitlines() instead of x.split('\n') +* Avoid "except:" and "except Exception:" +* Use Node.readf/Node.writef/Utils.readf/Utils.writef + +Pull requests +------------- + +See https://gitlab.com/ita1024/waf + +When implementing complex features, please add examples in the showcase folder demos/ +for modules under waflib/Tools, under tests/ for platform-independent unit tests, +or in playground/ for modules under waflib/extras. + +The files under waflib/Tools/ are kept API-compatible for the duration +of a middle version (currently 2.0). diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000000..a4147d2be7 --- /dev/null +++ b/LICENSE @@ -0,0 +1,25 @@ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. The name of the author may not be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, +INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, +STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING +IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. diff --git a/README b/README deleted file mode 100644 index 8e6f365ade..0000000000 --- a/README +++ /dev/null @@ -1,43 +0,0 @@ -WHAT YOU WILL FIND HERE ------------------------ - -Waf 1.8 - Recently moved to https://github.com/waf-project/waf - -For the manual: https://waf.io/book/ -For the API documentation: https://waf.io/apidocs/ -For the examples: see the folder demos/ and the folder playground/ - -HOW TO CREATE THE WAF SCRIPT ----------------------------- - -Python 2.6, 2.7, 3.0, 3.1, 3.2, 3.3 or 3.4 is required to generate the waf script. The waf script is then the version that can run on Python 2.5. -Just execute: -$ ./waf-light configure build -Or, if you have several python versions installed: -$ python3 ./waf-light configure build - -The Waf tools in waflib/extras are not added to the waf script. To add -some of them, use the --tools switch: -$ ./waf-light --tools=compat15,swig - -To add a tool that does not exist in the folder extras, pass an absolute path, and -to customize the initialization, pass the parameter 'prelude'. Here is for example -how to create a waf file using the compat15 module: -$ ./waf-light --tools=compat15 --prelude=$'\tfrom waflib.extras import compat15\n' - -Any kind of initialization is possible, though one may prefer the build system kit (folder build_system_kit): -$ ./waf-light --make-waf --tools=compat15,/comp/waf/aba.py --prelude=$'\tfrom waflib.extras import compat15\n\tprint("ok")' - -Or if you do not want to regenerate the waf file all the time, set the WAFDIR environment variable to the directory containing "waflib". - -HOW TO TRY THE EXAMPLES ------------------------ - -Try this: -$ cp waf demos/c/ -$ cd demos/c/ -$ ./waf configure build - ---------------------------- -Thomas Nagy, 2014-2015 (ita) - diff --git a/README.md b/README.md index 921a9aff46..1b48604eaa 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,5 @@ +## ABOUT WAF + Waf is a Python-based framework for configuring, compiling and installing applications. 
Here are perhaps the most important features of Waf: * *Automatic build order*: the build order is computed from input and output files, among others @@ -5,12 +7,51 @@ Waf is a Python-based framework for configuring, compiling and installing applic * *Performance*: tasks are executed in parallel automatically, the startup time is meant to be fast (separation between configuration and build) * *Flexibility*: new commands and tasks can be added very easily through subclassing, bottlenecks for specific builds can be eliminated through dynamic method replacement * *Extensibility*: though many programming languages and compilers are already supported by default, many others are available as extensions - * *IDE support*: Eclipse, Visual Studio and Xcode project generators (waflib/extras/) + * *IDE support*: Eclipse, Visual Studio and Xcode project generators (`waflib/extras/`) * *Documentation*: the application is based on a robust model documented in [The Waf Book](https://waf.io/book/) and in the [API docs](https://waf.io/apidocs/) - * *Python compatibility*: cPython 2.5 to 3.4, Jython 2.5, IronPython, and Pypy + * *Python compatibility*: cPython 2.5 to 3.x, Jython 2.5, IronPython, and Pypy + +Learn more about Waf by reading [The Waf Book](https://waf.io/book/). For researchers and build system writers, Waf also provides a framework and examples for creating [custom build systems](https://gitlab.com/ita1024/waf/tree/master/build_system_kit) and [package distribution systems](https://gitlab.com/ita1024/waf/blob/master/playground/distnet/README.rst). + +Download the project from our page on [waf.io](https://waf.io/), consult the [manual](https://waf.io/book/), the [API documentation](https://waf.io/apidocs/) and the [showcases](https://gitlab.com/ita1024/waf/tree/master/demos) and [experiments](https://gitlab.com/ita1024/waf/tree/master/playground). + +## HOW TO CREATE THE WAF SCRIPT + +Python >= 2.7 is required to generate the waf script: + +```sh +$ python ./waf-light configure build +``` + +## CUSTOMIZATION + +The Waf tools in waflib/extras are not added to the waf script. To add +some of them, use the --tools switch. An absolute path can be passed +if the module does not exist under the 'extras' folder: +```sh +$ ./waf-light --tools=swig +``` + +To customize the initialization, pass the parameter 'prelude'. Here is for example +how to create a waf file using the compat15 module: +```sh +$ ./waf-light --tools=compat15 --prelude=$'\tfrom waflib.extras import compat15\n' +``` + +Although any kind of initialization is possible, using the build system kit +may be easier (folder build\_system\_kit): +```sh +$ ./waf-light --make-waf --tools=compat15,/comp/waf/aba.py --prelude=$'\tfrom waflib.extras import compat15\n\tprint("ok")' +``` + +To avoid regenerating the waf file all the time, just set the `WAFDIR` environment variable to the directory containing "waflib". -Waf is used in particular by innovative companies such as [Avalanche Studios](http://www.avalanchestudios.se) and by open-source projects such as [the Samba project](https://www.samba.org/). Learn more about Waf by reading [The Waf Book](https://waf.io/book/). +## HOW TO RUN THE EXAMPLES -For researchers and build system writers, Waf also provides a framework for creating [custom build systems](http://code.google.com/p/waf/source/browse/build_system_kit/) and [package distribution systems](http://code.google.com/p/waf/source/browse/playground/distnet/README.rst). 
+Try this: +```sh +cp waf demos/c/ +cd demos/c/ +./waf configure build +``` -Download the project from our page on [waf.io](https://waf.io/) or from the mirror on [freehackers.org](http://www.freehackers.org/~tnagy/release/). diff --git a/TODO b/TODO index aa7e2b9aed..1392725b80 100644 --- a/TODO +++ b/TODO @@ -1,30 +1,12 @@ -Waf 1.9 +Waf 2.1 ------- -* Reduce the key size in bld.task_sigs -* Provide a more efficient ConfigSet implementation -* Remove the split functions from Utils -* Ensure _cache.py are valid python files -* Include the tool 'nobuild' by default -* Support for linkflags/cflags at the end of commands - -Waf 1.8.x ---------- - -* Update the docs - -Can be useful: - -def bld_command(*k): - fun = k[0] - name = fun.__name__ - from waflib.Build import BuildContext - class tmp(BuildContext): - cmd = name - fun = name - return fun - -@bld_command -def foo(ctx): - print ctx.env - +- Remove Winphone/WinCE detection in msvc.py +- Fix the circular imports ccroot<->c_config.py +- Remove superfluous *k in ctx.load() and other unused parameters + Ensure conf.load() has the same interface as ctx.load() +- Remove waflib.Runner.PriorityTasks.appendleft +- Remove waflib.Task.TaskBase +- Remove the upper class of waflib.Task.Task (metaclasses) +- Ordered_iter_dict in python 3.7 +- Change _add_task to add_task in Runner.py diff --git a/build_system_kit/README.txt b/build_system_kit/README.txt index 93ee9a03d1..c7d20b30c1 100644 --- a/build_system_kit/README.txt +++ b/build_system_kit/README.txt @@ -30,4 +30,4 @@ A few examples are provided to illustrate the range of possibilities: * nostate: use timestamps only, and no build directory (very make-like) * extpy: a custom waf file able to read wscript files having the extension ".py" -Thomas Nagy, 2010-2011 +Thomas Nagy, 2010-2016 diff --git a/build_system_kit/extpy/extpy.py b/build_system_kit/extpy/extpy.py index e6446850dc..73a8596682 100644 --- a/build_system_kit/extpy/extpy.py +++ b/build_system_kit/extpy/extpy.py @@ -27,7 +27,7 @@ def recurse(self, dirs, name=None, mandatory=True, once=True): cache[node] = True self.pre_recurse(node) try: - function_code = node.read('rU') + function_code = node.read('r') exec(compile(function_code, node.abspath(), 'exec'), self.exec_dict) finally: self.post_recurse(node) diff --git a/build_system_kit/makefile_dumper/wscript b/build_system_kit/makefile_dumper/wscript index 5e9b3f248e..d3a3d6bc1e 100644 --- a/build_system_kit/makefile_dumper/wscript +++ b/build_system_kit/makefile_dumper/wscript @@ -39,16 +39,16 @@ def dump(bld): bld.targets = [] # store the command executed - old_exec = Task.TaskBase.exec_command + old_exec = Task.Task.exec_command def exec_command(self, *k, **kw): ret = old_exec(self, *k, **kw) self.command_executed = k[0] self.path = kw['cwd'] or self.generator.bld.cwd return ret - Task.TaskBase.exec_command = exec_command + Task.Task.exec_command = exec_command # perform a fake build, and accumulate the makefile bits - old_process = Task.TaskBase.process + old_process = Task.Task.process def process(self): old_process(self) @@ -67,13 +67,13 @@ def dump(bld): else: bld.commands.append(' '.join(lst)) bld.commands.append('\tcd %s && %s' % (self.path, self.command_executed)) - Task.TaskBase.process = process + Task.Task.process = process # write the makefile after the build is complete def output_makefile(self): self.commands.insert(0, "all: %s" % " ".join(self.targets)) node = self.bldnode.make_node('Makefile') - node.write("\n".join(self.commands)) - Logs.warn('Wrote %s' % node.abspath()) 
+ node.write('\n'.join(self.commands)) + Logs.warn('Wrote %r', node) bld.add_post_fun(output_makefile) diff --git a/build_system_kit/noscript/dbdlib.py b/build_system_kit/noscript/dbdlib.py index c3f9e9d697..6a39287895 100644 --- a/build_system_kit/noscript/dbdlib.py +++ b/build_system_kit/noscript/dbdlib.py @@ -35,9 +35,8 @@ def build(bld): elif tp == 'objects': features = 'c' - source = Options.options.source app = Options.options.app - bld(features=features, source=source, target=app) + bld(features=features, source=Options.options.source, target=app) def recurse_rep(x, y): f = getattr(Context.g_module, x.cmd or x.fun, Utils.nada) @@ -126,9 +125,3 @@ def c_hook(self, node): # re-bind the extension to this new class return self.create_compiled_task('c2', node) -# modify the existing class to output the targets in the same directory as the original files -Task.update_outputs(c2) -Task.update_outputs(waflib.Tools.c.cprogram) -Task.update_outputs(waflib.Tools.c.cshlib) -Task.update_outputs(waflib.Tools.c.cstlib) - diff --git a/build_system_kit/parser/cbdlib.py b/build_system_kit/parser/cbdlib.py index 8e0c6b6a6d..8d6946e74e 100644 --- a/build_system_kit/parser/cbdlib.py +++ b/build_system_kit/parser/cbdlib.py @@ -20,7 +20,7 @@ def build(bld): for x in txt.splitlines(): if not x: continue - elif x.startswith('\t') or x.startswith(' '): + elif x.startswith(('\t', ' ')): tg.rule = x.lstrip() else: line = x.split(':') diff --git a/configure b/configure index 03fab4b16a..d6d969c013 100755 --- a/configure +++ b/configure @@ -61,7 +61,7 @@ checkWAF() # neither waf nor miniwaf could be found if [ ! -x "$WAF" ] ; then printf "$RED""not found""$NORMAL""\n" - echo "Go to http://code.google.com/p/waf/" + echo "Go to https://waf.io/" echo "and download a waf version" exit $EXIT_FAILURE else diff --git a/demos/asm/test.S b/demos/asm/test.S index 31f487b1b4..af26f0a0b5 100644 --- a/demos/asm/test.S +++ b/demos/asm/test.S @@ -1,23 +1,22 @@ #include "header.h" -.text -.align 2 - -.section .data +.data val: .long SOME_VALUE +.text +.align 2 + // Multiply input value by 421... 
.global mult10 -.type mult10, function +.type mult10, @function mult10: pushq %rbp movq %rsp,%rbp - movl val,%eax + movabs val,%eax imull %edi,%eax popq %rbp ret - diff --git a/demos/asm/wscript b/demos/asm/wscript index c56e7fe446..1d210aa0f2 100644 --- a/demos/asm/wscript +++ b/demos/asm/wscript @@ -13,15 +13,16 @@ def configure(conf): conf.fatal('this example is for 64-bit systems only') def build(bld): - # http://docs.waf.googlecode.com/git/apidocs_16/tools/asm.html + # https://waf.io/apidocs/tools/asm.html bld.program( source = 'main.c test.S', target = 'asmtest', defines = 'foo=12', - includes = '.') + asflags = '-Os', + includes = '.', + linkflags = '-no-pie') def disp(ctx): node = ctx.bldnode.ant_glob('asmtest*', remove=False)[0] ctx.exec_command('%s' % node.abspath(), shell=False) bld.add_post_fun(disp) - diff --git a/demos/c++/wscript b/demos/c++/wscript index 1b7751f239..ca9d9822fa 100644 --- a/demos/c++/wscript +++ b/demos/c++/wscript @@ -19,6 +19,8 @@ def configure(conf): def build(bld): bld.shlib(source='a.cpp', target='mylib', vnum='9.8.7') + bld.shlib(source='a.cpp', target='mylib2', vnum='9.8.7', cnum='9.8') + bld.shlib(source='a.cpp', target='mylib3') bld.program(source='main.cpp', target='app', use='mylib') bld.stlib(target='foo', source='b.cpp') diff --git a/demos/c/program/gfx/semfavicon.ico b/demos/c/program/gfx/semfavicon.ico new file mode 100644 index 0000000000..af31ad0f26 Binary files /dev/null and b/demos/c/program/gfx/semfavicon.ico differ diff --git a/demos/c/program/msvc_resource.rc b/demos/c/program/msvc_resource.rc new file mode 100644 index 0000000000..333e11b99a --- /dev/null +++ b/demos/c/program/msvc_resource.rc @@ -0,0 +1 @@ +100 icon "gfx/semfavicon.ico" diff --git a/demos/c/program/wscript_build b/demos/c/program/wscript_build index 8379714588..8933768148 100644 --- a/demos/c/program/wscript_build +++ b/demos/c/program/wscript_build @@ -1,19 +1,19 @@ #! /usr/bin/env python def write_header(tsk): - tsk.outputs[0].write('int abc = 423;') -bld(rule=write_header, target='b.h', ext_out=['.h']) + tsk.outputs[0].write('int abc = 423;\n') +bld(features='use', rule=write_header, target='b.h', ext_out=['.h'], name='XYZ') -bld.program( +tg = bld.program( features = 'aaa', source = 'main.c', includes = '. ..', - cflags = ['-O3'], + #cflags = ['-O3'], # for example defines = ['foo=bar'], target = 'myprogram', - use = 'M') + use = 'M XYZ') -# make main.c depend on wscript_build, just for the fun of it +# just for fun, make main.c depend on wscript_build bld.add_manual_dependency('main.c', bld.path.find_resource('wscript_build')) # ---------------------------------------- @@ -34,3 +34,6 @@ class foo(waflib.Task.Task): def run(self): self.outputs[0].write('int kik = 343;\n') + +if bld.env.CC_NAME == 'msvc': + tg.source += ' msvc_resource.rc' diff --git a/demos/c/shlib/wscript_build b/demos/c/shlib/wscript_build index c3cbc09541..e4b8f7dac3 100644 --- a/demos/c/shlib/wscript_build +++ b/demos/c/shlib/wscript_build @@ -3,6 +3,7 @@ bld.shlib( source = 'test_shlib.c', target = 'my_shared_lib', + name = 'xyz', vnum = '1.2.3', defs = 'foo.def') @@ -11,7 +12,7 @@ t = bld.program( #features = 'my_precious', source = 'main.c', target = 'test_shared_link', - use = 'my_shared_lib', + use = 'xyz', # 1. 
settings flags directly
 	#linkflags = ['-L/disk/comp/waf/demos/c/build/shlib', '-lmy_shared_lib']
 	)
diff --git a/demos/c/stlib-deps/libA/external_vars.c b/demos/c/stlib-deps/libA/external_vars.c
new file mode 100644
index 0000000000..5ad3303313
--- /dev/null
+++ b/demos/c/stlib-deps/libA/external_vars.c
@@ -0,0 +1,11 @@
+
+#include <stdio.h>
+#include "external_vars.h"
+
+int k = 5;
+
+void print_value_of_k() {
+
+	printf("K = %d\n", k);
+
+}
diff --git a/demos/c/stlib-deps/libA/external_vars.h b/demos/c/stlib-deps/libA/external_vars.h
new file mode 100644
index 0000000000..3b2bb2cae3
--- /dev/null
+++ b/demos/c/stlib-deps/libA/external_vars.h
@@ -0,0 +1,14 @@
+/*
+  Export internal vars
+ */
+
+#ifndef _EXTERNAL_VARS_H
+#define _EXTERNAL_VARS_H
+
+extern int k; /* export k */
+
+void print_value_of_k();
+
+extern int r;
+
+#endif /*_EXTERNAL_VARS_H */
diff --git a/demos/c/stlib-deps/libA/wscript_build b/demos/c/stlib-deps/libA/wscript_build
new file mode 100644
index 0000000000..d349c66198
--- /dev/null
+++ b/demos/c/stlib-deps/libA/wscript_build
@@ -0,0 +1,7 @@
+# This is a static library that provides a header to include reference to
+# internal variables.
+bld.stlib(
+	target='A',
+	source='external_vars.c',
+	includes='.',
+	export_includes='.')
diff --git a/demos/c/stlib-deps/libB/sum.c b/demos/c/stlib-deps/libB/sum.c
new file mode 100644
index 0000000000..0e6ad98817
--- /dev/null
+++ b/demos/c/stlib-deps/libB/sum.c
@@ -0,0 +1,12 @@
+
+#include <stdio.h>
+
+#include "sum.h"
+
+#include "external_vars.h"
+
+void sum(int j) {
+	printf("Adding %d...\n", j);
+	// Add our new value
+	k += j;
+}
diff --git a/demos/c/stlib-deps/libB/sum.h b/demos/c/stlib-deps/libB/sum.h
new file mode 100644
index 0000000000..910101e82a
--- /dev/null
+++ b/demos/c/stlib-deps/libB/sum.h
@@ -0,0 +1,10 @@
+/*
+  Export internal vars
+ */
+
+#ifndef _SUM_H
+#define _SUM_H
+
+void sum(int j);
+
+#endif /*_SUM_H */
diff --git a/demos/c/stlib-deps/libB/wscript_build b/demos/c/stlib-deps/libB/wscript_build
new file mode 100644
index 0000000000..8accf77fc3
--- /dev/null
+++ b/demos/c/stlib-deps/libB/wscript_build
@@ -0,0 +1,9 @@
+# This script uses libA to do some internal logic. It uses the default
+# behavior so it will be re-archived every time libA changes, even if
+# changes in libA are only visible at runtime.
+bld.stlib(
+	target='B',
+	source='sum.c',
+	use='A',
+	includes='.',
+	export_includes='.')
diff --git a/demos/c/stlib-deps/libC/diff.c b/demos/c/stlib-deps/libC/diff.c
new file mode 100644
index 0000000000..292210c4ee
--- /dev/null
+++ b/demos/c/stlib-deps/libC/diff.c
@@ -0,0 +1,13 @@
+
+#include <stdio.h>
+
+#include "diff.h"
+
+#include "external_vars.h"
+
+void diff(int j) {
+
+	printf("Subtracting %d...\n", j);
+	// subtract our new value
+	k -= j;
+}
diff --git a/demos/c/stlib-deps/libC/diff.h b/demos/c/stlib-deps/libC/diff.h
new file mode 100644
index 0000000000..851746fce2
--- /dev/null
+++ b/demos/c/stlib-deps/libC/diff.h
@@ -0,0 +1,10 @@
+/*
+  Export internal vars
+ */
+
+#ifndef _DIFF_H
+#define _DIFF_H
+
+void diff(int j);
+
+#endif /* _DIFF_H */
diff --git a/demos/c/stlib-deps/libC/wscript_build b/demos/c/stlib-deps/libC/wscript_build
new file mode 100644
index 0000000000..04d4a07b05
--- /dev/null
+++ b/demos/c/stlib-deps/libC/wscript_build
@@ -0,0 +1,10 @@
+# This script uses the logic that prevents static libraries from depending on
+# each other. This means that the only way libC is re-archived is if the source
+# code file diff.c or any of its dependencies change.
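+# (Contrast with libB/wscript_build above, which keeps the default behavior
+# and is therefore re-archived whenever libA changes.)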
+bld.stlib(
+	target='C',
+	source='diff.c',
+	features='skip_stlib_link_deps',
+	use='A',
+	includes='.',
+	export_includes='.')
diff --git a/demos/c/stlib-deps/main.c b/demos/c/stlib-deps/main.c
new file mode 100644
index 0000000000..5e6d49fdb5
--- /dev/null
+++ b/demos/c/stlib-deps/main.c
@@ -0,0 +1,22 @@
+
+#include <stdio.h>
+
+#include "external_vars.h"
+#include "sum.h"
+#include "diff.h"
+
+int main()
+{
+	/* This should return to whatever the default value is. */
+	print_value_of_k();
+	sum(6);
+	print_value_of_k();
+	diff(8);
+	print_value_of_k();
+	sum(8);
+	print_value_of_k();
+	diff(6);
+	print_value_of_k();
+
+	return 0;
+}
diff --git a/demos/c/stlib-deps/wscript_build b/demos/c/stlib-deps/wscript_build
new file mode 100644
index 0000000000..855b10d803
--- /dev/null
+++ b/demos/c/stlib-deps/wscript_build
@@ -0,0 +1,11 @@
+#! /usr/bin/env python
+
+bld.recurse('libA')
+bld.recurse('libB')
+bld.recurse('libC')
+
+# Note that main has an implied dependency between our main program and libA
+bld.program(
+	source = 'main.c',
+	target = 'test_static_link_chain',
+	use = 'B C')
diff --git a/demos/c/stlib/wscript_build b/demos/c/stlib/wscript_build
index 2d826213c5..22e796dc95 100644
--- a/demos/c/stlib/wscript_build
+++ b/demos/c/stlib/wscript_build
@@ -13,7 +13,7 @@ bld.program(
 def r1(self):
 	import time
 	time.sleep(1)
-	self.outputs[0].write(' ')
+	self.outputs[0].write('\n')
 bld(rule=r1, target='foo.h', before=['c'])
 
 # the default scanner may enforce the build order on generated headers, but it is just
diff --git a/demos/c/wscript b/demos/c/wscript
index 880146b61c..c02184b095 100644
--- a/demos/c/wscript
+++ b/demos/c/wscript
@@ -19,32 +19,57 @@ def options(opt):
 
 def configure(conf):
 	conf.load('compiler_c gnu_dirs')
-	conf.check_cc(fragment="int main() { return 0; }\n")
+
+	# compile and link in the default mode, which is c++ if present
+	conf.check(fragment='int main() { return 0; }\n')
+
+	# just compile in c mode, and do not link
+	conf.check(fragment='int main() { return 0; }\n', features='c')
 
 	try:
-		conf.check_cc(fragment="int main() { return 0; }\n", execute=True) # 1
+		conf.check(fragment="int main(int argc, char* argv[]) { return 0; }\n", execute=True) # 1
 	except conf.errors.WafError:
 		Logs.warn('You are probably using a cross-compiler (disabling specific configuration tests)')
 		conf.check_library(test_exec=False)
 	else:
-		conf.check_cc(fragment="""#include <stdio.h>\nint main(){fprintf(stderr, "mu"); printf("%d", 22);return 0;}\n""", execute=True, define_name='HAVE_MU')
+		conf.check(fragment="""#include <stdio.h>\nint main(){fprintf(stderr, "mu"); printf("%d", 22);return 0;}\n""",
+			msg='Checking for exec results', execute=True, define_name='HAVE_MU')
+		conf.check(fragment='int main(int argc, char* argv[]) { return argc - 2;}',
+			msg='Checking for test arguments', test_args=['--foo=bar'], execute=True)
 		conf.check_library(test_exec=True)
 
-	conf.check_cc(lib='m', cflags='-Wall', defines=['var=foo', 'x=y'], uselib_store='M', mandatory=False)
+	conf.check(lib='m', cflags='-Wall', defines=['var=foo', 'x=y'], uselib_store='M', mandatory=False)
 
 	conf.check_large_file(mandatory=False)
 	conf.check_inline()
-	conf.check_endianness()
+
+	endianness = conf.check_endianness()
+	conf.define_cond("BIG_ENDIAN", endianness == "big")
+
+	def test_build(ctx):
+		ctx(rule='echo hello', shell=True, always=True)
+		# Configuration tests may even be re-used:
+		#ctx.in_msg = True # suppress console outputs
+		#ctx.check_large_file(mandatory=False)
 
 	conf.multicheck(
-		{'header_name':'stdio.h'},
-		{'header_name':'unistd.h'},
-	
{'header_name':'stdlib.h'}, - msg = 'Checking for standard headers', - mandatory = False + # list of conf.check() arguments + {'header_name':'stdio.h', 'msg':'... stdio', 'uselib_store': 'STDIO'}, + {'header_name':'xyztabcd.h', 'msg':'... optional xyztabcd.h', 'mandatory': False}, + {'header_name':'stdlib.h', 'msg':'... stdlib', 'okmsg': 'aye', 'errmsg': 'nope'}, + {'func': test_build, 'msg':'... testing an arbitrary build function', 'okmsg':'ok'}, + + # parallelism control with after_tests/before_tests + {'header_name':'malloc.h', 'msg':'... malloc', 'uselib_store':'MALLOC', 'id':'malloc_t', 'mandatory':False}, + {'header_name':'unistd.h', 'msg':'... unistd', 'uselib_store':'UNISTD', 'before_tests':['malloc_t'], 'mandatory':False}, + + msg = 'Checking for headers in parallel', + #mandatory = False, # set to False to make all tests non-mandatory + #run_all_tests = False # set to False to stop at the first error ) - conf.check_cc(header_name='stdio.h', auto_add_header_name=True) - #conf.check_cc(header_name='unistd.h') - conf.check_cc(fragment='int main() {return 0;}\n') + + conf.check(header_name='stdio.h', auto_add_header_name=True) + #conf.check(header_name='unistd.h') + conf.check(fragment='int main() {return 0;}\n') conf.write_config_header('config.h') # exclude system libraries, force a particular folder (see strictlib below) @@ -53,7 +78,7 @@ def configure(conf): def build(bld): bld.env.DEFINES=['WAF=1'] - bld.recurse('program stlib shlib') + bld.recurse('program stlib stlib-deps shlib') #bld.install_files('/tmp/foo', 'wscript') #bld.env.PREFIX='/tmp/foo' bld.install_files('${PREFIX}/', 'program/a.h program/main.c', relative_trick=False) @@ -66,7 +91,16 @@ def build(bld): bld.env.FOO =['m', 'ncurses'] bld.env.ST = '-L%s' - bld(rule='echo ${ST:FOO}', always=True, shell=1) + bld.env.A = 'aye' + bld.env.B = 'doh' + bld.env.SRCA = ['aaa'] + bld(rule='echo ${ST:FOO} ${ST:SRC} ${A}${B} ${ST:SRCA} ${ST:SRC[0].abspath()}', + always=True, source='wscript', shell=1, name='Shell') + if not Utils.is_win32: + bld(rule='echo ${ST:FOO} ${ST:SRC} ${A}${B} ${ST:SRCA} ${ST:SRC[0].abspath()}', + always=True, source='wscript', shell=0, + stdout=None, stderr=None, # disable synchronized outputs on this rule + cls_keyword=lambda x:'Trying again', name='NoShell') # illustrate how to add a command 'foo' and to execute things in it if bld.cmd == 'foo': diff --git a/demos/fortran/foo.def b/demos/fortran/foo.def new file mode 100644 index 0000000000..08acdb1380 --- /dev/null +++ b/demos/fortran/foo.def @@ -0,0 +1,2 @@ +EXPORTS + FOO diff --git a/demos/fortran/mod/fakecc.py b/demos/fortran/mod/fakecc.py index 485c024d5f..fa9194464a 100644 --- a/demos/fortran/mod/fakecc.py +++ b/demos/fortran/mod/fakecc.py @@ -53,7 +53,7 @@ def run(self): bnodes = self.outputs m = usemodule(self.inputs[0]) if m: - print "%s requires module %s" % (self.inputs[0].abspath(), m[0]) + print("%s requires module %s" % (self.inputs[0].abspath(), m[0])) #bnodes.append(self.generator.bld.bldnode.exclusive_build_node(m[0])) compile(self) diff --git a/demos/fortran/submodules/container.f90 b/demos/fortran/submodules/container.f90 new file mode 100644 index 0000000000..d8a6c4710d --- /dev/null +++ b/demos/fortran/submodules/container.f90 @@ -0,0 +1,27 @@ +submodule (parent) container + implicit none + +contains + + module procedure init + p%mother = mother + p%father = father + end procedure init + + module subroutine harmonize(p) + type(parent_type), intent(inout) :: p + real :: avg + + avg = 0.5 * (p%father + p%mother) + p%father = avg + 
p%mother = avg + end subroutine harmonize + + module function parent_weight(p) result(w) + type(parent_type), intent(in) :: p + real :: w + + w = p%mother**2 + p%father**2 + end function parent_weight + +end submodule container diff --git a/demos/fortran/submodules/helper.f90 b/demos/fortran/submodules/helper.f90 new file mode 100644 index 0000000000..b72de8c0ad --- /dev/null +++ b/demos/fortran/submodules/helper.f90 @@ -0,0 +1,13 @@ +submodule (parent:container) helper + implicit none + +contains + + module function parent_distance(pa, pb) result(dist) + type(parent_type), intent(in) :: pa, pb + real :: dist + + dist = sqrt(parent_weight(pa) + parent_weight(pb)) + end function parent_distance + +end submodule helper diff --git a/demos/fortran/submodules/parent.f90 b/demos/fortran/submodules/parent.f90 new file mode 100644 index 0000000000..17b8370bb1 --- /dev/null +++ b/demos/fortran/submodules/parent.f90 @@ -0,0 +1,30 @@ +module parent + implicit none + + type parent_type + real :: mother + real :: father + end type parent_type + + interface + module subroutine init(p, mother, father) + type(parent_type), intent(out) :: p + real, intent(in) :: mother, father + end subroutine init + + module subroutine harmonize(p) + type(parent_type), intent(inout) :: p + end subroutine harmonize + + module function parent_weight(p) result(w) + type(parent_type), intent(in) :: p + real :: w + end function parent_weight + + module function parent_distance(pa, pb) result(dist) + type(parent_type), intent(in) :: pa, pb + real :: dist + end function parent_distance + end interface + +end module parent diff --git a/demos/fortran/submodules/submain.f90 b/demos/fortran/submodules/submain.f90 new file mode 100644 index 0000000000..e47013a599 --- /dev/null +++ b/demos/fortran/submodules/submain.f90 @@ -0,0 +1,18 @@ +program submain + use parent + + implicit none + + type(parent_type) :: a,b + real :: dist, weight + + call init(a, 1.0, 2.0) + call init(b, 10.0, 12.0) + + call harmonize(a) + weight = parent_weight(b) + write(*,*) weight + dist = parent_distance(a, b) + write(*,*) dist + +end program submain diff --git a/demos/fortran/submodules/wscript b/demos/fortran/submodules/wscript new file mode 100644 index 0000000000..771614e1c3 --- /dev/null +++ b/demos/fortran/submodules/wscript @@ -0,0 +1,26 @@ +#! /usr/bin/env python + +top = '.' 
+out = 'build' + +def options(opt): + opt.load('compiler_c') + opt.load('compiler_fc') + +def configure(conf): + conf.load('compiler_c') + conf.load('compiler_fc') + +def build(bld): + + bld( + features = 'fc fcshlib', + source = 'parent.f90 container.f90 helper.f90', + target = 'fudge', + ) + bld( + features = 'fc fcprogram', + source = 'submain.f90', + use = 'fudge', + target = 'submain', + ) diff --git a/demos/fortran/typemap/fsrc.def b/demos/fortran/typemap/fsrc.def new file mode 100644 index 0000000000..fa15cdc851 --- /dev/null +++ b/demos/fortran/typemap/fsrc.def @@ -0,0 +1,2 @@ +EXPORTS + EXAMPLE_MOD_mp_SUB1 diff --git a/demos/fortran/typemap/wscript b/demos/fortran/typemap/wscript index 2ba212f78a..9e66f00b17 100644 --- a/demos/fortran/typemap/wscript +++ b/demos/fortran/typemap/wscript @@ -11,14 +11,19 @@ def configure(conf): conf.load('compiler_c') conf.load('compiler_fc') conf.check_fortran() - conf.check_fortran_verbose_flag() - conf.check_fortran_clib() + # configuration tests that may be totally irrelevant + conf.check_fortran_dummy_main() + if not conf.env.IFORT_WIN32: + conf.check_fortran_verbose_flag() + conf.check_fortran_clib() + conf.check_fortran_mangling() def build(bld): bld( features = 'fc typemap fcshlib', source = 'fsrc.f90 basetypes.f90', + defs = 'fsrc.def', target = 'foo', ) @@ -28,9 +33,9 @@ from waflib import Logs, Build, Utils from waflib import TaskGen, Task from waflib.ConfigSet import ConfigSet -#@TaskGen.feature('typemap') <- python >= 2.4 -#@TaskGen.after('process_source') -#@TaskGen.before('apply_link') +@TaskGen.feature('typemap') +@TaskGen.after('process_source') +@TaskGen.before('apply_link') def process_typemaps(self): """ modmap: *.f90 + foo.in -> foo.h + foo.f90 @@ -54,11 +59,6 @@ def process_typemaps(self): tsk = self.create_compiled_task('fc', f90out) tsk.nomod = True # the fortran files won't compile unless all the .mod files are set, ick -# for python 2.3 -TaskGen.feature('typemap')(process_typemaps) -TaskGen.after('process_source')(process_typemaps) -TaskGen.before('apply_link')(process_typemaps) - class modmap(Task.Task): """ create .h and .f90 files, so this must run be executed before any c task @@ -166,25 +166,26 @@ end subroutine outer return res def write_type_map(bld, ctps, fort_file, c_header): - fort_file.write('''\ + buf = ['''\ module type_maps use, intrinsic :: iso_c_binding implicit none -''', flags='w') +'''] for ctp in ctps: - fort_file.write('integer, parameter :: %s = %s\n' % (ctp.name, ctp.fc_type), - flags='a') - fort_file.write('end module type_maps\n', flags='a') + buf.append('integer, parameter :: %s = %s' % (ctp.name, ctp.fc_type)) + buf.append('end module type_maps\n') + fort_file.write('\n'.join(buf)) cap_name = '%s__' % c_header.name.upper().replace('.', '_') - c_header.write('''\ + buf = ['''\ #ifndef %s #define %s -''' % (cap_name, cap_name), flags='w') +''' % (cap_name, cap_name)] for ctp in ctps: # This is just an example, so this would be customized. The 'long long' # would correspond to the actual C type... 
- c_header.write('typedef long long %s\n' % ctp.name, flags='a') - c_header.write('#endif\n', flags='a') + buf.append('typedef long long %s\n' % ctp.name) + buf.append('#endif\n') + c_header.write('\n'.join(buf)) # vim:ft=python:noet diff --git a/demos/fortran/wscript b/demos/fortran/wscript index ec203b3ae8..f1223d8024 100644 --- a/demos/fortran/wscript +++ b/demos/fortran/wscript @@ -12,19 +12,15 @@ def options(opt): opt.recurse('typemap') def configure(conf): - conf.load('compiler_fc') conf.load('compiler_c') + conf.load('compiler_fc') + if conf.env.FC_NAME == 'IFORT': - conf.env['FCFLAGS'] = ['-warn'] + conf.env.append_unique('FCFLAGS', '-warn') elif conf.env.FC_NAME == 'GFORTRAN': - conf.env['FCFLAGS'] = ['-Wall', '-W'] - #conf.env['INCLUDES'] = ['hfloupi'] + conf.env.append_unique('FCFLAGS', ['-Wall', '-W']) conf.check_fortran() - conf.check_fortran_verbose_flag() - conf.check_fortran_clib() - conf.check_fortran_dummy_main() - conf.check_fortran_mangling() conf.recurse('typemap') def build(bld): @@ -43,6 +39,7 @@ def build(bld): features = 'fc fcshlib', source = 'foo.f', target = 'shlib1', + defs = 'foo.def', vnum = '2.3.9') bld( @@ -81,3 +78,5 @@ def build(bld): target = 'mod/two_mods') bld.recurse('typemap') + if bld.env.FC_NAME == 'GFORTRAN' and int(bld.env.FC_VERSION[0]) >= 6: + bld.recurse('submodules') diff --git a/demos/glib2/org.gsettings.simple.gschema.xml b/demos/glib2/org.gsettings.simple.gschema.xml new file mode 100644 index 0000000000..94a671bd54 --- /dev/null +++ b/demos/glib2/org.gsettings.simple.gschema.xml @@ -0,0 +1,7 @@ + + + + "Hello, world" + + + diff --git a/demos/glib2/wscript b/demos/glib2/wscript index 5666e14ce1..2bfa7d47ce 100644 --- a/demos/glib2/wscript +++ b/demos/glib2/wscript @@ -8,6 +8,8 @@ APPNAME='glib2_test' top = '.' out = 'build' +import os + def options(opt): opt.load ('compiler_c glib2') @@ -46,3 +48,10 @@ def build(bld): source = 'org.glib2.test.gresource.xml', install_path = 'lib/glib2_test' ) + + # Install a schema to a different location. + # It will be compiled to a cache file besides it. 
+ bld( + features = 'glib2', + settings_schema_files = ['org.gsettings.simple.gschema.xml'] + ).env.GSETTINGSSCHEMADIR = os.path.join('etc', 'glib-2.0', 'schemas') diff --git a/demos/intltool/data/wscript b/demos/intltool/data/wscript index 1ddcece5f7..27ae2d1a8f 100644 --- a/demos/intltool/data/wscript +++ b/demos/intltool/data/wscript @@ -4,13 +4,16 @@ from waflib import Utils def build(bld): - if bld.cmd == 'install': + def post_update(bld): try: bld.exec_command(["update-mime-database", Utils.subst_vars("${DATADIR}/mime", bld.env)]) bld.exec_command(["update-desktop-database", Utils.subst_vars("${DATADIR}/applications", bld.env)]) - except: + except Exception: pass + if bld.cmd == 'install': + bld.add_post_fun(post_update) + bld( features = "intltool_in", podir = "../po", diff --git a/demos/java/animals/wscript b/demos/java/animals/wscript index 0a8aa03342..159f38746c 100644 --- a/demos/java/animals/wscript +++ b/demos/java/animals/wscript @@ -8,7 +8,7 @@ def build(bld): # javac srcdir = 'src', - compat = '1.5', + compat = '1.7', # jar basedir = '.', diff --git a/demos/java/bengala/src/org/example/Bengala.java b/demos/java/bengala/src/org/example/Bengala.java new file mode 100644 index 0000000000..30f50b7013 --- /dev/null +++ b/demos/java/bengala/src/org/example/Bengala.java @@ -0,0 +1,14 @@ + +package org.example; + +//import org.example.Animal; +import org.example.Cat; + +class Bengala extends Cat { + + public String sound() { + return "Bengala Meow!"; + } + +} + diff --git a/demos/java/bengala/wscript b/demos/java/bengala/wscript new file mode 100644 index 0000000000..ad9d763536 --- /dev/null +++ b/demos/java/bengala/wscript @@ -0,0 +1,12 @@ +#! /usr/bin/env python + +def build(bld): + + bld(features = 'javac', + srcdir = 'src', + compat = '1.7', + use = 'cats-src', + name = 'bengala-cat', + recurse_use = True, + ) + diff --git a/demos/java/cats/src/org/example/Cat.java b/demos/java/cats/src/org/example/Cat.java index bf937dc9ad..b01788b496 100644 --- a/demos/java/cats/src/org/example/Cat.java +++ b/demos/java/cats/src/org/example/Cat.java @@ -6,9 +6,10 @@ class Cat extends Animal { public String sound() { + // Use base class just to be able to trigger possible rebuilds based on base class change + String base = super.sound(); return "Meow!"; } - } diff --git a/demos/java/cats/wscript b/demos/java/cats/wscript index 3288d26a9c..5a9bdbfc0d 100644 --- a/demos/java/cats/wscript +++ b/demos/java/cats/wscript @@ -4,7 +4,7 @@ def build(bld): bld(features = 'javac', srcdir = 'src', - compat = '1.5', + compat = '1.7', use = 'animals', name = 'cats-src', ) diff --git a/demos/java/wscript b/demos/java/wscript index 84f16a8489..4ee2f9525f 100644 --- a/demos/java/wscript +++ b/demos/java/wscript @@ -36,10 +36,10 @@ def build(bld): bld(features = 'javac jar javadoc', srcdir = 'src/', # folder containing the sources to compile outdir = 'src', # folder where to output the classes (in the build directory) - compat = '1.3', # java compatibility version number + compat = '1.6', # java compatibility version number sourcepath = ['src', 'sup'], classpath = ['.', '..'], - #jaropts = '-C default/src/ .', # can be used to give files + #jaropts = ['-C', 'default/src/', '.'], # can be used to give files basedir = 'src', # folder containing the classes and other files to package (must match outdir) destfile = 'foo.jar', # do not put the destfile in the folder of the java classes! 
use = 'NNN', @@ -49,5 +49,5 @@ def build(bld): javadoc_output = 'javadoc', ) - bld.recurse('animals cats') + bld.recurse('animals cats bengala') diff --git a/demos/mac_app/sources/main.m b/demos/mac_app/sources/main.m index 4c696b7310..dfb8da3af2 100644 --- a/demos/mac_app/sources/main.m +++ b/demos/mac_app/sources/main.m @@ -1,6 +1,7 @@ // waf sample Mac application - main.m // Chris Pickel, 2011 +#import #import int main(int argc, const char* argv[]) { diff --git a/demos/mac_app/wscript b/demos/mac_app/wscript index a244b54845..7fc96e70e2 100644 --- a/demos/mac_app/wscript +++ b/demos/mac_app/wscript @@ -21,18 +21,19 @@ def configure(conf): if not conf.env.ARCH_ST: conf.fatal('This example is for macs only') conf.env.FRAMEWORK_COCOA = 'Cocoa' - conf.env.ARCH_COCOA = ['i386', 'x86_64'] + conf.env.ARCH_COCOA = ['x86_64', 'arm64'] def build(bld): bld.program( - features = 'c cprogram', - target = 'MacApp', - source = 'sources/main.m', - mac_app = True, - mac_plist = 'Info.plist', - mac_resources = 'resources/MainMenu.nib resources/MacApp.icns', - use = 'COCOA', - install_path = '${PREFIX}', + features = 'c cprogram', + target = 'MacApp', + source = 'sources/main.m', + mac_app = True, + mac_plist = 'Info.plist', + mac_files = bld.path.ant_glob('resources/**'), + mac_files_root = 'resources', + use = 'COCOA', + install_path = '${PREFIX}', ) return diff --git a/demos/precious/wscript b/demos/precious/wscript index ae2fa3434a..1d8ed5eca3 100644 --- a/demos/precious/wscript +++ b/demos/precious/wscript @@ -23,8 +23,7 @@ def build(bld): #rule = '''echo -e "#include \\nint main(){ printf(\\"%%d\\", $$RANDOM); return 0;}" > ${TGT}''', rule = fun, target = node, - always = True, - update_outputs = True) + always = True) bld.program( source = 'precious.c', diff --git a/demos/python/nested_scripts/bar/nested_bar.py b/demos/python/nested_scripts/bar/nested_bar.py new file mode 100644 index 0000000000..f74f2b5a61 --- /dev/null +++ b/demos/python/nested_scripts/bar/nested_bar.py @@ -0,0 +1,3 @@ +""" +Nested file in bar/ +""" diff --git a/demos/python/nested_scripts/foo/nested_foo.py b/demos/python/nested_scripts/foo/nested_foo.py new file mode 100644 index 0000000000..ba64dcb61f --- /dev/null +++ b/demos/python/nested_scripts/foo/nested_foo.py @@ -0,0 +1,3 @@ +""" +Nested file in foo/ +""" diff --git a/demos/python/wscript b/demos/python/wscript index 2a19877981..0d662b63e9 100644 --- a/demos/python/wscript +++ b/demos/python/wscript @@ -29,9 +29,15 @@ def configure(conf): def build(bld): - # first compile a few pyc and pyo files (set install_path=None to disable the installation...) 
+	# first compile a few pyc and pyo files (set install_path=None to disable the installation,
+	# by default install_path is set to ${PYTHONDIR})
	bld(features='py', source=bld.path.ant_glob('*.py'), install_from='.')
+	# example for generated python files
+	target = bld.path.find_or_declare('abc.py')
+	bld(rule='touch ${TGT}', source='wscript', target=target)
+	bld(features='py', source=[target], install_from=target.parent)
+	# then a c extension module
	bld(
		features = 'c cshlib pyext',
@@ -44,3 +50,19 @@
		source = 'test.c',
		target = 'test')
+	# Install files keeping their directory structure (default: relative_trick=True)
+	#
+	# This will create:
+	# * lib/python2.7/site-packages/nested_scripts/foo/nested_foo.py
+	bld(features='py',
+		source=bld.path.ant_glob('nested_scripts/foo/*.py'),
+		install_from='.')
+
+	# Install files flattening the directory structure (relative_trick=False)
+	#
+	# This will create:
+	# * lib/python2.7/site-packages/nested_bar.py
+	bld(features='py',
+		source=bld.path.ant_glob('nested_scripts/bar/*.py'),
+		relative_trick=False,
+		install_from='.')
diff --git a/demos/qt4/but.ui b/demos/qt5/but.ui
similarity index 100%
rename from demos/qt4/but.ui
rename to demos/qt5/but.ui
diff --git a/demos/qt4/data/some.txt b/demos/qt5/data/some.txt
similarity index 100%
rename from demos/qt4/data/some.txt
rename to demos/qt5/data/some.txt
diff --git a/demos/qt5/foo.cpp b/demos/qt5/foo.cpp
new file mode 100644
index 0000000000..8366ec0a06
--- /dev/null
+++ b/demos/qt5/foo.cpp
@@ -0,0 +1,38 @@
+// Thomas Nagy, 2011-2016
+
+#include "foo.h"
+
+Foo::Foo() : QWidget(NULL) {
+	m_button = new QPushButton("Foo Button", this);
+	m_button->setGeometry(QRect(QPoint(50, 60),
+		QSize(120, 50)));
+	connect(m_button, SIGNAL (released()), this, SLOT (handleButton()));
+	myToggle = true;
+}
+
+void Foo::handleButton() {
+	if (myToggle) {
+		m_button->setText("Button Foo");
+	} else {
+		m_button->setText("Foo Button");
+	}
+	myToggle = !myToggle;
+}
+
+int Foo::FortyTwo() {
+	return 42;
+}
+
+class Bar_private : public QWidget {
+	Q_OBJECT
+	signals:
+		void test();
+	public:
+		Bar_private();
+};
+
+Bar_private::Bar_private() : QWidget(NULL) {
+}
+
+#include "foo.moc"
+
diff --git a/demos/qt5/foo.h b/demos/qt5/foo.h
new file mode 100644
index 0000000000..50820d6584
--- /dev/null
+++ b/demos/qt5/foo.h
@@ -0,0 +1,23 @@
+// Thomas Nagy, 2011-2016
+
+#ifndef _FOO
+#define _FOO
+
+#include <QWidget>
+#include <QPushButton>
+
+class Foo : public QWidget {
+	Q_OBJECT
+	signals:
+		void test();
+	private slots:
+		void handleButton();
+	public:
+		Foo();
+		int FortyTwo();
+		QPushButton *m_button;
+	public:
+		bool myToggle;
+};
+
+#endif
diff --git a/demos/qt4/linguist/fr.ts b/demos/qt5/linguist/fr.ts
similarity index 100%
rename from demos/qt4/linguist/fr.ts
rename to demos/qt5/linguist/fr.ts
diff --git a/demos/qt5/main.cpp b/demos/qt5/main.cpp
new file mode 100644
index 0000000000..966a1af8ac
--- /dev/null
+++ b/demos/qt5/main.cpp
@@ -0,0 +1,27 @@
+// Thomas Nagy, 2016 (ita)
+
+#include <QApplication>
+//#include
+//#include "mainwindow.h"
+#include "ui_but.h"
+#include "foo.h"
+
+int main(int argc, char **argv)
+{
+	Q_INIT_RESOURCE(res);
+	QApplication app(argc, argv);
+	Foo window;
+	Ui::Form ui;
+	ui.setupUi(&window);
+	window.show();
+	return app.exec();
+/*
+	MainWindow window;
+	if (argc == 2)
+		window.openFile(argv[1]);
+	else
+		window.openFile(":/files/bubbles.svg");
+	window.show();
+	return app.exec();
+*/
+}
diff --git a/demos/qt5/res.qrc b/demos/qt5/res.qrc
new file mode 100644
index 0000000000..29ec327010
--- /dev/null
+++ b/demos/qt5/res.qrc
@@ -0,0 +1,7 @@
+
+
+
+ ../../docs/slides/presentation/gfx/waflogo.svg
+
+
+
diff --git a/demos/qt5/testqt5.cpp b/demos/qt5/testqt5.cpp
new file mode 100644
index 0000000000..a855b3fc33
--- /dev/null
+++ b/demos/qt5/testqt5.cpp
@@ -0,0 +1,31 @@
+// Example of Qt5 Unit test with QtTest library
+// Federico Pellegrin, 2019 (fedepell)
+
+#include "foo.h"
+#include <QtTest/QtTest>
+
+class TestQt5Test: public QObject {
+	Q_OBJECT
+	private:
+		Foo myFoo;
+	private slots:
+		void testGui();
+		void testFunc();
+};
+
+// Test of the UI by simulating a button click and button label reading
+void TestQt5Test::testGui() {
+	QCOMPARE(myFoo.m_button->text(), QString("Foo Button"));
+	QTest::mouseClick(myFoo.m_button, Qt::LeftButton,Qt::NoModifier, QPoint(5,5), 0);
+	QCOMPARE(myFoo.m_button->text(), QString("Button Foo"));
+}
+
+// Test of a normal function
+void TestQt5Test::testFunc() {
+	QCOMPARE(myFoo.FortyTwo(), 44); // this fails! 42 != 44
+}
+
+QTEST_MAIN(TestQt5Test)
+
+#include "testqt5.moc"
+
diff --git a/demos/qt5/wscript b/demos/qt5/wscript
new file mode 100644
index 0000000000..fbc1691bc8
--- /dev/null
+++ b/demos/qt5/wscript
@@ -0,0 +1,88 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2016 (ita)
+# Federico Pellegrin, 2019 (fedepell)
+
+VERSION='0.0.1'
+APPNAME='qt5_test'
+
+top = '.'
+out = 'build'
+
+def options(opt):
+	opt.load('compiler_cxx qt5 waf_unit_test')
+
+def configure(conf):
+	conf.load('compiler_cxx qt5 waf_unit_test')
+	#conf.env.append_value('CXXFLAGS', ['-g']) # test
+
+	if not conf.env.QT_LRELEASE:
+		# While qt5 detects most Qt tools, most of them are optional
+		conf.fatal('lrelease was not found')
+
+	# These tests would run on Ubuntu but not on other platforms
+	conf.check(
+		define_name = 'XYZ_QT5_TESTS',
+		mandatory = False,
+		execute = True,
+		features = 'qt5 cxx cxxprogram',
+		includes = '.',
+		defines = 'QT_WIDGETS_LIB',
+		use = 'QT5CORE QT5GUI QT5WIDGETS QT5TEST',
+		msg = 'Checking whether Qt5 tests can run',
+		fragment = '''
+#include <QtTest/QtTest>
+class TestQt5Test: public QObject {
+	Q_OBJECT
+	private:
+		void testGui() {
+			QWidget *widget = NULL;
+			QTest::mouseClick(widget, Qt::LeftButton, Qt::NoModifier, QPoint(5,5), 0);
+		}
+};
+
+QTEST_MAIN(TestQt5Test)
+#include "test.moc"
+''')
+
+def build(bld):
+	# According to the Qt5 documentation:
+	# Qt classes in foo.h -> declare foo.h as a header to be processed by moc
+	# add the resulting moc_foo.cpp to the source files
+	# Qt classes in foo.cpp -> include foo.moc at the end of foo.cpp
+	#
+	bld(
+		features = 'qt5 cxx cxxprogram',
+		use = 'QT5CORE QT5GUI QT5SVG QT5WIDGETS',
+		source = 'main.cpp res.qrc but.ui foo.cpp',
+		moc = 'foo.h',
+		target = 'window',
+		includes = '.',
+		lang = bld.path.ant_glob('linguist/*.ts'),
+		langname = 'somefile', # include the .qm files from somefile.qrc
+	)
+
+	if bld.env.XYZ_QT5_TESTS:
+		# Example of integration of Qt5 Unit tests using Qt5Test using waf_unit_test
+		bld(
+			features = 'qt5 cxx cxxprogram test',
+			use = 'QT5CORE QT5GUI QT5WIDGETS QT5TEST',
+			defines = 'QT_WIDGETS_LIB',
+			source = 'foo.cpp testqt5.cpp',
+			moc = 'foo.h',
+			target = 'footest',
+			includes = '.',
+			# ut_str = './${SRC} -o test-report.xml,xunitxml', # put output to a xunit xml
+		)
+
+		bld.add_post_fun(print_test_results) # print output of test runner to user
+
+
+def print_test_results(bld):
+	lst = getattr(bld, 'utest_results', [])
+	if not lst:
+		return
+	for (f, code, out, err) in lst:
+		print(out.decode('utf-8'))
+		print(err.decode('utf-8'))
+
diff --git a/demos/subst/wscript
b/demos/subst/wscript index 521bed2c68..0d6dab8235 100644 --- a/demos/subst/wscript +++ b/demos/subst/wscript @@ -19,7 +19,7 @@ def build(bld): features = 'subst', # the feature 'subst' overrides the source/target processing source = 'foo.in', # list of string or nodes target = 'foo.txt', # list of strings or nodes - encoding = 'ascii', # file encoding for python3, default is ISO8859-1 + encoding = 'ascii', # file encoding for python3, default is latin-1 install_path = '/tmp/uff/', # installation path, optional chmod = Utils.O755, # installation mode, optional PREFIX = bld.env.PREFIX, # variables to use in the substitution diff --git a/demos/tex/src/conclusions.ltx b/demos/tex/src/conclusions.ltx index 119cdaa0a6..e66a340b84 100644 --- a/demos/tex/src/conclusions.ltx +++ b/demos/tex/src/conclusions.ltx @@ -1,6 +1,6 @@ \section{Conclusions} Hopefully, the text will create a sensible PostScript or \acronym{PDF} document. That choice is -made in the wscript file in this directory. It perhas needs making into a command line option. -Also there should be the possiblity of making both PostScript and \acronym{PDF} on the same run +made in the wscript file in this directory. It perhaps needs making into a command line option. +Also there should be the possibility of making both PostScript and \acronym{PDF} on the same run -- at least if generating \acronym{PDF} from \acronym{DVI}. diff --git a/demos/tex/src/document-glossaries.ltx b/demos/tex/src/document-glossaries.ltx index 647a737157..267daa9e32 100644 --- a/demos/tex/src/document-glossaries.ltx +++ b/demos/tex/src/document-glossaries.ltx @@ -10,7 +10,8 @@ \maketitle \tableofcontents -\glossarystyle{altlist} +% next line is broken after using a newer texlive version +%\glossarystyle{altlist} %% The glossary entries \newglossaryentry{african} diff --git a/demos/tex/src/wscript b/demos/tex/src/wscript index 7e45efaebb..f3f4a6fafe 100644 --- a/demos/tex/src/wscript +++ b/demos/tex/src/wscript @@ -17,7 +17,7 @@ def build(bld): # optional parameters obj.outs = 'ps' # we want a postscript output too - 'ps pdf' works too - obj.prompt = 1 # put 0 for the batchmode (conceals the debug output) + obj.prompt = 1 # put 0 for the nonstopmode (conceals the debug output) obj.deps = 'wscript crossreferencing.ltx' # use this to give dependencies directly # or more simply, for a pdf.. diff --git a/demos/unit_test/tests/test0/wscript_build b/demos/unit_test/tests/test0/wscript_build index 33b8d3d904..36456e48ff 100644 --- a/demos/unit_test/tests/test0/wscript_build +++ b/demos/unit_test/tests/test0/wscript_build @@ -1,10 +1,12 @@ #! /usr/bin/env python # encoding: utf-8 +bld.env.NARG = '-flag2' bld( features = 'cxx cxxprogram test', source = 'HelloWorldTest.cpp', target = 'unit_test_program', use = 'unittestmain useless CPPUNIT', + ut_str = '${SRC[0].abspath()} -flag1 ${NARG}' ) diff --git a/demos/unit_test/tests/test1/wscript_build b/demos/unit_test/tests/test1/wscript_build index e3ffd3288f..b46df743d5 100644 --- a/demos/unit_test/tests/test1/wscript_build +++ b/demos/unit_test/tests/test1/wscript_build @@ -1,17 +1,11 @@ #! 
/usr/bin/env python # encoding: utf-8 - -def fun(task): - #print task.generator.bld.name_to_obj('somelib').link_task.outputs[0].abspath(task.env) - task.ut_exec.append('--help') - bld( features = 'cxx cxxprogram test', source = 'AccumulatorTest.cpp', target = 'unit_test_program', use = 'unittestmain useless CPPUNIT', - ut_cwd = bld.path.abspath(), - ut_fun = fun + ut_cwd = bld.path, ) diff --git a/demos/unit_test/tests/test2/test.py b/demos/unit_test/tests/test2/test.py new file mode 100644 index 0000000000..20b4ff325f --- /dev/null +++ b/demos/unit_test/tests/test2/test.py @@ -0,0 +1,5 @@ +#!/usr/bin/env python +# encoding: utf-8 +import sys +print('success') +sys.exit(0) diff --git a/demos/unit_test/tests/test2/wscript_build b/demos/unit_test/tests/test2/wscript_build new file mode 100644 index 0000000000..83f80fd72d --- /dev/null +++ b/demos/unit_test/tests/test2/wscript_build @@ -0,0 +1,11 @@ +#! /usr/bin/env python +# encoding: utf-8 + +if bld.env['PYTHON']: + bld( + features = 'test_scripts', + test_scripts_source = 'test.py', + test_scripts_template = '${PYTHON} ${SCRIPT}' + ) + + diff --git a/demos/unit_test/tests/test3/test.py.in b/demos/unit_test/tests/test3/test.py.in new file mode 100644 index 0000000000..a4e6467d81 --- /dev/null +++ b/demos/unit_test/tests/test3/test.py.in @@ -0,0 +1,6 @@ +#!/usr/bin/env python +# encoding: utf-8 +import test_import +import sys +print('success from @NAME@') +sys.exit(@EXIT_STATUS@) diff --git a/demos/unit_test/tests/test3/test_import.py b/demos/unit_test/tests/test3/test_import.py new file mode 100644 index 0000000000..8384e2bf60 --- /dev/null +++ b/demos/unit_test/tests/test3/test_import.py @@ -0,0 +1,3 @@ +#!/usr/bin/env python +# encoding: utf-8 +print('imported') diff --git a/demos/unit_test/tests/test3/wscript_build b/demos/unit_test/tests/test3/wscript_build new file mode 100644 index 0000000000..a2c6ae31a7 --- /dev/null +++ b/demos/unit_test/tests/test3/wscript_build @@ -0,0 +1,29 @@ +#! 
/usr/bin/env python +# encoding: utf-8 + +if bld.env['PYTHON']: + bld( + features = 'subst', + source = 'test.py.in', + target = 'test.1.py', + NAME = 'first test3', + EXIT_STATUS = '0', + ) + bld( + features = 'subst', + source = 'test.py.in', + target = 'test.2.py', + NAME = 'second test3', + EXIT_STATUS = '0', + ) + paths = { + 'PYTHONPATH': bld.path.abspath() + } + bld( + features = 'test_scripts', + test_scripts_source = 'test.1.py test.2.py', + test_scripts_template = '${PYTHON} ${SCRIPT}', + test_scripts_paths = paths + ) + + diff --git a/demos/unit_test/tests/wscript b/demos/unit_test/tests/wscript index 947759c880..c5ef82c5f5 100644 --- a/demos/unit_test/tests/wscript +++ b/demos/unit_test/tests/wscript @@ -2,7 +2,7 @@ # encoding: utf-8 def build(bld): - bld.recurse('test0 test1') + bld.recurse('test0 test1 test2 test3') obj = bld( features = 'cxx cxxstlib', diff --git a/demos/unit_test/wscript b/demos/unit_test/wscript index 037a76244c..cf89b2e2ae 100644 --- a/demos/unit_test/wscript +++ b/demos/unit_test/wscript @@ -16,7 +16,6 @@ out = 'build' def options(opt): opt.load('compiler_cxx') opt.load('waf_unit_test') - opt.add_option('--onlytests', action='store_true', default=True, help='Exec unit tests only', dest='only_tests') def configure(conf): conf.load('compiler_cxx') @@ -27,6 +26,9 @@ def configure(conf): if 'dl' not in conf.env.LIB_CPPUNIT: l = conf.check(lib='dl', uselib_store='CPPUNIT') + # the interpreted tests need python + conf.find_program('python', mandatory=False) + from waflib import Logs def summary(bld): lst = getattr(bld, 'utest_results', []) @@ -48,15 +50,16 @@ def summary(bld): def build(bld): bld.recurse('src tests') - # unittestw.summary is a pretty ugly function for displaying a report (feel free to improve!) + # waf_unit_test.summary is a pretty ugly function for displaying a report (feel free to improve!) # results -> bld.utest_results [(filename, returncode, stdout, stderr), (..., ), ...] #bld.add_post_fun(waf_unit_test.summary) bld.add_post_fun(summary) # to execute all tests: # $ waf --alltests - # to set this behaviour permanenly: - bld.options.all_tests = True + # to set this behaviour permanently: + #bld.options.all_tests = True + bld.options.clear_failed_tests = True # debugging zone: # $ waf --zones=ut diff --git a/demos/vala/resources/appwindow.ui b/demos/vala/resources/appwindow.ui new file mode 100644 index 0000000000..a4ead6f128 --- /dev/null +++ b/demos/vala/resources/appwindow.ui @@ -0,0 +1,8 @@ + + + + diff --git a/demos/vala/resources/test.gresource.xml b/demos/vala/resources/test.gresource.xml new file mode 100644 index 0000000000..ccd51bf6b1 --- /dev/null +++ b/demos/vala/resources/test.gresource.xml @@ -0,0 +1,7 @@ + + + + + appwindow.ui + + diff --git a/demos/vala/resources/test.vala b/demos/vala/resources/test.vala new file mode 100644 index 0000000000..80b1f7919c --- /dev/null +++ b/demos/vala/resources/test.vala @@ -0,0 +1,22 @@ +using Gtk; + +[GtkTemplate (ui="/org/test/appwindow.ui")] +class TestWindow : Gtk.ApplicationWindow { + public TestWindow(Gtk.Application app) { + Object(application: app); + } +} + +class TestApp : Gtk.Application { + public override void activate() { + var window = new TestWindow(this); + + window.show_all(); + } +} + +int main(string[] args) { + var app = new TestApp(); + + return app.run(); +} diff --git a/demos/vala/resources/wscript b/demos/vala/resources/wscript new file mode 100644 index 0000000000..ca6ec6821d --- /dev/null +++ b/demos/vala/resources/wscript @@ -0,0 +1,32 @@ +#! 
/usr/bin/env python + +def configure(conf): + try: + conf.check_cfg(package='gtk+-3.0', uselib_store='GTK3', args='--cflags --libs') + conf.load('glib2') + except conf.errors.ConfigurationError: + pass + else: + conf.env.build_gtk3 = True + +def build(bld): + if not bld.env.build_gtk3: + return + + bld ( + features = 'c glib2', + use = 'GLIB GIO GOBJECT', + source = 'test.gresource.xml', + target = 'foo' + ) + + bld ( + features = 'c cprogram glib2', + use = 'GTK3 foo', + packages = 'gtk+-3.0', + source = 'test.vala', + vala_resources = 'test.gresource.xml', # adds --gresources /path/to/test.gresources.xml + target = 'compositewidget', + vala_target_glib = '2.38' + ) + diff --git a/demos/vala/src/vala-gtk-example.vala b/demos/vala/src/vala-gtk-example.vala deleted file mode 100644 index 61dd66cad4..0000000000 --- a/demos/vala/src/vala-gtk-example.vala +++ /dev/null @@ -1,40 +0,0 @@ -/* GTK+ Vala Sample Code */ -using GLib; -using Gtk; - -public void trace (string message) { - #if DEBUG - stdout.printf (message); - #endif -} - -public class Sample : Window { - construct { - title = "Sample Window"; - create_widgets (); - } - - public void create_widgets () { - destroy += Gtk.main_quit; - - var button = new Button.with_label ("Hello World"); - button.clicked += btn => { - title = btn.label; - }; - - add (button); - } - - static int main (string[] args) { - Gtk.init (ref args); - - trace ("testing vala conditional compilation\n"); - - var sample = new Sample (); - sample.show_all (); - - Gtk.main (); - return 0; - } -} - diff --git a/demos/vala/src/wscript_build b/demos/vala/src/wscript_build deleted file mode 100644 index c88254a81e..0000000000 --- a/demos/vala/src/wscript_build +++ /dev/null @@ -1,13 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# Jaap Haitsma, 2008 -# Thomas Nagy, 2010 - -bld.program( - packages = 'gtk+-2.0', - target = 'vala-gtk-example', - uselib = 'GTK GLIB', - source = 'vala-gtk-example.vala', - vala_defines = ['DEBUG'] - ) - diff --git a/demos/vala/stlib/wscript_build b/demos/vala/stlib/wscript_build index 3b59b25321..bebc73491f 100644 --- a/demos/vala/stlib/wscript_build +++ b/demos/vala/stlib/wscript_build @@ -1,10 +1,18 @@ +#! 
/usr/bin/env python + +bld.stlib ( + target = 'static_lib1', + source = ['src/static_lib.vala'], +) + bld.stlib ( - target = 'statlic_lib', + target = 'static_lib2', source = ['src/static_lib.vala'], + vala_dir = 'meh', ) bld.program ( target = 'program', - use = 'statlic_lib', + use = 'static_lib1', source = 'program.vala' ) diff --git a/demos/vala/wscript b/demos/vala/wscript index 4324846063..3eb6a9fcb8 100644 --- a/demos/vala/wscript +++ b/demos/vala/wscript @@ -15,10 +15,11 @@ def options(opt): opt.load('vala') def configure(conf): + conf.env.VALA_MINVER = (0, 25, 0) conf.load('compiler_c vala') conf.check_cfg(package='glib-2.0', uselib_store='GLIB', atleast_version='2.10.0', mandatory=1, args='--cflags --libs') - conf.check_cfg(package='gtk+-2.0', uselib_store='GTK', atleast_version='2.10.0', mandatory=1, args='--cflags --libs') + conf.recurse('resources') def build(bld): - bld.recurse('src shlib multi-file stlib') + bld.recurse('shlib multi-file stlib resources') diff --git a/demos/variants/wscript b/demos/variants/wscript index 47f963f70c..167dcbe01e 100644 --- a/demos/variants/wscript +++ b/demos/variants/wscript @@ -72,20 +72,20 @@ def init(ctx): cmd = name + '_' + x variant = x - def buildall(ctx): - import waflib.Options - for x in ('build_debug', 'build_release'): - waflib.Options.commands.insert(0, x) - ## if you work on "debug" 99% of the time, here is how to re-enable "waf build": #for y in (BuildContext, CleanContext, InstallContext, UninstallContext): # class tmp(y): # variant = 'debug' - - # you may also set 'win32/debug' instead of 'debug' (waf 1.6.9) + # you may also set 'win32/debug' instead of 'debug' # the commands will be "build_win32/debug" or "build_win32/release" # in this case you may want to modify Options.commands in this "init" function +# calling "waf buildall" will run "waf build_debug build_release" +def buildall(ctx): + import waflib.Options + for x in ('build_debug', 'build_release'): + waflib.Options.commands.insert(0, x) + # -------------------------- # or, if you want to memorize the default variant and just type "waf", # diff --git a/demos/wscript b/demos/wscript deleted file mode 100644 index f9caaffda9..0000000000 --- a/demos/wscript +++ /dev/null @@ -1,146 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# J. Carretero, 2010 (zougloub) -# Thomas Nagy, 2010 (ita) - -""" -https://launchpad.net/subunit/ -""" - - - -import sys, os - -if "uname" in dir(os): machine = os.uname()[1] -elif sys.platform == "win32": machine = os.environ["COMPUTERNAME"] -else: raise Exception("Unknown platform, cannot get machine name") - -from waflib import Logs, Errors - -# python 2.3 tends to hang for whatever reason :-/ -PYTHONS = "2.5 2.6 2.7 3.0 3.1 3.2 3.3 3.4".split() - -DIRS = ['c', 'python'] -#DIRS = [x for x in os.listdir('.') if os.path.isdir(x) and x not in ('variants', 'build', 'mac_app', 'precious')] - -def options(opt): - for d in opt.path.ant_glob(DIRS, excl=['build', 'variants'], src=False, dir=True): - if d.name[0] == '.' 
or d.name == 'variants' or d.name == 'build': - continue - - try: - opt.recurse(d.name) - except: - pass - # one sub-project uses autoconfig, but i do not want it here - from waflib import Configure - Configure.autoconfig = False - -def configure(conf): - - #Logs.info('Running action configure') # build farm - try: - sub = conf.find_file('subprocess.py', ['/usr/lib64/python', '/usr/lib/python', '/usr/local/lib64/python', '/usr/local/lib/python']) - except: - sub = '' - - curwaf = os.path.abspath(sys.argv[0]) - conf.exec_command('%s %s configure build --zip-type=gz --tools=doxygen,fluid,ocaml,swig,compiler_fc,fc_config,fc,fc_scan,g95,ifort,gfortran,batched_cc,%s --prelude='' && /bin/cp waf demos/' % (sys.executable, curwaf, sub), - cwd=conf.path.parent.abspath()) - - node = conf.path.find_resource('waf') - if not node: - conf.fatal('Could not find Waf') - #if conf.exec_command([node.abspath(), '--help'], shell=False, env={}, cwd=node.parent.abspath()): - # conf.fatal('the waf file cannot be executed') - conf.env.WAF = node.abspath() - - conf.in_msg += 1 - for d in conf.path.ant_glob(DIRS, excl=['build', 'variants', 'precious'], src=False, dir=True): - if d.name[0] == '.': - continue - - try: - conf.recurse(d.name) - except Exception: - node = conf.path.find_node('%s/build/config.log' % d.name) - if node: - Logs.info("-- BEGIN %s config.log --\n%s-- END %s config.log --" % (d.name, node.read(), d.name)) - try: - e = sys.exc_info()[1] - print(e) - print(e.stdout, e.stderr) - except Exception: - pass - else: - conf.env.append_value('CFG', [d.name]) - - print("The configurations enabled are %r" % conf.env.CFG) - - # now remove the cache folders and re-create them - conf.cmd_and_log('rm -rf .waf*') - for x in PYTHONS: - try: - conf.find_program('python'+x, var=x) - # unpacking the waf directory concurrently can lead to a race condition, we'll need to take care of this (thanks, build farm!) 
- conf.cmd_and_log(conf.env[x] + ['./waf', '--version'], env={}) - except Exception as e: - pass - else: - conf.env.append_value('PYTHONS', x) - - Logs.info("executing the build for folders %r and with pythons %r" % (conf.env.CFG, conf.env.PYTHONS)) - Logs.info("contents of config.log:") - Logs.info(conf.path.find_node('build/config.log').read()) - -def build(bld): - print('Note: call "waf installcheck" (the default build does not do anything)') - -from waflib.Build import BuildContext -class abc(BuildContext): - cmd = "installcheck" - fun = "installcheck" - -def installcheck(bld): - bld.jobs = 1 - #if bld.cmd == 'build': - # Logs.info('Running action build') # build farm - - #print('testsuite: waflib') - def waf_cmd(self): - cmd = self.env[self.generator.python] + [self.env.WAF, 'distclean', 'configure', 'build', 'clean', 'build', '-o', 'build' + self.generator.python] - cwd = self.generator.cwd - env = dict(os.environ) - env['WAFDIR'] = '' - env['WAFLOCK'] = '.lock-wscript' + self.generator.python # use a different build directory for each build - try: - bld.cmd_and_log(cmd, cwd=cwd, env=env, quiet=0, ) - except Errors.WafError as e: - e = sys.exc_info()[1] - s = "testsuite: %s\ntestsuite-xfail: %s [ %s \n %s ]\n" % (self.generator.name, self.generator.name, e.stderr, e.stdout) - Logs.info(s) - else: - s = "testsuite: %s\ntestsuite-success: %s\n" % (self.generator.name, self.generator.name) - Logs.info(s) - - for x in bld.env.PYTHONS: - for dirname in bld.env.CFG: - bld(rule = waf_cmd, - cwd = dirname, - always = 1, - python = x, - name = '%s_%s' % (dirname, x)) - - #if bld.cmd == 'build': - # Logs.info('BUILD STATUS: 0\nACTION PASSED: build') # build farm - # Logs.info('Running action test') # build farm - #Logs.info('testsuite: abc') - #def end(bld): - # Logs.info('testsuite-success: abc') - # Logs.info('TEST STATUS: 0\nACTION FAILED: test') - #bld.add_post_fun(end) - #elif bld.cmd == 'install': - # Logs.info('Running action install') # build farm - # Logs.info('INSTALL STATUS: 0\nACTION PASSED: install') # build farm - - diff --git a/docs/book/advbuild.txt b/docs/book/advbuild.txt deleted file mode 100644 index 4a71053a2d..0000000000 --- a/docs/book/advbuild.txt +++ /dev/null @@ -1,402 +0,0 @@ -== Advanced build definitions - -=== Custom commands - -==== Context inheritance - -An instance of the class _waflib.Context.Context_ is used by default for the custom commands. 
To provide a custom context object it is necessary to create a context subclass: - -// advbuild_subclass -[source,python] ---------------- -def configure(ctx): - print(type(ctx)) - -def foo(ctx): <1> - print(type(ctx)) - -def bar(ctx): - print(type(ctx)) - -from waflib.Context import Context - -class one(Context): - cmd = 'foo' <2> - -class two(Context): - cmd = 'tak' <3> - fun = 'bar' ---------------- - -<1> A custom command using the default context -<2> Bind a context class to the command _foo_ -<3> Declare a new command named _tak_, but set it to call the script function _bar_ - -The execution output will be: - -[source,shishell] ---------------- -$ waf configure foo bar tak -Setting top to : /tmp/advbuild_subclass -Setting out to : /tmp/advbuild_subclass/build - -'configure' finished successfully (0.008s) - -'foo' finished successfully (0.001s) - -'bar' finished successfully (0.001s) - -'tak' finished successfully (0.001s) ---------------- - -A typical application of custom context is subclassing the build context to use the configuration data loaded in *ctx.env*: - -[source,python] ---------------- -def configure(ctx): - ctx.env.FOO = 'some data' - -def build(ctx): - print('build command') - -def foo(ctx): - print(ctx.env.FOO) - -from waflib.Build import BuildContext -class one(BuildContext): - cmd = 'foo' - fun = 'foo' ---------------- - -The output will be the following: - -[source,shishell] ---------------- -$ waf configure foo -Setting top to : /tmp/advbuild_confdata -Setting out to : /tmp/advbuild_confdata/build -'configure' finished successfully (0.006s) -Waf: Entering directory `/disk/comp/waf/docs/book/examples/advbuild_confdata/build' -some data -Waf: Leaving directory `/disk/comp/waf/docs/book/examples/advbuild_confdata/build' -'foo' finished successfully (0.004s) ---------------- - -NOTE: The build commands are using this system: _waf install_ → _waflib.Build.InstallContext_, _waf step_ → _waflib.Build.StepContext_, etc - -==== Command composition - -To re-use commands that have context object of different base classes, insert them in the _command stack_: - -// advbuild_composition -[source,python] ---------------- -def configure(ctx): - pass - -def build(ctx): - pass - -def cleanbuild(ctx): - from waflib import Options - Options.commands = ['clean', 'build'] + Options.commands ---------------- - -This technique is useful for writing testcases. By executing 'waf test', the following script will configure a project, create source files in the source directory, build a program, modify the sources, and rebuild the program. In this case, the program must be rebuilt because a header (implicit dependency) has changed. 
- -[source,python] ---------------- -def options(ctx): - ctx.load('compiler_c') - -def configure(ctx): - ctx.load('compiler_c') - -def setup(ctx): - n = ctx.path.make_node('main.c') - n.write('#include "foo.h"\nint main() {return 0;}\n') - - global v - m = ctx.path.make_node('foo.h') - m.write('int k = %d;\n' % v) - v += 1 - -def build(ctx): - ctx.program(source='main.c', target='app') - -def test(ctx): - global v <1> - v = 12 - - import Options <2> - lst = ['configure', 'setup', 'build', 'setup', 'build'] - Options.commands = lst + Options.commands ---------------- - -<1> A global variable may be used to share data between commands deriving from different classes -<2> The test command is used to add more commands - -The following output will be observed: - -[source,shishell] ---------------- -$ waf test -'test' finished successfully (0.000s) -Setting top to : /tmp/advbuild_testcase -Setting out to : /tmp/advbuild_testcase/build -Checking for 'gcc' (c compiler) : ok -'configure' finished successfully (0.092s) -'setup' finished successfully (0.001s) -Waf: Entering directory `/tmp/advbuild_testcase/build' -[1/2] c: main.c -> build/main.c.0.o -[2/2] cprogram: build/main.c.0.o -> build/app -Waf: Leaving directory `/tmp/advbuild_testcase/build' -'build' finished successfully (0.137s) -'setup' finished successfully (0.002s) -Waf: Entering directory `/tmp/advbuild_testcase/build' -[1/2] c: main.c -> build/main.c.0.o -[2/2] cprogram: build/main.c.0.o -> build/app -Waf: Leaving directory `/tmp/advbuild_testcase/build' -'build' finished successfully (0.125s) ---------------- - -==== Binding a command from a Waf tool - -When the top-level wscript is read, it is converted into a python module and kept in memory. Commands may be added dynamically by injecting the desired function into that module. We will now show how to bind a simple command from a Waf tool: - -// advbuild_cmdtool -[source,python] ---------------- -top = '.' -out = 'build' - -def options(opt): - opt.load('some_tool', tooldir='.') - -def configure(conf): - pass ---------------- - -Waf tools are loaded once for the configuration and for the build. To ensure that the tool is always enabled, it is mandatory to load its options, even if the tool does not actually provide options. Our tool 'some_tool.py', located next to the 'wscript' file, will contain the following code: - -[source,python] ---------------- -from waflib import Context - -def cnt(ctx): <1> - """do something""" - print('just a test') - -Context.g_module.__dict__['cnt'] = cnt <2> ---------------- - -<1> The function to bind must accept a `Context` object as first argument -<2> The main wscript file of the project is loaded as a python module and stored as `Context.g_module` - -The execution output will be the following. - -[source,shishell] ---------------- -$ waf configure cnt -Setting top to : /tmp/examples/advbuild_cmdtool -Setting out to : /tmp/advbuild_cmdtool/build -'configure' finished successfully (0.006s) -just a test -'cnt' finished successfully (0.001s) ---------------- - -=== Custom build outputs - -==== Multiple configurations - -The _WAFLOCK_ environment variable is used to control the configuration lock and to point at the default build directory. 
Observe the results on the following project: - -// advbuild_waflock -[source,python] ---------------- -def configure(conf): - pass - -def build(bld): - bld(rule='touch ${TGT}', target='foo.txt') ---------------- - -We will change the _WAFLOCK_ variable in the execution: - -[source,shishell] ---------------- -$ export WAFLOCK=.lock-wafdebug <1> - -$ waf -Waf: Entering directory `/tmp/advbuild_waflock/debug' -[1/1] foo.txt: -> debug//foo.txt <2> -Waf: Leaving directory `/tmp/advbuild_waflock/debug' -'build' finished successfully (0.012s) - -$ export WAFLOCK=.lock-wafrelease - -$ waf distclean configure -'distclean' finished successfully (0.001s) -'configure' finished successfully (0.176s) - -$ waf -Waf: Entering directory `/tmp/advbuild_waflock/release' <3> -[1/1] foo.txt: -> release/foo.txt -Waf: Leaving directory `/tmp/advbuild_waflock/release' -'build' finished successfully (0.034s) - -$ tree -a -. -|-- .lock-debug <4> -|-- .lock-release -|-- debug -| |-- .wafpickle-7 -| |-- c4che -| | |-- build.config.py -| | `-- _cache.py -| |-- config.log -| `-- foo.txt -|-- release -| |-- .wafpickle-7 -| |-- c4che -| | |-- build.config.py -| | `-- _cache.py -| |-- config.log -| `-- foo.txt -`-- wscript ---------------- - -<1> The lock file points at the configuration of the project in use and at the build directory to use -<2> The files are output in the build directory +debug+ -<3> The configuration _release_ is used with a different lock file and a different build directory. -<4> The contents of the project directory contain the two lock files and the two build folders. - -The lock file may also be changed from the code by changing the appropriate variable in the waf scripts: - -[source,python] ---------------- -from waflib import Options -Options.lockfile = '.lock-wafname' ---------------- - -NOTE: The output directory pointed at by the waf lock file only has effect when not given in the waf script - -==== Changing the output directory - -===== Variant builds - -In the previous section, two different configurations were used for similar builds. We will now show how to inherit the same configuration by two different builds, and how to output the targets in different folders. Let's start with the project file: - -// advbuild_variant -[source,python] ---------------- -def configure(ctx): - pass - -def build(ctx): - ctx(rule='touch ${TGT}', target=ctx.cmd + '.txt') <1> - -from waflib.Build import BuildContext -class debug(BuildContext): <2> - cmd = 'debug' - variant = 'debug' <3> ---------------- - -<1> The command being called is _self.cmd_ -<2> Create the _debug_ command inheriting the build context -<3> Declare a folder for targets of the _debug_ command - -This project declares two different builds _build_ and _debug_. Let's examine the execution output: - -[source,shishell] ---------------- -waf configure build debug -Setting top to : /tmp/advbuild_variant -Setting out to : /tmp/advbuild_variant/build -'configure' finished successfully (0.007s) -Waf: Entering directory `/tmp/advbuild_variant/build' -[1/1] build.txt: -> build/build.txt -Waf: Leaving directory `/tmp/advbuild_variant/build' -'build' finished successfully (0.020s) -Waf: Entering directory `/tmp/build_variant/build/debug' -[1/1] debug.txt: -> build/debug/debug.txt <1> -Waf: Leaving directory `/tmp/advbuild_variant/build/debug' -'debug' finished successfully (0.021s) - -$ tree -. 
-|-- build -| |-- build.txt <2> -| |-- c4che -| | |-- build.config.py -| | `-- _cache.py -| |-- config.log -| `-- debug -| `-- debug.txt <3> -`-- wscript ---------------- - -<1> Commands are executed from _build/variant_ -<2> The default _build_ command does not have any variant -<3> The target _debug_ is under the variant directory in the build directory - -===== Configuration sets for variants - -The variants may require different configuration sets created during the configuration. Here is an example: - -// advbuild_variant -[source,python] ---------------- -def options(opt): - opt.load('compiler_c') - -def configure(conf): - conf.setenv('debug') <1> - conf.load('compiler_c') - conf.env.CFLAGS = ['-g'] <2> - - conf.setenv('release') - conf.load('compiler_c') - conf.env.CFLAGS = ['-O2'] - -def build(bld): - if not bld.variant: <3> - bld.fatal('call "waf build_debug" or "waf build_release", and try "waf --help"') - bld.program(source='main.c', target='app', includes='.') <4> - -from waflib.Build import BuildContext, CleanContext, \ - InstallContext, UninstallContext - -for x in 'debug release'.split(): - for y in (BuildContext, CleanContext, InstallContext, UninstallContext): - name = y.__name__.replace('Context','').lower() - class tmp(y): <5> - cmd = name + '_' + x - variant = x ---------------- - -<1> Create a new configuration set to be returned by 'conf.env', and stored in 'c4che/debug_cache.py' -<2> Modify some data in the configuration set -<3> Make sure a variant is set, this will disable the normal commands 'build', 'clean' and 'install' -<4> 'bld.env' will load the configuration set of the appropriate variant ('debug_cache.py' when in 'debug') -<5> Create new commands such as 'clean_debug' or 'install_debug' (the class name does not matter) - -The execution output will be similar to the following: - -[source,shishell] ---------------- -$ waf clean_debug build_debug clean_release build_release -'clean_debug' finished successfully (0.005s) -Waf: Entering directory `/tmp/examples/advbuild_variant_env/build/debug' -[1/2] c: main.c -> build/debug/main.c.0.o -[2/2] cprogram: build/debug/main.c.0.o -> build/debug/app -Waf: Leaving directory `/tmp/examples/advbuild_variant_env/build/debug' -'build_debug' finished successfully (0.051s) -'clean_release' finished successfully (0.003s) -Waf: Entering directory `/tmp/examples/advbuild_variant_env/build/release' -[1/2] c: main.c -> build/release/main.c.0.o -[2/2] cprogram: build/release/main.c.0.o -> build/release/app -Waf: Leaving directory `/tmp/examples/advbuild_variant_env/build/release' -'build_release' finished successfully (0.052s) ---------------- - diff --git a/docs/book/architecture.txt b/docs/book/architecture.txt deleted file mode 100644 index 1a9306e63c..0000000000 --- a/docs/book/architecture.txt +++ /dev/null @@ -1,337 +0,0 @@ - -== Waf architecture overview - -This chapter provides describes the Waf library and the interaction between the components. - -=== Modules and classes - -==== Core modules - -Waf consists of the following modules which constitute the core library. They are located in the directory `waflib/`. The modules located under `waflib/Tools` and `waflib/extras` are extensions which are not part of the Waf core. 
- -.List of core modules -[options="header", cols="1,6"] -|================= -|Module | Role -|Build | Defines the build context classes (build, clean, install, uninstall), which holds the data for one build (paths, configuration data) -|Configure | Contains the configuration context class, which is used for launching configuration tests and writing the configuration settings for the build -|ConfigSet | Contains a dictionary class which supports a lightweight copy scheme and provides persistence services -|Context | Contains the base class for all waf commands (context parameters of the Waf commands) -|Errors | Exceptions used in the Waf code -|Logs | Loggging system wrapping the calls to the python logging module -|Node | Contains the file system representation class -|Options | Provides a custom command-line option processing system based on optparse -|Runner | Contains the task execution system (thread-based producer-consumer) -|Scripting | Constitutes the entry point of the Waf application, executes the user commands such as build, configuration and installation -|TaskGen | Provides the task generator system, and its extension system based on method addition -|Task | Contains the task class definitions, and factory functions for creating new task classes -|Utils | Contains support functions and classes used by other Waf modules -|================= - -Not all core modules are required for using Waf as a library. The dependencies between the modules are represented on the following diagram. For example, the module 'Node' requires both modules 'Utils' and 'Errors'. Conversely, if the module 'Build' is used alone, then the modules 'Scripting' and 'Configure' can be removed safely. - -image::core{PIC}["Module dependencies"{backend@docbook:,height=400:},align="center"] - -==== Context classes - -User commands, such as 'configure' or 'build', are represented by classes derived from 'waflib.Context.Context'. When a command does not have a class associated, the base class 'waflib.Context.Context' is used instead. - -The method 'execute' is the start point for a context execution, it often calls the method 'recurse' to start reading the user scripts and execute the functions referenced by the 'fun' class attribute. - -The command is associated to a context class by the class attribute 'cmd' set on the class. Context subclasses are added in 'waflib.Context.classes' by the metaclass 'store_context' and loaded through the function 'waflib.Context.create_context'. The classes defined last will replace existing commands. - -As an example, the following context class will define or override the 'configure' command. When calling 'waf configure', the function 'foo' will be called from wscript files: - -[source,python] ---------------- -from waflib.Context import Context -class somename(Context): - cmd = 'configure' - fun = 'foo' ---------------- - -image::classes{PIC}["Context classes"{backend@docbook:,width=850:},align="center"] - -==== Build classes - -The class 'waflib.Build.BuildContext' and its subclasses such as 'waflib.Build.InstallContext' or 'waflib.Build.StepContext' have task generators created when reading the user scripts. The task generators will usually have task instances, depending on the operations performed after all task generators have been processed. - -The 'ConfigSet' instances are copied from the build context to the tasks ('waflib.ConfigSet.ConfigSet.derive') to propagate values such as configuration flags. 
A copy-on-write is performed through most methods of that class (append_value, prepend_value, append_unique). - -The 'Parallel' object encapsulates the iteration over all tasks of the build context, and delegates the execution to thread objects (producer-consumer). - -The overall structure is represented on the following diagram: - -image::classes_build{PIC}["Build classes"{backend@docbook:,width=1100:},align="center"] - -=== Context objects - -==== Context commands and recursion - -The context commands are designed to be as independent as possible, and may be executed concurrently. The main application is the execution of small builds as part of configuration tests. For example, the method 'waflib.Configure.run_build' creates a private build context internally to perform the tests. -Here is an example of a build that creates and executes simple configuration contexts concurrently: - -// architecture_link -[source,python] ---------------- -import os -from waflib.Configure import conf, ConfigurationContext -from waflib import Task, Build, Logs - -def options(ctx): - ctx.load('compiler_c') - -def configure(ctx): - ctx.load('compiler_c') - -def build(ctx): - ctx(rule=run_test, always=True, header_name='stdio.h') <1> - ctx(rule=run_test, always=True, header_name='unistd.h') - -def run_test(self): - top = self.generator.bld.srcnode.abspath() - out = self.generator.bld.bldnode.abspath() - - ctx = ConfigurationContext(top_dir=top, out_dir=out) <2> - ctx.init_dirs() <3> - - ctx.in_msg = 1 <4> - ctx.msg('test') <5> - - header = self.generator.header_name - logfile = self.generator.path.get_bld().abspath() + os.sep \ - + header + '.log' - ctx.logger = Logs.make_logger(logfile, header) <6> - - ctx.env = self.env.derive() <7> - ctx.check(header_name=header) <8> ---------------- - -<1> Create task generators which will run the method 'run_test' method defined below -<2> Create a new configuration context as part of a 'Task.run' call -<3> Initialize ctx.srcnode and ctx.bldnode (build and configuration contexts only) -<4> Set the internal counter for the context methods 'msg', 'start_msg' and 'end_msg' -<5> The console output is disabled (non-zero counter value to disable nested messages) -<6> Each context may have a logger to redirect the error messages -<7> Initialize the default environment to a copy of the task one -<8> Perform a configuration check - -After executing 'waf build', the project folder will contain the new log files: - -[source,shishell] ---------------- -$ tree -. -|-- build -| |-- c4che -| | |-- build.config.py -| | `-- _cache.py -| |-- config.log -| |-- stdio.h.log -| `-- unistd.h.log -`-- wscript ---------------- - -A few measures are set to ensure that the contexts can be executed concurrently: - -. Context objects may use different loggers derived from the 'waflib.Logs' module. -. Each context object is associated to a private subclass of 'waflib.Node.Node' to ensure that the node objects are unique. To pickle Node objects, it is important to prevent concurrent access by using the lock object 'waflib.Node.pickle_lock'. - -==== Build context and persistence - -The build context holds all the information necessary for a build. To accelerate the start-up, a part of the information is stored and loaded between the runs. 
The persistent attributes are the following: - -.Persistent attributes -[options="header", cols="1,3,3"] -|================= -|Attribute | Description | Type -|root | Node representing the root of the file system | Node -|node_deps | Implicit dependencies | dict mapping Node to signatures -|raw_deps | Implicit file dependencies which could not be resolved | dict mapping Node ids to any serializable type -|task_sigs | Signature of the tasks executed | dict mapping a Task computed uid to a hash -|================= - - -=== Support for c-like languages - -==== Compiled tasks and link tasks - -The tool _waflib.Tools.ccroot_ provides a system for creating object files and linking them into a single final file. The method _waflib.Tools.ccroot.apply_link_ is called after the method _waflib.TaskGen.process_source_ to create the link task. In pseudocode: - -[source,shishell] ---------------- -call the method process_source: - for each source file foo.ext: - process the file by extension - if the method create_compiled_task is used: - create a new task - set the output file name to be foo.ext.o - add the task to the list self.compiled_tasks - -call the method apply_link - for each name N in self.features: - find a class named N: - if the class N derives from 'waflib.Tools.ccroot.link_task': - create a task of that class, assign it to self.link_task - set the link_task inputs from self.compiled_tasks - set the link_task output name to be env.N_PATTERN % self.target - stop ---------------- - -This system is used for _assembly_, _C_, _C++_, _D_ and _fortran_ by default. Note that the method _apply_link_ is supposed to be called after the method _process_source_. - -We will now demonstrate how to support the following mini language: - -[source,shishell] ---------------- -cp: .ext -> .o -cat: *.o -> .exe ---------------- - -Here is the project file: - -// architecture_link -[source,python] ---------------- -def configure(ctx): - pass - -def build(ctx): - ctx(features='mylink', source='foo.ext faa.ext', target='bingo') - -from waflib.Task import Task -from waflib.TaskGen import feature, extension, after_method -from waflib.Tools import ccroot <1> - -@after_method('process_source') -@feature('mylink') -def call_apply_link(self): <2> - self.apply_link() - -class mylink(ccroot.link_task): <3> - run_str = 'cat ${SRC} > ${TGT}' - -class ext2o(Task): - run_str = 'cp ${SRC} ${TGT}' - -@extension('.ext') -def process_ext(self, node): - self.create_compiled_task('ext2o', node) <4> ---------------- - -<1> This import will bind the methods such as _create_compiled_task_ and _apply_link_task_ -<2> An alternate definition would be calling _waflib.TaskGen.feats[`mylink'] = [`apply_link']_ -<3> The link task must be a subclass of another link task class -<4> Calling the method _create_compiled_task_ - -The execution outputs will be the following: -// why the extra space after "setting top to"? 
-[source,shishell] ---------------- -$ waf distclean configure build -v -'distclean' finished successfully (0.005s) -Setting top to : /tmp/architecture_link -Setting out to : /tmp/architecture_link/build -'configure' finished successfully (0.008s) -Waf: Entering directory `/tmp/architecture_link/build' -[1/3] ext2o: foo.ext -> build/foo.ext.0.o -12:50:25 runner ['cp', '../foo.ext', 'foo.ext.0.o'] -[2/3] ext2o: faa.ext -> build/faa.ext.0.o -12:50:25 runner ['cp', '../faa.ext', 'faa.ext.0.o'] -[3/3] mylink: build/foo.ext.0.o build/faa.ext.0.o -> build/bingo -12:50:25 runner 'cat foo.ext.0.o faa.ext.0.o > bingo' -Waf: Leaving directory `/tmp/architecture_link/build' -'build' finished successfully (0.041s) ---------------- - -NOTE: Task generator instances have at most one link task instance - - - - -=== Writing re-usable Waf tools - -==== Adding a waf tool - -===== Importing the code - -The intent of the Waf tools is to promote high cohesion by moving all conceptually related methods and classes into separate files, hidden from the Waf core, and as independent from each other as possible. - -Custom Waf tools can be left in the projects, added to a custom waf file through the 'waflib/extras' folder, or used through 'sys.path' changes. - -The tools can import other tools directly through the 'import' keyword. The scripts however should always import the tools to the 'ctx.load' to limit the coupling. Compare for example: - -[source,python] ---------------- -def configure(ctx): - from waflib.extras.foo import method1 - method1(ctx) ---------------- - -and: - -[source,python] ---------------- -def configure(ctx): - ctx.load('foo') - ctx.method1() ---------------- - -The second version should be preferred, as it makes fewer assumptions on whether 'method1' comes from the module 'foo' or not, and on where the module 'foo' is located. - -===== Naming convention for C/C++/Fortran - -The tools 'compiler_c', 'compiler_cxx' and 'compiler_fc' use other waf tools to detect the presense of particular compilers. They provide a particular naming convention to give a chance to new tools to register themselves automatically and save the import in user scripts. The tools having names beginning by 'c_', 'cxx_' and 'fc_' will be tested. - -The registration code will be similar to the following: - -[source,python] ---------------- -from waflib.Tools.compiler_X import X_compiler -X_compiler['platform'].append('module_name') ---------------- - -where *X* represents the type of compiler ('c', 'cxx' or 'fc'), *platform* is the platform on which the detection should take place (linux, win32, etc), and *module_name* is the name of the tool to use. - -==== Command methods - -===== Subclassing is only for commands - -As a general rule, subclasses of 'waflib.Context.Context' are created only when a new user command is necessary. This is the case for example when a command for a specific variant (output folder) is required, or to provide a new behaviour. When this happens, the class methods 'recurse', 'execute' or the class attributes 'cmd', 'fun' are usually overridden. - -NOTE: If there is no new command needed, do not use subclassing. - -===== Domain-specific methods are convenient for the end users - -Although the Waf framework promotes the most flexible way of declaring tasks through task generators, it is often more convenient to declare domain-specific wrappers in large projects. 
For example, the samba project provides a function used as: - -[source,python] ---------------- -bld.SAMBA_SUBSYSTEM('NDR_NBT_BUF', - source = 'nbtname.c', - deps = 'talloc', - autoproto = 'nbtname.h' - ) ---------------- - -===== How to bind new methods - -New methods are commonly bound to the build context or to the configuration context by using the '@conf' decorator: - -[source,python] ---------------- -from waflib.Configure import conf - -@conf -def enterprise_program(self, *k, **kw): - kw['features'] = 'c cprogram debug_tasks' - return self(*k, **kw) - -def build(bld): - # no feature line - bld.enterprise_program(source='main.c', target='app') ---------------- - -The methods should always be bound in this manner or manually, as subclassing may create conflicts between tools written for different purposes. - - diff --git a/docs/book/asciidoc-dblatex.sty b/docs/book/asciidoc-dblatex.sty deleted file mode 100644 index 9e6d2f2296..0000000000 --- a/docs/book/asciidoc-dblatex.sty +++ /dev/null @@ -1,31 +0,0 @@ -%% -%% This style is derived from the docbook one. -%% -\NeedsTeXFormat{LaTeX2e} -\ProvidesPackage{asciidoc}[2008/06/05 AsciiDoc DocBook Style] -%% Just use the original package and pass the options. -\RequirePackageWithOptions{docbook} - -% Sidebar is a boxed minipage that can contain verbatim. -% Changed shadow box to double box. -\renewenvironment{sidebar}[1][0.95\textwidth]{ - \hspace{0mm}\newline% - \noindent\begin{Sbox}\begin{minipage}{#1}% - \setlength\parskip{\medskipamount}% -}{ - \end{minipage}\end{Sbox}\doublebox{\TheSbox}% -} - -% For DocBook literallayout elements, see `./dblatex/dblatex-readme.txt`. -\usepackage{alltt} - -\usepackage{color} -\usepackage{listings} -\definecolor{gray}{gray}{0.5} -\definecolor{plum}{rgb}{0.55078125,0.09765625,0.55859375} -\lstset{commentstyle=\color{plum}} -\lstdefinelanguage{shishell} { - morekeywords={}, - sensitive=false, - morecomment=[l]{\$} -} diff --git a/docs/book/asciidoc-dblatex.xsl b/docs/book/asciidoc-dblatex.xsl deleted file mode 100644 index 77b595573b..0000000000 --- a/docs/book/asciidoc-dblatex.xsl +++ /dev/null @@ -1,74 +0,0 @@ - - - - - - colorlinks,linkcolor=blue,pdfstartview=FitH - 0 - - 1 - 0 - 3 - 0 - 0 - 0 - - - - 1 - - - - \begin{alltt} - \normalfont{} - - \end{alltt} - - - - - \pagebreak[4] - - - - - - \newline - - - - - \begin{center} - \line(1,0){444} - \end{center} - - - - diff --git a/docs/book/build.txt b/docs/book/build.txt deleted file mode 100644 index 973c9c0bab..0000000000 --- a/docs/book/build.txt +++ /dev/null @@ -1,389 +0,0 @@ -== Builds - -We will now provide a detailed description of the build phase, which is used for processing the build targets. - -=== Essential build concepts - -==== Build order and dependencies - -To illustrate the various concepts that are part of the build process, we are now going to use a new example. -The files +foo.txt+ and +bar.txt+ will be created by copying the file +wscript+, and the file +foobar.txt+ will be created from the concatenation of the generated files. Here is a summary: footnote:[It is actually considered a best practice to avoid copying files. When this is required, consider installing files or re-using the examples provided under the folder `demos/subst` of the source ditribution.] - -[source,shishell] ---------------- -cp: wscript -> foo.txt -cp: wscript -> bar.txt -cat: foo.txt, bar.txt -> foobar.txt --------------- - -Each of the three lines represents a command to execute. 
While the _cp_ commands may be executed in any order or even in parallel, the _cat_ command may only be executed after all the others are done. The constraints on *the build order* are represented on the following http://en.wikipedia.org/wiki/Directed_acyclic_graph[Directed acyclic graph]: - -image::dag_tasks{PIC}["Task representation of the same build"{backend@docbook:,width=260:},align="center"] - -When the +wscript+ input file changes, the +foo.txt+ output file has to be created once again. The file +foo.txt+ is said to depend on the +wscript+ file. The *file dependencies* can be represented by a directed acyclic graph too: - -image::dag_nodes{PIC}["File dependencies on a simple build"{backend@docbook:,width=120:},align="center"] - -Building a project consists of executing the commands according to a schedule that respects these constraints. Faster builds are obtained when commands are executed in parallel (by using the build order), and when commands can be skipped (by using the dependencies). - -In Waf, the commands are represented by *task objects*. The dependencies are used by the task classes, and may be file-based or abstract to enforce particular constraints. - -==== Direct task declaration - -We will now represent the build from the previous section by declaring the tasks directly in the build section: - -// build_manual_tasks -[source,python] ---------------- -def configure(ctx): - pass - -from waflib.Task import Task -class cp(Task): <1> - def run(self): <2> - return self.exec_command('cp %s %s' % ( - self.inputs[0].abspath(), <3> - self.outputs[0].abspath() - ) - ) - -class cat(Task): - def run(self): - return self.exec_command('cat %s %s > %s' % ( - self.inputs[0].abspath(), - self.inputs[1].abspath(), - self.outputs[0].abspath() - ) - ) - -def build(ctx): - - cp_1 = cp(env=ctx.env) <4> - cp_1.set_inputs(ctx.path.find_resource('wscript')) <5> - cp_1.set_outputs(ctx.path.find_or_declare('foo.txt')) - ctx.add_to_group(cp_1) <6> - - cp_2 = cp(env=ctx.env) - cp_2.set_inputs(ctx.path.find_resource('wscript')) - cp_2.set_outputs(ctx.path.find_or_declare('bar.txt')) - ctx.add_to_group(cp_2) - - cat_1 = cat(env=ctx.env) - cat_1.set_inputs(cp_1.outputs + cp_2.outputs) - cat_1.set_outputs(ctx.path.find_or_declare('foobar.txt')) - ctx.add_to_group(cat_1) ---------------- - -<1> Task class declaration -<2> Waf tasks have a method named *run* to generate the targets -<3> Instances of _waflib.Task.Task_ have input and output objects representing the files to use (Node objects) -<4> Create a new task instance manually -<5> Set input and output files represented as _waflib.Node.Node_ objects -<6> Add the task to the build context for execution (the task is not executed immediately) - -The execution output will be the following: - -[source,shishell] ---------------- -$ waf clean build <1> -'clean' finished successfully (0.003s) -Waf: Entering directory `/tmp/build_manual_tasks/build' -[1/3] cp: wscript -> build/foo.txt -[2/3] cp: wscript -> build/bar.txt -[3/3] cat: build/foo.txt build/bar.txt -> build/foobar.txt -Waf: Leaving directory `/tmp/build_manual_tasks/build' -'build' finished successfully (0.047s) - -$ waf build <2> -Waf: Entering directory `/tmp/build_manual_tasks/build' -Waf: Leaving directory `/tmp/build_manual_tasks/build' -'build' finished successfully (0.007s) - -$ echo " " >> wscript <3> - -$ waf build -Waf: Entering directory `/tmp/build_manual_tasks/build' -[1/3] cp: wscript -> build/foo.txt <4> -[2/3] cp: wscript -> build/bar.txt -[3/3] cat: build/foo.txt
build/bar.txt -> build/foobar.txt -Waf: Leaving directory `/tmp/build_manual_tasks/build' -'build' finished successfully (0.043s) ---------------- - -<1> The tasks are not executed in the _clean_ command -<2> The build keeps track of the files that were generated to avoid generating them again -<3> Modify one of the source files -<4> Rebuild according to the dependency graph - -Please remember: - -. The execution order was *computed automatically*, by using the file inputs and outputs set on the task instances -. The dependencies were *computed automatically* (the files were rebuilt when necessary), by using the node objects (hashes of the file contents were stored between the builds and then compared) -. The tasks that have no order constraints are executed in parallel by default - -==== Task encapsulation by task generators - -Declaring the tasks directly is tedious and results in lengthy scripts. Feature-wise, the following is equivalent to the previous example: - -// build_task_gen -[source,python] ---------------- -def configure(ctx): - pass - -def build(ctx): - ctx(rule='cp ${SRC} ${TGT}', source='wscript', target='foo.txt') - ctx(rule='cp ${SRC} ${TGT}', source='wscript', target='bar.txt') - ctx(rule='cat ${SRC} > ${TGT}', source='foo.txt bar.txt', target='foobar.txt') ---------------- - -The *ctx(...)* call is a shortcut to the class _waflib.TaskGen.task_gen_, instances of this class are called *task generator objects*. The task generators are lazy containers and will only create the tasks and the task classes when they are actually needed: - -// build_lazy_tg -[source,python] ---------------- -def configure(ctx): - pass - -def build(ctx): - tg = ctx(rule='touch ${TGT}', target='foo') - print(type(tg)) - print(tg.tasks) - tg.post() - print(tg.tasks) - print(type(tg.tasks[0])) ---------------- - -Here is the output: - -[source,shishell] ---------------- -waf configure build -Setting top to : /tmp/build_lazy_tg -Setting out to : /tmp/build_lazy_tg/build -'configure' finished successfully (0.204s) -Waf: Entering directory `/tmp/build_lazy_tg/build' - <1> -[] <2> -[{task: foo -> foo}] <3> - <4> -[1/1] foo: -> build/foo -Waf: Leaving directory `/tmp/build_lazy_tg/build' -'build' finished successfully (0.023s) ---------------- - -<1> Task generator type -<2> The tasks created are stored in the list _tasks_ (0..n tasks may be added) -<3> Tasks are created after calling the method post() - it is usually called automatically internally -<4> A new task class was created dynamically for the target +foo+ - -==== Overview of the build phase - -A high level overview of the build process is represented on the following diagram: - -image::build_overview{PIC}["Overview of the build phase"{backend@docbook:,width=450:},align="center"] - -NOTE: The tasks are all created before any of them is executed. New tasks may be created after the build has started, but the dependencies have to be set by using low-level apis. - -=== More build options - -Although any operation can be executed as part of a task, a few scenarios are typical and it makes sense to provide convenience functions for them. - -==== Executing specific routines before or after the build - -User functions may be bound to be executed at two key moments during the build command (callbacks): - -. immediately before the build starts (bld.add_pre_fun) -. 
immediately after the build is completed successfully (bld.add_post_fun) - -Here is how to execute a test after the build is finished: - -// build_pre_post -[source,python] ---------------- -top = '.' -out = 'build' - -def options(ctx): - ctx.add_option('--exe', action='store_true', default=False, - help='execute the program after it is built') - -def configure(ctx): - pass - -def pre(ctx): <1> - print('before the build is started') - -def post(ctx): - print('after the build is complete') - if ctx.cmd == 'install': <2> - if ctx.options.exe: <3> - ctx.exec_command('/sbin/ldconfig') <4> - -def build(ctx): - ctx.add_pre_fun(pre) <5> - ctx.add_post_fun(post) ---------------- - -<1> The callbacks take the build context as unique parameter 'ctx' -<2> Access the command type -<3> Access to the command-line options -<4> A common scenario is to call ldconfig after the files are installed. -<5> Scheduling the functions for later execution. Python functions are objects too. - -Upon execution, the following output will be produced: - -[source,shishell] ---------------- -$ waf distclean configure build install --exe -'distclean' finished successfully (0.005s) -'configure' finished successfully (0.011s) -Waf: Entering directory `/tmp/build_pre_post/build' -before the build is started <1> -Waf: Leaving directory `/tmp/build_pre_post/build' -after the build is complete <2> -'build' finished successfully (0.004s) -Waf: Entering directory `/tmp/build_pre_post/build' -before the build is started -Waf: Leaving directory `/tmp/build_pre_post/build' -after the build is complete -/sbin/ldconfig: Can't create temporary cache file /etc/ld.so.cache~: Permission denied <3> -'install' finished successfully (15.730s) ---------------- - -<1> output of the function bound by 'bld.add_pre_fun' -<2> output of the function bound by 'bld.add_post_fun' -<3> execution at installation time - - -==== Installing files - -Three build context methods are provided for installing files created during or after the build: - -. install_files: install several files in a folder -. install_as: install a target with a different name -. 
symlink_as: create a symbolic link on the platforms that support it - -[source,python] ---------------- -def build(bld): - bld.install_files('${PREFIX}/include', ['a1.h', 'a2.h']) <1> - bld.install_as('${PREFIX}/dir/bar.png', 'foo.png') <2> - bld.symlink_as('${PREFIX}/lib/libfoo.so.1', 'libfoo.so.1.2.3') <3> - - env_foo = bld.env.derive() - env_foo.PREFIX = '/opt' - bld.install_as('${PREFIX}/dir/test.png', 'foo.png', env=env_foo) <4> - - start_dir = bld.path.find_dir('src/bar') - bld.install_files('${PREFIX}/share', ['foo/a1.h'], - cwd=start_dir, relative_trick=True) <5> - - bld.install_files('${PREFIX}/share', start_dir.ant_glob('**/*.png'), <6> - cwd=start_dir, relative_trick=True) ---------------- - -<1> Install various files in the target destination -<2> Install one file, changing its name -<3> Create a symbolic link -<4> Overriding the configuration set ('env' is optional in the three methods install_files, install_as and symlink_as) -<5> Install src/bar/foo/a1.h as seen from the current script into '$\{PREFIX}/share/foo/a1.h' -<6> Install the png files recursively, preserving the folder structure read from src/bar/ - -NOTE: the methods _install_files_, _install_as_ and _symlink_as_ will do something only during _waf install_ or _waf uninstall_; they have no effect in other build commands - -==== Listing the task generators and forcing specific task generators - -The _list_ command is used to display the task generators that are declared: - -// build_list -[source,python] ---------------- -top = '.' -out = 'build' - -def configure(ctx): - pass - -def build(ctx): - ctx(source='wscript', target='foo.txt', rule='cp ${SRC} ${TGT}') - ctx(target='bar.txt', rule='touch ${TGT}', name='bar') ---------------- - -By default, the name of the task generator is computed from the _target_ attribute: - -[source,shishell] ---------------- -$ waf configure list -'configure' finished successfully (0.005s) -foo.txt -bar -'list' finished successfully (0.008s) ---------------- - -The main usage of the name values is to force a partial build with the _--targets_ option. Compare the following: - -[source,shishell] ---------------- -$ waf clean build -'clean' finished successfully (0.003s) -Waf: Entering directory `/tmp/build_list/build' -[1/2] foo.txt: wscript -> build/foo.txt -[2/2] bar: -> build/bar.txt -Waf: Leaving directory `/tmp/build_list/build' -'build' finished successfully (0.028s) - -$ waf clean build --targets=foo.txt -'clean' finished successfully (0.003s) -Waf: Entering directory `/tmp/build_list/build' -[1/1] foo.txt: wscript -> build/foo.txt -Waf: Leaving directory `/tmp/build_list/build' -'build' finished successfully (0.022s) ---------------- - -==== Execution step by step for debugging (the _step_ command) - -The _step_ command is used to execute specific tasks and to return their exit status and any error message. It is particularly useful for debugging: - -[source,shishell] ---------------- -waf step --files=test_shlib.c,test_staticlib.c -Waf: Entering directory `/tmp/demos/c/build' -c: shlib/test_shlib.c -> build/shlib/test_shlib.c.1.o - -> 0 -cshlib: build/shlib/test_shlib.c.1.o -> build/shlib/libmy_shared_lib.so - -> 0 -c: stlib/test_staticlib.c -> build/stlib/test_staticlib.c.1.o - -> 0 -cstlib: build/stlib/test_staticlib.c.1.o -> build/stlib/libmy_static_lib.a - -> 0 -Waf: Leaving directory `/tmp/demos/c/build' -'step' finished successfully (0.201s) ---------------- - -In this case the +.so+ files were also rebuilt.
Since the files attribute is interpreted as a comma-separated list of regular expressions, the following will produce a different output: - -[source,shishell] ---------------- -$ waf step --files=test_shlib.c$ -Waf: Entering directory `/tmp/demos/c/build' -c: shlib/test_shlib.c -> build/shlib/test_shlib.c.1.o - -> 0 -Waf: Leaving directory `/tmp/demos/c/build' -'step' finished successfully (0.083s) ---------------- - -Finally, the tasks to execute may be prefixed by 'in:' or 'out:' to specify if it is a source or a target file: - -[source,shishell] ---------------- -$ waf step --files=out:build/shlib/test_shlib.c.1.o -Waf: Entering directory `/tmp/demos/c/build' -cc: shlib/test_shlib.c -> build/shlib/test_shlib.c.1.o - -> 0 -Waf: Leaving directory `/tmp/demos/c/build' -'step' finished successfully (0.091s) ---------------- - -NOTE: when using _waf step_, all tasks are executed sequentially, even if some of them return a non-zero exit status - diff --git a/docs/book/build_overview.semd b/docs/book/build_overview.semd deleted file mode 100644 index 4759a5e45d..0000000000 Binary files a/docs/book/build_overview.semd and /dev/null differ diff --git a/docs/book/callouts/1.png b/docs/book/callouts/1.png deleted file mode 100644 index 7d473430b7..0000000000 Binary files a/docs/book/callouts/1.png and /dev/null differ diff --git a/docs/book/callouts/10.png b/docs/book/callouts/10.png deleted file mode 100644 index 997bbc8246..0000000000 Binary files a/docs/book/callouts/10.png and /dev/null differ diff --git a/docs/book/callouts/11.png b/docs/book/callouts/11.png deleted file mode 100644 index ce47dac3f5..0000000000 Binary files a/docs/book/callouts/11.png and /dev/null differ diff --git a/docs/book/callouts/12.png b/docs/book/callouts/12.png deleted file mode 100644 index 31daf4e2f2..0000000000 Binary files a/docs/book/callouts/12.png and /dev/null differ diff --git a/docs/book/callouts/13.png b/docs/book/callouts/13.png deleted file mode 100644 index 14021a89c2..0000000000 Binary files a/docs/book/callouts/13.png and /dev/null differ diff --git a/docs/book/callouts/14.png b/docs/book/callouts/14.png deleted file mode 100644 index 64014b75fe..0000000000 Binary files a/docs/book/callouts/14.png and /dev/null differ diff --git a/docs/book/callouts/15.png b/docs/book/callouts/15.png deleted file mode 100644 index 0d65765fcf..0000000000 Binary files a/docs/book/callouts/15.png and /dev/null differ diff --git a/docs/book/callouts/2.png b/docs/book/callouts/2.png deleted file mode 100644 index 5d09341b2f..0000000000 Binary files a/docs/book/callouts/2.png and /dev/null differ diff --git a/docs/book/callouts/3.png b/docs/book/callouts/3.png deleted file mode 100644 index ef7b700471..0000000000 Binary files a/docs/book/callouts/3.png and /dev/null differ diff --git a/docs/book/callouts/4.png b/docs/book/callouts/4.png deleted file mode 100644 index adb8364eb5..0000000000 Binary files a/docs/book/callouts/4.png and /dev/null differ diff --git a/docs/book/callouts/5.png b/docs/book/callouts/5.png deleted file mode 100644 index 4d7eb46002..0000000000 Binary files a/docs/book/callouts/5.png and /dev/null differ diff --git a/docs/book/callouts/6.png b/docs/book/callouts/6.png deleted file mode 100644 index 0ba694af6c..0000000000 Binary files a/docs/book/callouts/6.png and /dev/null differ diff --git a/docs/book/callouts/7.png b/docs/book/callouts/7.png deleted file mode 100644 index 472e96f8ac..0000000000 Binary files a/docs/book/callouts/7.png and /dev/null differ diff --git a/docs/book/callouts/8.png 
b/docs/book/callouts/8.png deleted file mode 100644 index 5e60973c21..0000000000 Binary files a/docs/book/callouts/8.png and /dev/null differ diff --git a/docs/book/callouts/9.png b/docs/book/callouts/9.png deleted file mode 100644 index a0676d26cc..0000000000 Binary files a/docs/book/callouts/9.png and /dev/null differ diff --git a/docs/book/chains.txt b/docs/book/chains.txt deleted file mode 100644 index 120b32dfd5..0000000000 --- a/docs/book/chains.txt +++ /dev/null @@ -1,371 +0,0 @@ - -=== Name and extension-based file processing - -Transformations may be performed automatically based on the file name or on the extension. - -==== Refactoring repeated rule-based task generators into implicit rules - -The explicit rules described in the previous chapter become limited for processing several files of the same kind. The following code may lead to unmaintainable scripts and to slow builds (for loop): - -[source, python] ----------------- -def build(bld): - for x in 'a.lua b.lua c.lua'.split(): - y = x.replace('.lua', '.luac') - bld(source=x, target=y, rule='${LUAC} -s -o ${TGT} ${SRC}') - bld.install_files('${LUADIR}', x) ----------------- - -Rather, the rule should be removed from the user script, like this: - -[source,python] ---------------- -def build(bld): - bld(source='a.lua b.lua c.lua') ---------------- - -The equivalent logic may then be provided by using the following code. It may be located in either the same 'wscript', or in a waf tool: - -[source,python] ---------------- -from waflib import TaskGen -TaskGen.declare_chain( - name = 'luac', <1> - rule = '${LUAC} -s -o ${TGT} ${SRC}', <2> - shell = False, - ext_in = '.lua', <3> - ext_out = '.luac', <4> - reentrant = False, <5> - install_path = '${LUADIR}', <6> -) ---------------- - -<1> The name for the corresponding task class to use -<2> The rule is the same as for any rule-based task generator -<3> Input file, processed by extension -<4> Output files extensions separated by spaces. In this case there is only one output file -<5> The reentrant attribute is used to add the output files as source again, for processing by another implicit rule -<6> String representing the installation path for the output files, similar to the destination path from 'bld.install_files'. To disable installation, set it to False. - -==== Chaining more than one command - -Now consider the long chain 'uh.in' → 'uh.a' → 'uh.b' → 'uh.c'. The following implicit rules demonstrate how to generate the files while maintaining a minimal user script: - -[source,python] ---------------- -top = '.' 
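-# 'top' and 'out' designate the project root and the build directory; the declare_chain calls below provide the implicit .in -> .a -> .b -> .c rules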
-out = 'build' - -def configure(conf): - pass - -def build(bld): - bld(source='uh.in') - -from waflib import TaskGen -TaskGen.declare_chain(name='a', rule='cp ${SRC} ${TGT}', ext_in='.in', ext_out='.a',) -TaskGen.declare_chain(name='b', rule='cp ${SRC} ${TGT}', ext_in='.a', ext_out='.b',) -TaskGen.declare_chain(name='c', rule='cp ${SRC} ${TGT}', ext_in='.b', ext_out='.c', reentrant = False) ---------------- - -During the build phase, the correct compilation order is computed based on the extensions given: - -[source,shishell] ---------------- -$ waf distclean configure build -'distclean' finished successfully (0.000s) -'configure' finished successfully (0.090s) -Waf: Entering directory `/comp/waf/demos/simple_scenarios/chaining/build' -[1/3] a: uh.in -> build/uh.a -[2/3] b: build/uh.a -> build/uh.b -[3/3] c: build/uh.b -> build/uh.c -Waf: Leaving directory `/comp/waf/demos/simple_scenarios/chaining/build' -'build' finished successfully (0.034s) ---------------- - -==== Scanner methods - -Because transformation chains rely on implicit transformations, it may be desirable to hide some files from the list of sources. Or, some dependencies may be produced conditionally and may not be known in advance. A 'scanner method' is a kind of callback used to find additional dependencies just before the target is generated. For illustration purposes, let us start with a minimal project containing three files: the 'wscript', 'ch.in' and 'ch.dep' - -[source,shishell] ---------------- -$ cd /tmp/smallproject - -$ tree -. -|-- ch.dep -|-- ch.in -`-- wscript ---------------- - -The build will create a copy of 'ch.in' called 'ch.out'. Also, 'ch.out' must be rebuilt whenever 'ch.dep' changes. This corresponds more or less to the following Makefile: - -[source,make] ------------------ -ch.out: ch.in ch.dep - cp ch.in ch.out ------------------ - -The user script should only contain the following code: - -[source,python] ---------------- -top = '.' -out = 'build' - -def configure(conf): - pass - -def build(bld): - bld(source = 'ch.in') ---------------- - -The code below is independent of the user scripts and may be located in a Waf tool. - -[source,python] ---------------- -def scan_meth(task): <1> - node = task.inputs[0] - dep = node.parent.find_resource(node.name.replace('.in', '.dep')) <2> - if not dep: - raise ValueError("Could not find the .dep file for %r" % node) - return ([dep], []) <3> - -from waflib import TaskGen -TaskGen.declare_chain( - name = 'copy', - rule = 'cp ${SRC} ${TGT}', - ext_in = '.in', - ext_out = '.out', - reentrant = False, - scan = scan_meth, <4> -) -------------- -<1> The scanner method accepts a task object as input (not a task generator) -<2> Use node methods to locate the dependency (and raise an error if it cannot be found) -<3> Scanner methods return a tuple containing two lists. The first list contains the list of node objects to depend on. The second list contains private data such as debugging information. The results are cached between build calls so the contents must be serializable.
-<4> Add the scanner method to the chain declaration - -The execution trace will be the following: - -[source,shishell] -------------- -$ echo 1 > ch.in -$ echo 1 > ch.dep <1> - -$ waf distclean configure build -'distclean' finished successfully (0.001s) -'configure' finished successfully (0.001s) -Waf: Entering directory `/tmp/smallproject/build' -[1/1] copy: ch.in -> build/ch.out <2> -Waf: Leaving directory `/tmp/smallproject/build' -'build' finished successfully (0.010s) - -$ waf -Waf: Entering directory `/tmp/smallproject/build' -Waf: Leaving directory `/tmp/smallproject/build' -'build' finished successfully (0.005s) <3> - -$ echo 2 > ch.dep <4> - -$ waf -Waf: Entering directory `/tmp/smallproject/build' -[1/1] copy: ch.in -> build/ch.out <5> -Waf: Leaving directory `/tmp/smallproject/build' -'build' finished successfully (0.012s) -------------- - -<1> Initialize the file contents of 'ch.in' and 'ch.dep' -<2> Execute a first clean build. The file 'ch.out' is produced -<3> The target 'ch.out' is up-to-date because nothing has changed -<4> Change the contents of 'ch.dep' -<5> The dependency has changed, so the target is rebuilt - -Here are a few important points about scanner methods: - -. they are executed only when the target is not up-to-date. -. they may not modify the 'task' object or the contents of the configuration set 'task.env' -. they are executed in a single main thread to avoid concurrency issues -. the results of the scanner (tuple of two lists) are re-used between build executions (and it is possible to access those results programmatically) -. the make-like rules also accept a 'scan' argument (scanner methods are bound to the task rather than the task generators) -. they are used by Waf internally for c/c++ support, to add dependencies dynamically on the header files ('.c' → '.h') - - -==== Extension callbacks - -In the chain declaration from the previous sections, the attribute 'reentrant' was described to control whether the generated files are to be processed further or not. There are cases however where one of the two generated files must be declared (because it will be used as a dependency) but where it cannot be considered as a source file in itself (like a header in c/c\++). Now consider the two chains ('uh.in' → 'uh.a1' + 'uh.a2') and ('uh.a1' → 'uh.b') in the following example: - -[source,python] ---------------- -top = '.' -out = 'build' - -def configure(conf): - pass - -def build(bld): - obj = bld(source='uh.in') - -from waflib import TaskGen -TaskGen.declare_chain( - name = 'a', - rule = 'cp ${SRC} ${TGT}', - ext_in = '.in', - ext_out = ['.a1', '.a2'], - reentrant = True, -) - -TaskGen.declare_chain( - name = 'b', - rule = 'cp ${SRC} ${TGT}', - ext_in = '.a1', - ext_out = '.b', - reentrant = False, -) -------------- - -The following error message will be produced: - -[source,shishell] -------------- -$ waf distclean configure build -'distclean' finished successfully (0.001s) -'configure' finished successfully (0.001s) -Waf: Entering directory `/tmp/smallproject' -Waf: Leaving directory `/tmp/smallproject' -Cannot guess how to process bld:///tmp/smallproject/uh.a2 (got mappings ['.a1', '.in'] in - class TaskGen.task_gen) -> try conf.load(..)? -------------- - -The error message indicates that there is no way to process 'uh.a2'. Only files of extension '.a1' or '.in' can be processed. Internally, extension names are bound to callback methods.
The error is raised because no such method could be found, and here is how to register an extension callback globally: - -[source,python] ---------------- -@TaskGen.extension('.a2') -def foo(*k, **kw): - pass ---------------- - -To register an extension callback locally, a reference to the task generator object must be kept: - -[source,python] ---------------- -def build(bld): - obj = bld(source='uh.in') - def callback(*k, **kw): - pass - obj.mappings['.a2'] = callback ---------------- - -The exact method signature and typical usage for the extension callbacks are the following: - -[source,python] ---------------- -from waflib import TaskGen -@TaskGen.extension(".a", ".b") <1> -def my_callback(task_gen_object<2>, node<3>): - task_gen_object.create_task( - task_name, <4> - node, <5> - output_nodes) <6> ---------------- - -<1> Comma-separated list of extensions (strings) -<2> Task generator instance holding the data -<3> Instance of Node, representing a file (either source or build) -<4> The first argument to create a task is the name of the task class -<5> The second argument is the input node (or a list of nodes for several inputs) -<6> The last parameter is the output node (or a list of nodes for several outputs) - -The creation of new task classes will be described in the next section. - -==== Task class declaration - -Waf tasks are instances of the class Task.TaskBase. Yet, the base class provides only the bare minimum, and the immediate subclass 'Task.Task' is usually chosen in user scripts. We will now start over with a simple project containing only one 'wscript' file and an example file named 'uh.in'. A task class will be added. - -[source,python] ---------------- -top = '.' -out = 'build' - -def configure(conf): - pass - -def build(bld): - bld(source='uh.in') - -from waflib import Task, TaskGen - -@TaskGen.extension('.in') -def process(self, node): - tsk = self.create_task('abcd') <1> - print(tsk.__class__) - -class abcd(Task.Task): <2> - def run(self): <3> - print('executing...') - return 0 <4> ---------------- - -<1> Create a new instance of 'abcd'. The method 'create_task' is a shortcut to make certain the task will keep a reference to its task generator. -<2> Inherit the class Task located in the module Task.py -<3> The method run is called when the task is executed -<4> The task return status must be an integer, which is zero to indicate success. The tasks that have failed will be executed on subsequent builds - -The output of the build execution will be the following: - -[source,shishell] ---------------- -$ waf distclean configure build -'distclean' finished successfully (0.002s) -'configure' finished successfully (0.001s) -Waf: Entering directory `/tmp/simpleproject/build' - -[1/1] abcd: -executing...
-Waf: Leaving directory `/tmp/simpleproject/build' -'build' finished successfully (0.005s) ---------------- - -Although it is possible to write task classes in plain python, two functions (factories) are provided to simplify the work, for example: - -[source,python] ---------------- -Task.simple_task_type( <1> - 'xsubpp', <2> - rule = '${PERL} ${XSUBPP} ${SRC} > ${TGT}', <3> - color = 'BLUE', <4> - before = 'cc') <5> - -def build_it(task): - return 0 - -Task.task_type_from_func(<6> - 'sometask', <7> - func = build_it, <8> - vars = ['SRT'], - color = 'RED', - ext_in = '.in', - ext_out = '.out') <9> ---------------- - -<1> Create a new task class executing a rule string -<2> Task class name -<3> Rule to execute during the build -<4> Color for the output during the execution -<5> Execute the task instance before any instance of task classes named 'cc'. The opposite of 'before' is 'after' -<6> Create a new task class from a custom python function. The 'vars' attribute represents additional configuration set values to use as dependencies -<7> Task class name -<8> Function to use -<9> In this context, the extension names are meant to be used for computing the execution order with other tasks, without naming the other task classes explicitly - -Note that most attributes are common between the two function factories. More usage examples may be found in most Waf tools. - -==== Source attribute processing - -The first step in processing the source file attribute is to convert all file names into Nodes. Special methods may be mapped to intercept names by the exact file name entry (no extension). The Node objects are then added to the task generator attribute 'source'. - -The list of nodes is then consumed by regular extension mappings. Extension methods may re-inject the output nodes for further processing by appending them to the attribute 'source' (hence the name re-entrant provided in declare_chain). - -image::source{PIC}["Source attribute processing"{backend@docbook:,width=450:},align="center"] - diff --git a/docs/book/classes.semd b/docs/book/classes.semd deleted file mode 100644 index 53c00cd7cb..0000000000 Binary files a/docs/book/classes.semd and /dev/null differ diff --git a/docs/book/classes_build.semd b/docs/book/classes_build.semd deleted file mode 100644 index 45e9f47bfd..0000000000 Binary files a/docs/book/classes_build.semd and /dev/null differ diff --git a/docs/book/cls.eps b/docs/book/cls.eps deleted file mode 100644 index 939895bdf3..0000000000 --- a/docs/book/cls.eps +++ /dev/null @@ -1,836 +0,0 @@ -%!PS-Adobe-1.0 EPSF-3.0 -%%BoundingBox: 24 631 400 819 -%%Creator: Qt 4.6.2 -%%CreationDate: vie jun 25 14:14:20 2010 -%%Orientation: Portrait -%%Pages: (atend) -%%DocumentFonts: (atend) -%%EndComments -%%BeginProlog -% Prolog copyright (C) 2008 Nokia Corporation and/or its subsidiary(-ies). -% You may copy this prolog in any way that is directly related to this document. -% For other use of this prolog, see your licensing agreement for Qt.
-/BD{bind def}bind def/d2{dup dup}BD/ED{exch def}BD/D0{0 ED}BD/F{setfont}BD -/RL{rlineto}BD/CM{currentmatrix}BD/SM{setmatrix}BD/TR{translate}BD/SD -{setdash}BD/SC{aload pop setrgbcolor}BD/CR{currentfile read pop}BD/i{index} -BD/scs{setcolorspace}BD/DB{dict dup begin}BD/DE{end def}BD/ie{ifelse}BD/gs -{gsave}BD/gr{grestore}BD/w{setlinewidth}BD/d{setdash}BD/J{setlinecap}BD/j -{setlinejoin}BD/scn{3 array astore/BCol exch def}BD/SCN{3 array astore/PCol -exch def}BD/cm{6 array astore concat}BD/m{moveto}BD/l{lineto}BD/c{curveto}BD -/h{closepath}BD/W{clip}BD/W*{eoclip}BD/n{newpath}BD/q{gsave 10 dict begin}BD -/Q{end grestore}BD/re{4 2 roll m dup 0 exch RL exch 0 RL 0 exch neg RL h}BD -/S{gs PCol SC stroke gr n}BD/BT{gsave 10 dict begin/_m matrix CM def BCol -SC}BD/ET{end grestore}BD/Tf{/_fs ED findfont[_fs 0 0 _fs 0 0]makefont F}BD -/Tm{6 array astore concat}BD/Td{translate}BD/Tj{0 0 m show}BD/BDC{pop pop}BD -/EMC{}BD/BSt 0 def/WFi false def/BCol[1 1 1]def/PCol[0 0 0]def/BDArr[0.94 -0.88 0.63 0.50 0.37 0.12 0.06]def/level3{/languagelevel where{pop -languagelevel 3 ge}{false}ie}BD/QCIgray D0/QCIcolor D0/QCIindex D0/QCI{ -/colorimage where{pop false 3 colorimage}{exec/QCIcolor ED/QCIgray QCIcolor -length 3 idiv string def 0 1 QCIcolor length 3 idiv 1 sub{/QCIindex ED/_x -QCIindex 3 mul def QCIgray QCIindex QCIcolor _x get 0.30 mul QCIcolor _x 1 -add get 0.59 mul QCIcolor _x 2 add get 0.11 mul add add cvi put}for QCIgray -image}ie}BD/di{gs TR 1 i 1 eq{pop pop false 3 1 roll BCol SC imagemask}{dup -false ne{level3}{false}ie{/_ma ED 8 eq{/_dc[0 1]def/DeviceGray}{/_dc[0 1 0 1 -0 1]def/DeviceRGB}ie scs/_im ED/_mt ED/_h ED/_w ED <>/MaskDict <> -/InterleaveType 3 >> image}{pop 8 4 1 roll 8 eq{image}{QCI}ie}ie}ie gr}BD/BF -{gs BSt 1 eq{BCol SC WFi{fill}{eofill}ie}if BSt 2 ge BSt 8 le and{BDArr BSt -2 sub get/_sc ED BCol{1. exch sub _sc mul 1. 
exch sub}forall 3 array astore -SC WFi{fill}{eofill}ie}if BSt 9 ge BSt 14 le and{WFi{W}{W*}ie pathbbox 3 i 3 -i TR 4 2 roll 3 2 roll exch sub/_h ED sub/_w ED BCol SC 0.3 w n BSt 9 eq BSt -11 eq or{0 4 _h{dup 0 exch m _w exch l}for}if BSt 10 eq BSt 11 eq or{0 4 _w{ -dup 0 m _h l}for}if BSt 12 eq BSt 14 eq or{_w _h gt{0 6 _w _h add{dup 0 m _h -sub _h l}for}{0 6 _w _h add{dup 0 exch m _w sub _w exch l}for}ie}if BSt 13 -eq BSt 14 eq or{_w _h gt{0 6 _w _h add{dup _h m _h sub 0 l}for}{0 6 _w _h -add{dup _w exch m _w sub 0 exch l}for}ie}if stroke}if BSt 15 eq{}if BSt 24 -eq{}if gr}BD/f{/WFi true def BF n}BD/f*{/WFi false def BF n}BD/B{/WFi true -def BF S n}BD/B*{/WFi false def BF S n}BD/QI{/C save def pageinit q n}BD/QP{ -Q C restore showpage}BD/SPD{/setpagedevice where{<< 3 1 roll >> -setpagedevice}{pop pop}ie}BD/T1AddMapping{10 dict begin/glyphs ED/fnt ED -/current fnt/NumGlyphs get def/CMap fnt/CMap get def 0 1 glyphs length 1 sub -{glyphs exch get/gn ED current dup 256 mod/min ED 256 idiv/maj ED CMap dup -maj get dup null eq{pop 256 array 0 1 255{1 i exch/.notdef put}for}if dup -min gn put maj exch put/current current 1 add def}for fnt/CMap CMap put fnt -/NumGlyphs current put end}def/T1AddGlyphs{10 dict begin/glyphs ED/fnt ED -/current fnt/NumGlyphs get def/CMap fnt/CMap get def/CharStrings fnt -/CharStrings get def 0 1 glyphs length 2 idiv 1 sub{2 mul dup glyphs exch -get/gn ED 1 add glyphs exch get/cs ED current dup 256 mod/min ED 256 idiv -/maj ED CMap dup maj get dup null eq{pop 256 array 0 1 255{1 i exch/.notdef -put}for}if dup min gn put maj exch put CharStrings gn cs put/current current -1 add def}for fnt/CharStrings CharStrings put fnt/CMap CMap put fnt -/NumGlyphs current put end}def/StringAdd{1 i length 1 i length add string 3 -1 roll 2 i 0 3 i putinterval 2 i 2 i length 2 i putinterval pop pop}def -/T1Setup{10 dict begin dup/FontName ED (-Base) StringAdd cvx cvn/Font ED -/MaxPage Font/NumGlyphs get 1 sub 256 idiv def/FDepVector MaxPage 1 add -array def/Encoding MaxPage 1 add array def 0 1 MaxPage{dup Encoding exch dup -put dup/Page ED FontName (-) StringAdd exch 20 string cvs StringAdd cvn Font -0 dict copy d2/CMap get Page get/Encoding exch put definefont FDepVector -exch Page exch put}for FontName cvn <> definefont pop end}def - -/pageinit { -24 24 translate -% 193 *280 mm (portrait) -0 794 translate 0.666666666 -0.666666666 scale } def -%%EndProlog -/F1-Base -<< -/FontName /LiberationSans-Bold -/FontInfo <> -/FontType 1 -/PaintType 0 -/FontMatrix [.001 0 0 .001 0 0] -/FontBBox { 0 0 0 0 } -/Private << -/password 5839 -/MinFeature {16 16} -/BlueValues [] -/lenIV -1 ->> -/CharStrings << >> -/NumGlyphs 0 -/CMap 256 array ->> def -F1-Base [ -/.notdef - -/l - -/i - -/n - -/k - -/underscore -<81F8C00DFB0E04B107F8D3066507FCD306090E> -/t -<97F7E10DF7558215638B6C9675A10875A180AC8BB808F7CC074806E707D506B6F71005E106FB1007EF062F072706FBA7078B719078947F08957F9A85A08B08938B938C928C08928C928D948D0836077A85798778880878897789748B08090E> -/a - -/s - -/c - -/b - -/x -<92F8C00DF81D16FB0FF75305FB10FB5305FB2606F755F7A505FB4CF79305F72806F705FB4105F704F74105F72906FB4CFB9205F757FBA605FB2906090E> -/p - -/r - -/o - -/g - -/m - -/d - -/h - -] T1AddGlyphs -%%Page: 1 1 -%%BeginPageSetup -QI -(F1) T1Setup -%%EndPageSetup -q -Q -Q q -q -0 0 0 scn -/BSt 0 def -Q -q -0 0 0 scn -/BSt 0 def -0 0 0 SCN -0 w 2 J 2 j [] 0 d -Q -q -0 0 0 scn -/BSt 0 def -0 0 0 SCN -0 w 2 J 2 j [] 0 d -Q -q -1.01851851 0 0 1 0 0 cm -0 0 0 scn -/BSt 0 def -0 0 0 SCN -0 w 2 J 2 j [] 0 d -Q -q -1 1 1 scn -/BSt 1 def -0 0 m -564 0 l -564 
282 l -0 282 l -0 0 l -h -f* -Q -q -1 0 0 1 -40 -40 cm -0 0 0 scn -/BSt 0 def -1 0 0 SCN -0 w 2 J 2 j [] 0 d -300 310 m -300 70 l -S -300 110 m -300 70 l -S -470 190 m -470 70 l -S -380 190 m -470 190 l -S -430 60 m -330 60 l -S -300 210 m -300 70 l -S -120 60 m -260 60 l -S -130 160 m -300 160 l -S -540 190 m -470 190 l -S -300 260 m -300 70 l -S -90 110 m -300 110 l -S -110 210 m -300 210 l -S -90 310 m -300 310 l -S -300 160 m -300 70 l -S -120 260 m -300 260 l -S -470 190 m -470 70 l -S -470 280 m -470 70 l -S -Q -q -1 1 1 scn -/BSt 1 def -260 30 m -253 41 l -267 41 l -260 30 l -h -f* -1 1 1 scn -/BSt 1 def -260 30 m -253 41 l -267 41 l -260 30 l -h -f* -1 1 1 scn -/BSt 1 def -430 30 m -423 41 l -437 41 l -430 30 l -h -f* -1 1 1 scn -/BSt 1 def -430 30 m -423 41 l -437 41 l -430 30 l -h -f* -1 1 1 scn -/BSt 1 def -290 20 m -301 27 l -301 13 l -290 20 l -h -f* -1 1 1 scn -/BSt 1 def -220 20 m -209 13 l -209 27 l -220 20 l -h -f* -1 1 1 scn -/BSt 1 def -260 30 m -253 41 l -267 41 l -260 30 l -h -f* -1 1 1 scn -/BSt 1 def -260 30 m -253 41 l -267 41 l -260 30 l -h -f* -1 1 1 scn -/BSt 1 def -260 30 m -253 41 l -267 41 l -260 30 l -h -f* -1 1 1 scn -/BSt 1 def -430 30 m -423 41 l -437 41 l -430 30 l -h -f* -Q -q -1 0 0 1 -40 -40 cm -0 0 0 scn -/BSt 0 def -1 0 0 SCN -0 w 2 J 2 j [] 0 d -249 53 m -249 67 l -S -300 70 m -307 81 l -S -1 1 0 scn -/BSt 1 def -260 40 m -333 40 l -333 72 l -260 72 l -260 40 l -h -B* -0 0 0 SCN -0 w 2 J 2 j [] 0 d -q -0 0 0 scn -/BSt 1 def -BT -/F1 13.5000000 Tf 1 0 0 -1 0 0 Tm -265 -53 Td <0001> Tj -4 0 Td <0002> Tj -4 0 Td <0003> Tj -9 0 Td <0004> Tj -8 0 Td <0005> Tj -8 0 Td <0006> Tj -5 0 Td <0007> Tj -8 0 Td <0008> Tj -8 0 Td <0004> Tj -ET -Q -1 0 0 SCN -0 w 2 J 2 j [] 0 d -260 56 m -332 56 l -B* -260 64 m -332 64 l -B* -0 0 0 scn -/BSt 0 def -300 70 m -293 81 l -S -330 60 m -341 67 l -S -300 70 m -293 81 l -S -470 70 m -477 81 l -S -470 70 m -463 81 l -S -300 70 m -293 81 l -S -300 70 m -307 81 l -S -463 81 m -477 81 l -S -463 81 m -477 81 l -S -293 81 m -307 81 l -S -470 70 m -477 81 l -S -293 81 m -307 81 l -S -300 70 m -307 81 l -S -260 60 m -249 53 l -S -300 70 m -293 81 l -S -470 70 m -463 81 l -S -1 1 0 scn -/BSt 1 def -330 170 m -378 170 l -378 202 l -330 202 l -330 170 l -h -B* -0 0 0 SCN -0 w 2 J 2 j [] 0 d -q -0 0 0 scn -/BSt 1 def -BT -/F1 13.5000000 Tf 1 0 0 -1 0 0 Tm -335 -183 Td <0009> Tj -8 0 Td <0008> Tj -8 0 Td <0006> Tj -5 0 Td <0001> Tj -4 0 Td <0002> Tj -4 0 Td <000A> Tj -ET -Q -1 0 0 SCN -0 w 2 J 2 j [] 0 d -330 186 m -377 186 l -B* -330 194 m -377 194 l -B* -0 0 0 scn -/BSt 0 def -300 70 m -307 81 l -S -470 70 m -463 81 l -S -293 81 m -307 81 l -S -300 70 m -307 81 l -S -341 67 m -341 53 l -S -260 60 m -249 67 l -S -293 81 m -307 81 l -S -300 70 m -293 81 l -S -293 81 m -307 81 l -S -330 60 m -341 53 l -S -470 70 m -477 81 l -S -463 81 m -477 81 l -S -1 1 0 scn -/BSt 1 def -430 40 m -512 40 l -512 72 l -430 72 l -430 40 l -h -B* -0 0 0 SCN -0 w 2 J 2 j [] 0 d -q -0 0 0 scn -/BSt 1 def -BT -/F1 13.5000000 Tf 1 0 0 -1 0 0 Tm -435 -53 Td <0008> Tj -8 0 Td <0006> Tj -5 0 Td <0007> Tj -8 0 Td <0006> Tj -5 0 Td <0002> Tj -4 0 Td <0009> Tj -8 0 Td <0005> Tj -8 0 Td <0001> Tj -4 0 Td <0002> Tj -4 0 Td <0003> Tj -9 0 Td <0004> Tj -ET -Q -1 0 0 SCN -0 w 2 J 2 j [] 0 d -430 56 m -511 56 l -B* -430 64 m -511 64 l -B* -1 1 0 scn -/BSt 1 def -40 140 m -133 140 l -133 172 l -40 172 l -40 140 l -h -B* -0 0 0 SCN -0 w 2 J 2 j [] 0 d -q -0 0 0 scn -/BSt 1 def -BT -/F1 13.5000000 Tf 1 0 0 -1 0 0 Tm -45 -153 Td <0009> Tj -8 0 Td <000B> Tj -8 0 Td <000B> Tj 
-8 0 Td <000C> Tj -9 0 Td <000D> Tj -6 0 Td <000E> Tj -9 0 Td <000F> Tj -9 0 Td <000D> Tj -6 0 Td <0007> Tj -8 0 Td <0010> Tj -ET -Q -1 0 0 SCN -0 w 2 J 2 j [] 0 d -40 156 m -132 156 l -B* -40 164 m -132 164 l -B* -1 1 0 scn -/BSt 1 def -540 170 m -604 170 l -604 202 l -540 202 l -540 170 l -h -B* -0 0 0 SCN -0 w 2 J 2 j [] 0 d -q -0 0 0 scn -/BSt 1 def -BT -/F1 13.5000000 Tf 1 0 0 -1 0 0 Tm -545 -183 Td <0009> Tj -8 0 Td <000B> Tj -8 0 Td <000B> Tj -8 0 Td <0008> Tj -8 0 Td <0006> Tj -5 0 Td <0001> Tj -4 0 Td <0002> Tj -4 0 Td <000A> Tj -ET -Q -1 0 0 SCN -0 w 2 J 2 j [] 0 d -540 186 m -603 186 l -B* -540 194 m -603 194 l -B* -1 1 0 scn -/BSt 1 def -40 290 m -93 290 l -93 322 l -40 322 l -40 290 l -h -B* -0 0 0 SCN -0 w 2 J 2 j [] 0 d -q -0 0 0 scn -/BSt 1 def -BT -/F1 13.5000000 Tf 1 0 0 -1 0 0 Tm -45 -303 Td <0011> Tj -9 0 Td <0008> Tj -8 0 Td <0012> Tj -9 0 Td <0001> Tj -4 0 Td <0002> Tj -4 0 Td <000A> Tj -ET -Q -1 0 0 SCN -0 w 2 J 2 j [] 0 d -40 306 m -92 306 l -B* -40 314 m -92 314 l -B* -1 1 0 scn -/BSt 1 def -440 280 m -489 280 l -489 312 l -440 312 l -440 280 l -h -B* -0 0 0 SCN -0 w 2 J 2 j [] 0 d -q -0 0 0 scn -/BSt 1 def -BT -/F1 13.5000000 Tf 1 0 0 -1 0 0 Tm -445 -293 Td <0011> Tj -9 0 Td <0008> Tj -8 0 Td <0006> Tj -5 0 Td <0001> Tj -4 0 Td <0002> Tj -4 0 Td <000A> Tj -ET -Q -1 0 0 SCN -0 w 2 J 2 j [] 0 d -440 296 m -488 296 l -B* -440 304 m -488 304 l -B* -1 1 0 scn -/BSt 1 def -40 40 m -117 40 l -117 72 l -40 72 l -40 40 l -h -B* -0 0 0 SCN -0 w 2 J 2 j [] 0 d -q -0 0 0 scn -/BSt 1 def -BT -/F1 13.5000000 Tf 1 0 0 -1 0 0 Tm -45 -53 Td <0009> Tj -8 0 Td <000C> Tj -9 0 Td <000D> Tj -6 0 Td <000E> Tj -9 0 Td <000F> Tj -9 0 Td <000D> Tj -6 0 Td <0007> Tj -8 0 Td <0010> Tj -ET -Q -1 0 0 SCN -0 w 2 J 2 j [] 0 d -40 56 m -116 56 l -B* -40 64 m -116 64 l -B* -1 1 0 scn -/BSt 1 def -40 90 m -92 90 l -92 122 l -40 122 l -40 90 l -h -B* -0 0 0 SCN -0 w 2 J 2 j [] 0 d -q -0 0 0 scn -/BSt 1 def -BT -/F1 13.5000000 Tf 1 0 0 -1 0 0 Tm -45 -103 Td <0009> Tj -8 0 Td <0008> Tj -8 0 Td <0012> Tj -9 0 Td <0001> Tj -4 0 Td <0002> Tj -4 0 Td <000A> Tj -ET -Q -1 0 0 SCN -0 w 2 J 2 j [] 0 d -40 106 m -91 106 l -B* -40 114 m -91 114 l -B* -1 1 0 scn -/BSt 1 def -40 240 m -118 240 l -118 272 l -40 272 l -40 240 l -h -B* -0 0 0 SCN -0 w 2 J 2 j [] 0 d -q -0 0 0 scn -/BSt 1 def -BT -/F1 13.5000000 Tf 1 0 0 -1 0 0 Tm -45 -253 Td <0011> Tj -9 0 Td <000C> Tj -9 0 Td <000D> Tj -6 0 Td <000E> Tj -9 0 Td <000F> Tj -9 0 Td <000D> Tj -6 0 Td <0007> Tj -8 0 Td <0010> Tj -ET -Q -1 0 0 SCN -0 w 2 J 2 j [] 0 d -40 256 m -117 256 l -B* -40 264 m -117 264 l -B* -1 1 0 scn -/BSt 1 def -40 190 m -108 190 l -108 222 l -40 222 l -40 190 l -h -B* -0 0 0 SCN -0 w 2 J 2 j [] 0 d -q -0 0 0 scn -/BSt 1 def -BT -/F1 13.5000000 Tf 1 0 0 -1 0 0 Tm -45 -203 Td <0009> Tj -8 0 Td <000B> Tj -8 0 Td <000B> Tj -8 0 Td <0008> Tj -8 0 Td <0012> Tj -9 0 Td <0001> Tj -4 0 Td <0002> Tj -4 0 Td <000A> Tj -ET -Q -1 0 0 SCN -0 w 2 J 2 j [] 0 d -40 206 m -107 206 l -B* -40 214 m -107 214 l -B* -Q -q -1.01851851 0 0 1 0 0 cm -0 0 0 scn -/BSt 0 def -0 0 0 SCN -0 w 2 J 2 j [] 0 d -Q -q -0 0 0 scn -/BSt 0 def -0 0 0 SCN -0 w 2 J 2 j [] 0 d -Q -q -1 0 0 1 -40 -40 cm -1 1 0 scn -/BSt 1 def -0 0 0 SCN -0 w 2 J 2 j [] 0 d - -Q QP -%%Trailer -%%Pages: 1 -%%DocumentFonts: -%%EOF diff --git a/docs/book/cls.xmi b/docs/book/cls.xmi deleted file mode 100644 index 4529a5c30d..0000000000 --- a/docs/book/cls.xmi +++ /dev/null @@ -1,255 +0,0 @@ - - - - - umbrello uml modeller http://uml.sf.net - 1.5.8 - UnicodeUTF8 - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/docs/book/conclusion.txt b/docs/book/conclusion.txt deleted file mode 100644 index 18a60a2130..0000000000 --- a/docs/book/conclusion.txt +++ /dev/null @@ -1,15 +0,0 @@ -== Further reading - -Due to the amount of features provided by Waf, this book cannot be both complete and up-to-date. For greater understanding and practice the following links are recommended to the reader: - -.Recommended links -[options="header"] -|================ -|Link|Description -|https://waf.io/apidocs/index.html|The apidocs -|https://waf.io|The Waf project page and downloads -|https://github.com/waf-project/waf|Source code repository -|http://groups.google.com/group/waf-users|The Waf mailing-list -|http://waf-devel.blogspot.com/2011/01/python-32-and-build-system-kit.html|Information on the build system kit -|================ - diff --git a/docs/book/configuration.txt b/docs/book/configuration.txt deleted file mode 100644 index 390c8cc9ca..0000000000 --- a/docs/book/configuration.txt +++ /dev/null @@ -1,476 +0,0 @@ -== Project configuration - -The _configuration_ command is used to check if the requiremements for working on a project are met and to store the information. The parameters are then stored for use by other commands, such as the build command. - -=== Using persistent data - -==== Sharing data with the build - -The configuration context is used to store data which may be re-used during the build. Let's begin with the following example: - -// configuration_build -[source,python] ---------------- -top = '.' -out = 'build' - -def options(ctx): - ctx.add_option('--foo', action='store', default=False, help='Silly test') - -def configure(ctx): - ctx.env.FOO = ctx.options.foo <1> - ctx.find_program('touch', var='TOUCH') <2> - -def build(bld): - print(bld.env.TOUCH) - print(bld.env.FOO) <3> - bld(rule='${TOUCH} ${TGT}', target='foo.txt') <4> ---------------- - -<1> Store the option _foo_ into the variable _env_ (dict-like structure) -<2> Configuration routine used to find the program _touch_ and to store it into _ctx.env.TOUCH_ footnote:['find_program' may use the same variable from the OS environment during the search, for example 'CC=gcc waf configure'] -<3> Print the value of _ctx.env.FOO_ that was set during the configuration -<4> The variable _$\{TOUCH}_ corresponds to the variable _ctx.env.TOUCH_. - -Here is the execution output: - -[source,shishell] ---------------- -$ waf distclean configure build --foo=abcd -v -'distclean' finished successfully (0.005s) -Checking for program touch : /usr/bin/touch <1> -'configure' finished successfully (0.007s) -Waf: Entering directory `/tmp/configuration_build/build' -/usr/bin/touch <2> -abcd -[1/1] foo.txt: -> build/foo.txt -10:56:41 runner '/usr/bin/touch foo.txt' <3> -Waf: Leaving directory `/tmp/configuration_build/build' -'build' finished successfully (0.021s) ---------------- -<1> Output of the configuration test _find_program_ -<2> The value of _TOUCH_ -<3> Command-line used to create the target 'foo.txt' - -The variable _ctx.env_ is called a *Configuration set*, and is an instance of the class 'ConfigSet'. 
The class is a wrapper around Python dicts to handle serialization. For this reason it should be used for simple variables only (no functions or classes). The values are stored in a python-like format in the build directory: - -[source,shishell] ---------------- -$ tree -build/ -|-- foo.txt -|-- c4che -| |-- build.config.py -| `-- _cache.py -`-- config.log - -$ cat build/c4che/_cache.py -FOO = 'abcd' -PREFIX = '/usr/local' -TOUCH = '/usr/bin/touch' ---------------- - -NOTE: Reading and writing values to _ctx.env_ is possible in both configuration and build commands. Yet, the values are stored to a file only during the configuration phase. - -==== Configuration set usage - -We will now provide more examples of the configuration set usage. The object *ctx.env* provides convenience methods to access its contents: - -// configuration_sets -[source,python] ---------------- -top = '.' -out = 'build' - -def configure(ctx): - ctx.env['CFLAGS'] = ['-g'] <1> - ctx.env.CFLAGS = ['-g'] <2> - ctx.env.append_value('CXXFLAGS', ['-O2', '-g']) <3> - ctx.env.append_unique('CFLAGS', ['-g', '-O2']) - ctx.env.prepend_value('CFLAGS', ['-O3']) <4> - - print(type(ctx.env)) - print(ctx.env) - print(ctx.env.FOO) ----------------- - -<1> Key-based access; storing a list -<2> Attribute-based access (the two forms are equivalent) -<3> Append each element to the list _ctx.env.CXXFLAGS_, assuming it is a list -<4> Insert the values at the beginning. Note that there is no such method as _prepend_unique_ - -The execution will produce the following output: - -[source,shishell] ---------------- -$ waf configure - <1> -'CFLAGS' ['-O3', '-g', '-O2'] <2> -'CXXFLAGS' ['-O2', '-g'] -'PREFIX' '/usr/local' -[] <3> - -$ cat build/c4che/_cache.py <4> -CFLAGS = ['-O3', '-g', '-O2'] -CXXFLAGS = ['-O2', '-g'] -PREFIX = '/usr/local' ---------------- - -<1> The object _conf.env_ is an instance of the class ConfigSet defined in _waflib/ConfigSet.py_ -<2> The contents of _conf.env_ after the modifications -<3> When a key is undefined, it is assumed that it is a list (used by *append_value* above) -<4> The object _conf.env_ is stored by default in this file - -Copy and serialization apis are also provided: - -// configuration_copysets -[source,python] ---------------- -top = '.' -out = 'build' - -def configure(ctx): - ctx.env.FOO = 'TEST' - - env_copy = ctx.env.derive() <1> - - node = ctx.path.make_node('test.txt') <2> - env_copy.store(node.abspath()) <3> - - from waflib.ConfigSet import ConfigSet - env2 = ConfigSet() <4> - env2.load(node.abspath()) <5> - - print(node.read()) <6> ---------------- - -<1> Make a copy of _ctx.env_ - this is a shallow copy -<2> Use *ctx.path* to create a node object representing the file +test.txt+ -<3> Store the contents of *env_copy* into +test.txt+ -<4> Create a new empty ConfigSet object -<5> Load the values from +test.txt+ -<6> Print the contents of +test.txt+ - -Upon execution, the output will be the following: - -[source,shishell] ---------------- -$ waf distclean configure -'distclean' finished successfully (0.005s) -FOO = 'TEST' -PREFIX = '/usr/local' -'configure' finished successfully (0.006s) ---------------- - -// ===== multiple configuration sets? - -=== Configuration utilities - -==== Configuration methods - -The method _ctx.find_program_ seen previously is an example of a configuration method. Here are more examples: - -// configuration_methods -[source,python] ---------------- -top = '.' 
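-# configuration helpers: locate a program, check the Waf version, and search for a file in the given folders; each raises a configuration error on failure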
-out = 'build' - -def configure(ctx): - ctx.find_program('touch', var='TOUCH') - ctx.check_waf_version(mini='1.8.8') - ctx.find_file('fstab', ['/opt', '/etc']) ---------------- - -Although these methods are provided by the context class _waflib.Configure.ConfigurationContext_, they will not appear on that class in the https://waf.io/apidocs/index.html[API documentation]. For modularity reasons, they are defined as simple functions and then bound dynamically: - -[source,python] ---------------- -top = '.' -out = 'build' - -from waflib.Configure import conf <1> - -@conf <2> -def hi(ctx): - print('→ hello, world!') - -# hi = conf(hi) <3> - -def configure(ctx): - ctx.hi() <4> ---------------- - -<1> Import the decorator *conf* -<2> Use the decorator to bind the method _hi_ to the configuration context and build context classes. In practice, the configuration methods are only used during the configuration phase. -<3> Decorators are simple python functions. Python 2.3 does not support the *@* syntax so the function has to be called after the function declaration -<4> Use the method previously bound to the configuration context class - -The execution will produce the following output: - -[source,shishell] ---------------- -$ waf configure -→ hello, world! -'configure' finished successfully (0.005s) ---------------- - -==== Loading and using Waf tools - -For efficiency reasons, only a few configuration methods are present in the Waf core. Most configuration methods are loaded by extensions called *Waf tools*. -The main tools are located in the folder +waflib/Tools+, and the tools in the testing phase are located under the folder +waflib/extras+. -Yet, Waf tools may be used from any location on the filesystem. - -We will now demonstrate a very simple Waf tool named +dang.py+ which will be used to set 'ctx.env.DANG' from a command-line option: - -// configuration_tool -[source,python] ---------------- -#! /usr/bin/env python -# encoding: utf-8 - -print('→ loading the dang tool') - -from waflib.Configure import conf - -def options(opt): <1> - opt.add_option('--dang', action='store', default='', dest='dang') - -@conf -def read_dang(ctx): <2> - ctx.start_msg('Checking for the variable DANG') - if ctx.options.dang: - ctx.env.DANG = ctx.options.dang <3> - ctx.end_msg(ctx.env.DANG) - else: - ctx.end_msg('DANG is not set') - -def configure(ctx): <4> - ctx.read_dang() ---------------- - -<1> Provide command-line options -<2> Bind the function 'read_dang' as a new configuration method to call ctx.read_dang() below -<3> Set a persistent value from the current command-line options -<4> Provide a command named _configure_ accepting a configuration context instance as parameter - -For loading a tool, the method 'load' must be used during the configuration: - -[source,python] ---------------- -top = '.' -out = 'build' - -def options(ctx): - ctx.load('dang', tooldir='.') <1> - -def configure(ctx): - ctx.load('dang', tooldir='.') <2> - -def build(ctx): - print(ctx.env.DANG) <3> ---------------- - -<1> Load the options defined in _dang.py_ -<2> Load the tool dang.py. By default, load calls the method 'configure' defined in the tools.
-<3> The tool modifies the value of _ctx.env.DANG_ during the configuration - -Upon execution, the output will be the following: - -[source,shishell] ---------------- -$ waf configure --dang=hello -→ loading the dang tool -Checking for DANG : hello <1> -'configure' finished successfully (0.006s) - -$ waf -→ loading the dang tool <2> -Waf: Entering directory `/tmp/configuration_dang/build' -hello -Waf: Leaving directory `/tmp/configuration_dang/build' -'build' finished successfully (0.004s) ---------------- - -<1> First the tool is imported as a python module, and then the method _configure_ is called by _load_ -<2> The tools loaded during the configuration will be loaded during the build phase - -==== Multiple configurations - -The 'conf.env' object is an important point of the configuration which is accessed and modified by Waf tools and by user-provided configuration functions. The Waf tools do not enforce a particular structure for the build scripts, so the tools will only modify the contents of the default object. The user scripts may provide several 'env' objects in the configuration and pre-set or post-set specific values: - -[source,python] ---------------- -def configure(ctx): - env = ctx.env <1> - ctx.setenv('debug') <2> - ctx.env.CC = 'gcc' <3> - ctx.load('gcc') - - ctx.setenv('release', env) <4> - ctx.load('msvc') - ctx.env.CFLAGS = ['/O2'] - - print ctx.all_envs['debug'] <5> ---------------- - -<1> Save a reference to 'conf.env' -<2> Copy and replace 'conf.env' -<3> Modify 'conf.env' -<4> Copy and replace 'conf.env' again, from the initial data -<5> Recall a configuration set by its name - -=== Exception handling - -==== Launching and catching configuration exceptions - -Configuration helpers are methods provided by the conf object to help find parameters, for example the method 'conf.find_program' - -[source,python] ---------------- -top = '.' -out = 'build' - -def configure(ctx): - ctx.find_program('some_app') ---------------- - -When a test cannot complete properly, an exception of the type 'waflib.Errors.ConfigurationError' is raised. This often occurs when something is missing in the operating system environment or because a particular condition is not satisfied. For example: - -[source,shishell] ---------------- -$ waf -Checking for program some_app : not found - error: The program some_app could not be found ---------------- - -These exceptions may be raised manually by using 'conf.fatal': - -[source,python] ---------------- -top = '.' -out = 'build' - -def configure(ctx): - ctx.fatal("I'm sorry Dave, I'm afraid I can't do that") ---------------- - -Which will display the same kind of error: - -[source,shishell] ---------------- -$ waf configure - error: I'm sorry Dave, I'm afraid I can't do that -$ echo $? -1 ---------------- - -Here is how to catch configuration exceptions: - -// configuration_exception - -[source,python] ---------------- -top = '.' 
-out = 'build' - -def configure(ctx): - try: - ctx.find_program('some_app') - except ctx.errors.ConfigurationError: <1> - ctx.to_log('some_app was not found (ignoring)') <2> ---------------- - -<1> For convenience, the module _waflib.Errors_ is bound to _ctx.errors_ -<2> Adding information to the log file - -The execution output will be the following: - -[source,shishell] ---------------- -$ waf configure -Checking for program some_app : not found -'configure' finished successfully (0.029s) <1> - -$ cat build/config.log <2> -# project configured on Tue Jul 13 19:15:04 2010 by -# waf 1.8.8 (abi 98, python 20605f0 on linux2) -# using /home/waf/bin/waf configure -# -Checking for program some_app -not found -find program=['some_app'] paths=['/usr/local/bin', '/usr/bin'] var=None -> '' -from /tmp/configuration_exception: The program ['some_app'] could not be found -some_app was not found (ignoring) <3> ---------------- - -<1> The configuration completes without errors -<2> The log file contains useful information about the configuration execution -<3> Our log entry - -Catching the errors by hand can be inconvenient. For this reason, all *@conf* methods accept a parameter named 'mandatory' to suppress configuration errors. The code snippet is therefore equivalent to: - -[source,python] ---------------- -top = '.' -out = 'build' - -def configure(ctx): - ctx.find_program('some_app', mandatory=False) ---------------- - -As a general rule, clients should never rely on exit codes or returned values and must catch configuration exceptions. The tools should always raise configuration errors to display the errors and to give a chance to the clients to process the exceptions. - -==== Transactions - -Waf tools called during the configuration may use and modify the contents of 'conf.env' at will. Those changes may be complex to track and to undo. Fortunately, the configuration exceptions make it possible to simplify the logic and to go back to a previous state easily. The following example illustrates how to use a transaction to to use several tools at once: - -[source,python] ---------------- -top = '.' -out = 'build' - -def configure(ctx): - for compiler in ('gcc', 'msvc'): - try: - ctx.env.stash() - ctx.load(compiler) - except ctx.errors.ConfigurationError: - ctx.env.revert() - else: - break - else: - ctx.fatal('Could not find a compiler') ---------------- - -Though several calls to 'stash' can be made, the copies made are shallow, which means that any complex object (such as a list) modification will be permanent. For this reason, the following is a configuration anti-pattern: - -[source,python] ---------------- -def configure(ctx): - ctx.env.CFLAGS.append('-O2') ---------------- - -The methods should always be used instead: - -[source,python] ---------------- -def configure(ctx): - ctx.env.append_value('CFLAGS', '-O2') ---------------- - -//// -To conclude this chapter on the configuration, we will now insist a little bit on the roles of the configuration context and of the configuration set objects. The configuration context is meant as a container for non-persistent data such as methods, functions, code and utilities. This means in particular that the following is an acceptable way of sharing data with scripts and tools: - -[source,python] ---------------- -def configure(ctx): - ctx.logfile = ctx.bldnode.make_node('config.log').abspath() - ctx.load('some_tool') # ... def configure(ctx): print ctx.logfile ... ---------------- - -In practice, values are frequently needed in the build section too. 
Adding the data to 'conf.env' is therefore a logical way of separating the concerns between the code (configuration methods) and the persistent data. - -A typical application of this is -//// - diff --git a/docs/book/conftest.dia b/docs/book/conftest.dia deleted file mode 100644 index 6bbd2dea1b..0000000000 --- a/docs/book/conftest.dia +++ /dev/null @@ -1,867 +0,0 @@ -[conftest.dia was an 867-line Dia XML flowchart of the configuration tests; only its labels are recoverable: #A4#, #Start#, #Get one method#, #Execute the test#, #Error handler#, #Recoverable error#, #Fatal error#, #More tests to execute?#, #Failure#, #no#, #Success#, #End#] - diff --git a/docs/book/core.dot b/docs/book/core.dot deleted file mode 100644 index b3e7bfa58e..0000000000 --- a/docs/book/core.dot +++ /dev/null @@ -1,46 +0,0 @@ -digraph G { - label="Dependencies between the core modules"; - - Build[fillcolor="#fffea6",style=filled] - ConfigSet[fillcolor="#fffea6",style=filled] - Configure[fillcolor="#fffea6",style=filled] - Context[fillcolor="#fffea6",style=filled] - Logs[fillcolor="#fffea6",style=filled] - Nod3[fillcolor="#fffea6",style=filled, label="Node"] - Options[fillcolor="#fffea6",style=filled] - Runner[fillcolor="#fffea6",style=filled] - Scripting[fillcolor="#fffea6",style=filled] - TaskGen[fillcolor="#fffea6",style=filled] - Task[fillcolor="#fffea6",style=filled] - Utils[fillcolor="#fffea6",style=filled] - Errors[fillcolor="#fffea6",style=filled] - - Build -> Runner; - Build -> TaskGen; - Build -> ConfigSet; - Build -> Options; - - ConfigSet -> Utils; - ConfigSet -> Logs; - - Configure -> Build; - - Context -> Logs; - Context -> Nod3; - - Nod3 -> Utils; - - Options -> Context; - - Runner -> Task; - - Scripting -> Configure; - - TaskGen -> Task; - - //Task -> Utils; - Task -> Logs; - - Utils -> Errors; -} - diff --git a/docs/book/cprog.txt b/docs/book/cprog.txt deleted file mode 100644 index f9c7babab0..0000000000 --- 
a/docs/book/cprog.txt +++ /dev/null @@ -1,757 +0,0 @@ -== C and C++ projects - -Although Waf is language neutral, it is used very often for C and C++ projects. This chapter describes the Waf tools and functions used for these languages. - -=== Common script for C, C++ and D applications - -==== Predefined task generators - -The C/C++ builds consist in transforming (compiling) source files into object files, and to assemble (link) the object files at the end. In theory a single programming language should be sufficient for writing any application, but the situation is usually more complicated: - -. Source files may be created by other compilers in other languages (IDL, ASN1, etc) -. Additional files may enter in the link step (libraries, object files) and applications may be divided in dynamic or static libraries -. Different platforms may require different processing rules (manifest files on MS-Windows, etc) - -To conceal the implementation details and the portability concerns, each target (program, library) can be wrapped as single task generator object as in the following example: - -// cprog_wrappers -[source,python] ---------------- -def options(opt): - opt.load('compiler_c') - -def configure(conf): - conf.load('compiler_c') <1> - -def build(bld): - bld.program(source='main.c', target='app', use='myshlib mystlib') <2> - bld.stlib(source='a.c', target='mystlib') <3> - bld.shlib(source='b.c', target='myshlib', use='myobjects') <4> - bld.objects(source='c.c', target='myobjects') ---------------- - -<1> Use compiler_c to load the c routines and to find a compiler (for c++ use 'compiler_cxx' and 'compiler_d' for d) -<2> Declare a program built from _main.c_ and using two other libraries -<3> Declare a static library -<4> Declare a shared library, using the objects from 'myobjects' - -The targets will have different extensions and names depending on the platform. For example on Linux, the contents of the build directory will be: - -[source,shishell] ---------------- -$ tree build -build/ -|-- c4che -| |-- build.config.py -| `-- _cache.py -|-- a.c.1.o -|-- app <1> -|-- b.c.2.o -|-- c.c.3.o -|-- config.log -|-- libmyshlib.so <2> -|-- libmystlib.a -`-- main.c.0.o <3> ---------------- - -<1> Programs have no extension on Linux but will have '.exe' on Windows -<2> The '.so' extension for shared libraries on Linux will be '.dll' on Windows -<3> The '.o' object files use the original file name and an index to avoid errors in multiple compilations - -The build context methods _program_, _shlib_, _stlib_ and _objects_ return a single task generator with the appropriate features detected from the source list. For example, for a program having _.c_ files in the source attribute, the features added will be _"c cprogram"_, for a _d_ static library, _"d dstlib"_. - -==== Additional attributes - -The methods described previously can process many more attributes than just 'use'. 
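One of those attributes is 'features' itself, which the wrapper methods preset. As a reminder of what they expand to, the program declaration from the first example could also be written in the following, roughly equivalent form (a sketch reusing the same illustrative file names):

[source,python]
---------------
def build(bld):
    # equivalent to bld.program(source='main.c', target='app', use='myshlib mystlib'):
    # for a source list made of .c files, the detected features are 'c cprogram'
    bld(features='c cprogram', source='main.c', target='app', use='myshlib mystlib')
---------------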
Here is an advanced example: - -[source,python] ---------------- -def options(opt): - opt.load('compiler_c') - -def configure(conf): - conf.load('compiler_c') - -def build(bld): - bld.program( - source = 'main.c', <1> - target = 'appname', <2> - features = ['more', 'features'], <3> - - includes = ['.'], <4> - defines = ['LINUX=1', 'BIDULE'], - - lib = ['m'], <5> - libpath = ['/usr/lib'], - stlib = ['dl'], <6> - stlibpath = ['/usr/local/lib'], - linkflags = ['-g'], <7> - rpath = ['/opt/kde/lib'] <8> - vnum = '1.2.3', - - install_path = '${SOME_PATH}/bin', <9> - cflags = ['-O2', '-Wall'], <10> - cxxflags = ['-O3'], - dflags = ['-g'], - ) ---------------- - -<1> Source file list -<2> Target, converted automatically to +target.exe+ or +libtarget.so+, depending on the platform and type -<3> Additional features to add (for a program consisting in c files, the default will be _'c cprogram'_) -<4> Includes and defines -<5> Shared libraries and shared libraries link paths -<6> Static libraries and link paths -<7> Use linkflags for specific link flags (not for passing libraries) -<8> rpath and vnum, ignored on platforms that do not support them -<9> Programs and shared libraries are installed by default. To disable the installation, set None. -<10> Miscalleneous flags, applied to the source files that support them (if present) - -=== Include processing - -==== Execution path and flags - -Include paths are used by the C/C++ compilers for finding headers. When one header changes, the files are recompiled automatically. For example on a project having the following structure: - -[source,shishell] ---------------- -$ tree -. -|-- foo.h -|-- src -| |-- main.c -| `-- wscript -`-- wscript ---------------- - -The file 'src/wscript' will contain the following code: - -[source,python] ---------------- -def build(bld): - bld.program( - source = 'main.c', - target = 'myapp', - includes = '.. .') ---------------- - -The command-line (output by `waf -v`) will have the following form: - -[source,shishell] ---------------- -cc -I. -I.. -Isrc -I../src ../src/main.c -c -o src/main_1.o ---------------- - -Because commands are executed from the build directory, the folders have been converted to include flags in the following way: - -[source,shishell] ---------------- -.. -> -I.. -I. -. -> -I../src -Isrc ---------------- - -There are the important points to remember: - -. The includes are always given relative to the directory containing the wscript file -. The includes add both the source directory and the corresponding build directory for the task generator variant -. Commands are executed from the build directory, so the include paths must be converted -. System include paths should be defined during the configuration and added to INCLUDES variables (uselib) - -==== The Waf preprocessor - -Waf uses a preprocessor written in Python for adding the dependencies on the headers. A simple parser looking at #include statements would miss constructs such as: - -[source,c] ---------------- -#define mymacro "foo.h" -#include mymacro ---------------- - -Using the compiler for finding the dependencies would not work for applications requiring file preprocessing such as Qt. For Qt, special include files having the '.moc' extension must be detected by the build system and produced ahead of time. The c compiler could not parse such files. 
- -[source,c] ---------------- -#include "foo.moc" ---------------- - -Since system headers are not tracked by default, the waf preprocessor may miss dependencies written in the following form: - -[source,c] ---------------- -#if SOMEMACRO - /* an include in the project */ - #include "foo.h" -#endif ---------------- - -To write portable code and to ease debugging, it is strongly recommended to put all the conditions used within a project into a 'config.h' file. - -[source,python] ---------------- -def configure(conf): - conf.check( - fragment = 'int main() { return 0; }\n', - define_name = 'FOO', - mandatory = True) - conf.write_config_header('config.h') ---------------- - -For performance reasons, the implicit dependency on the system headers is ignored by default. The following code may be used to enable this behaviour: - -[source,python] ---------------- -from waflib import c_preproc -c_preproc.go_absolute = True ---------------- - -Additional tools such as https://github.com/waf-project/waf/blob/master/waflib/extras/gccdeps.py[gccdeps] or https://github.com/waf-project/waf/blob/master/waflib/extras//dumbpreproc.py[dumbpreproc] provide alternate dependency scanners that can be faster in certain cases (boost). - -NOTE: The Waf engine will detect if tasks generate headers necessary for the compilation and compute the build order accordingly. It may sometimes improve the performance of the scanner if the tasks creating headers provide the hint 'ext_out=[".h"]'. - -==== Dependency debugging - -The Waf preprocessor contains a specific debugging zone: - -[source,shishell] ---------------- -$ waf --zones=preproc ---------------- - -To display the dependencies obtained or missed, use the following: - -[source,shishell] ---------------- -$ waf --zones=deps - -23:53:21 deps deps for src:///comp/waf/demos/qt4/src/window.cpp: <1> - [src:///comp/waf/demos/qt4/src/window.h, bld:///comp/waf/demos/qt4/src/window.moc]; <2> - unresolved ['QtGui', 'QGLWidget', 'QWidget'] <3> ---------------- - -<1> File being preprocessed -<2> Headers found -<3> System headers discarded - -The dependency computation is performed only when the files are not up-to-date, so these commands will display something only when there is a file to compile. - -NOTE: The scanner is only called when C files or dependencies change. In the rare case of adding headers after a successful compilation, then it may be necessary to run 'waf clean build' to force a full scanning. - -=== Library interaction (use) - -==== Local libraries - -The attribute 'use' enables the link against libraries (static or shared), or the inclusion of object files when the task generator referenced is not a library. - -// cprog_use -[source,python] ---------------- -def build(bld): - bld.stlib( - source = 'test_staticlib.c', - target = 'mylib', - name = 'stlib1') <1> - - bld.program( - source = 'main.c', - target = 'app', - includes = '.', - use = ['stlib1']) <2> ---------------- - -<1> The name attribute must point at exactly one task generator -<2> The attribute 'use' contains the task generator names to use - -In this example, the file 'app' will be re-created whenever 'mylib' changes (order and dependency). By using task generator names, the programs and libraries declarations may appear in any order and across scripts. 
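Because the references are plain names, such declarations may indeed be spread over several scripts. A minimal sketch follows (the directory names are illustrative; the configuration part loading 'compiler_c' is unchanged and not repeated):

[source,python]
---------------
# wscript (top-level)
def build(bld):
    bld.recurse(['lib', 'app'])

# lib/wscript
def build(bld):
    bld.stlib(source='a.c', target='mylib', name='stlib1')

# app/wscript
def build(bld):
    bld.program(source='main.c', target='app', use='stlib1')
---------------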
For convenience, the name does not have to be defined, and will be pre-set from the target name: - -[source,python] ---------------- -def build(bld): - bld.stlib( - source = 'test_staticlib.c', - target = 'mylib') - - bld.program( - source = 'main.c', - target = 'app', - includes = '.', - use = ['mylib']) ---------------- - -The 'use' processing also exhibits a recursive behaviour. Let's illustrate it by the following example: - -// cprog_propagation -[source,python] ---------------- -def build(bld): - bld.shlib( - source = 'a.c', <1> - target = 'lib1') - - bld.stlib( - source = 'b.c', - use = 'cshlib', <2> - target = 'lib2') - - bld.shlib( - source = 'c.c', - target = 'lib3', - use = 'lib1 lib2') <3> - - bld.program( <4> - source = 'main.c', - target = 'app', - use = 'lib3') ---------------- - -<1> A simple shared library -<2> The 'cshlib' flags will be propagated to both the library and the program. -<3> 'lib3' uses both a shared library and a static library -<4> A program using 'lib3' - -Because of the shared library dependency 'lib1' → 'lib2', the program 'app' should link against both 'lib1' and 'lib3', but not against 'lib2': - -[source,shishell] ---------------- -$ waf -v -'clean' finished successfully (0.004s) -Waf: Entering directory `/tmp/cprog_propagation/build' -[1/8] c: a.c -> build/a.c.0.o -12:36:17 runner ['/usr/bin/gcc', '-fPIC', '../a.c', '-c', '-o', 'a.c.0.o'] -[2/8] c: b.c -> build/b.c.1.o -12:36:17 runner ['/usr/bin/gcc', '../b.c', '-c', '-o', 'b.c.1.o'] -[3/8] c: c.c -> build/c.c.2.o -12:36:17 runner ['/usr/bin/gcc', '-fPIC', '../c.c', '-c', '-o', 'c.c.2.o'] -[4/8] c: main.c -> build/main.c.3.o -12:36:17 runner ['/usr/bin/gcc', '../main.c', '-c', '-o', 'main.c.3.o'] -[5/8] cstlib: build/b.c.1.o -> build/liblib2.a -12:36:17 runner ['/usr/bin/ar', 'rcs', 'liblib2.a', 'b.c.1.o'] -[6/8] cshlib: build/a.c.0.o -> build/liblib1.so -12:36:17 runner ['/usr/bin/gcc', 'a.c.0.o', '-o', 'liblib1.so', '-shared'] -[7/8] cshlib: build/c.c.2.o -> build/liblib3.so -12:36:17 runner ['/usr/bin/gcc', 'c.c.2.o', '-o', 'liblib3.so', '-Wl,-Bstatic', '-L.', '-llib2', '-Wl,-Bdynamic', '-L.', '-llib1', '-shared'] -[8/8] cprogram: build/main.c.3.o -> build/app -12:36:17 runner ['/usr/bin/gcc', 'main.c.3.o', '-o', 'app', '-Wl,-Bdynamic', '-L.', '-llib1', '-llib3'] -Waf: Leaving directory `/tmp/cprog_propagation/build' -'build' finished successfully (0.144s) ---------------- - -To sum up the two most important aspects of the 'use' attribute: - -. The task generators may be created in any order and in different files, but must provide a unique name for the 'use' attribute -. The 'use' processing will iterate recursively over all the task generators involved, but the flags added depend on the target kind (shared/static libraries) - -==== Special local libraries - -===== Includes folders - -The use keywork may point at special libraries that do not actually declare a target. For example, header-only libraries are commonly used to add specific include paths to several targets: - -// cprog_incdirs -[source,python] ---------------- -def build(bld): - bld( - includes = '. 
src', - export_includes = 'src', <1> - name = 'com_includes') - - bld.stlib( - source = 'a.c', - target = 'shlib1', - use = 'com_includes') <2> - - bld.program( - source = 'main.c', - target = 'app', - use = 'shlib1', <3> - ) ---------------- - -<1> The 'includes' attribute is private, but 'export_includes' will be used by other task generators -<2> The paths added are relative to the other task generator -<3> The 'export_includes' will be propagated to other task generators - -===== Object files - -Here is how to enable specific compilation flags for particular files: - -// cprog_objects -[source,python] ---------------- -def build(bld): - bld.objects( <1> - source = 'test.c', - cflags = '-O3', - target = 'my_objs') - - bld.shlib( - source = 'a.c', - cflags = '-O2', <2> - target = 'lib1', - use = 'my_objs') <3> - - bld.program( - source = 'main.c', - target = 'test_c_program', - use = 'lib1') <4> ---------------- - -<1> Files will be compiled in c mode, but no program or library will be produced -<2> Different compilation flags may be used -<3> The objects will be added automatically in the link stage -<4> There is no object propagation to other programs or libraries to avoid duplicate symbol errors - -WARNING: Like static libraries, object files are often abused to copy-paste binary code. Try to minimize the executables size by using shared libraries whenever possible. - -===== Fake libraries - -Local libraries will trigger a recompilation whenever they change. The methods 'read_shlib' and 'read_stlib' can be used to add this behaviour to external libraries or to binary files present in the project. - -// cprog_fakelibs -[source,python] ---------------- -def build(bld): - bld.read_shlib('m', paths=['.', '/usr/lib64']) - bld.program(source='main.c', target='app', use='m') ---------------- - -The methods will try to find files such as 'libm.so' or 'libm.dll' in the specified paths to compute the required paths and dependencies. In this example, the target 'app' will be re-created whenever '/usr/lib64/libm.so' changes. These libraries are propagated between task generators just like shared or static libraries declared locally. - -==== Foreign libraries and flags - -When an element in the attribute 'use' does not match a local library, it is assumed that it represents a system library, and the the required flags are present in the configuration set 'env'. This system enables the addition of several compilation and link flags at once, as in the following example: - -// cprog_system -[source,python] ---------------- -import sys - -def options(opt): - opt.load('compiler_c') - -def configure(conf): - conf.load('compiler_c') - conf.env.INCLUDES_TEST = ['/usr/include'] <1> - - if sys.platform != 'win32': <2> - conf.env.DEFINES_TEST = ['TEST'] - conf.env.CFLAGS_TEST = ['-O0'] <3> - conf.env.LIB_TEST = ['m'] - conf.env.LIBPATH_TEST = ['/usr/lib'] - conf.env.LINKFLAGS_TEST = ['-g'] - conf.env.INCLUDES_TEST = ['/opt/gnome/include'] - -def build(bld): - mylib = bld.stlib( - source = 'test_staticlib.c', - target = 'teststaticlib', - use = 'TEST') <4> - - if mylib.env.CC_NAME == 'gcc': - mylib.cxxflags = ['-O2'] <5> ---------------- - -<1> For portability reasons, it is recommended to use INCLUDES instead of giving flags of the form -I/include. Note that the INCLUDES use used by both c and c++ -<2> Variables may be left undefined in platform-specific settings, yet the build scripts will remain identical. 
-<3> Declare a few variables during the configuration, the variables follow the convention VAR_NAME -<4> Add all the VAR_NAME corresponding to the _use variable_ NAME, which is 'TEST' in this example -<5> 'Model to avoid': setting the flags and checking for the configuration should be performed in the configuration section - -The variables used for C/C++ are the following: - -.Use variables and task generator attributes for C/C++ -[options="header",cols="1,1,3"] -|================= -|Uselib variable | Attribute | Usage -|LIB |lib | list of sharedlibrary names to use, without prefix or extension -|LIBPATH |libpath | list of search path for shared libraries -|STLIB |stlib | list of static library names to use, without prefix or extension -|STLIBPATH|stlibpath| list of search path for static libraries -|LINKFLAGS|linkflags| list of link flags (use other variables whenever possible) -|RPATH |rpath | list of paths to hard-code into the binary during linking time -|CFLAGS |cflags | list of compilation flags for c files -|CXXFLAGS |cxxflags | list of compilation flags for c++ files -|DFLAGS |dflags | list of compilation flags for d files -|INCLUDES |includes | include paths -|CXXDEPS | | a variable/list to trigger c++ file recompilations when it changes -|CCDEPS | | same as above, for c -|LINKDEPS | | same as above, for the link tasks -|DEFINES |defines | list of defines in the form [`key=value', ...] -|FRAMEWORK|framework| list of frameworks to use -|FRAMEWORKPATH|frameworkpath| list of framework paths to use -|ARCH |arch | list of architectures in the form ['ppc', 'x86'] -|================= - -The variables may be left empty for later use, and will not cause errors. During the development, the configuration cache files (for example, _cache.py) may be modified from a text editor to try different configurations without forcing a whole project reconfiguration. The files affected will be rebuilt however. - -=== Configuration helpers - -==== Configuration tests - -The method 'check' is used to detect parameters using a small build project. The main parameters are the following - -. msg: title of the test to execute -. okmsg: message to display when the test succeeds -. errmsg: message to display when the test fails -. env: environment to use for the build (conf.env is used by default) -. compile_mode: 'cc' or 'cxx' -. define_name: add a define for the configuration header when the test succeeds (in most cases it is calculated automatically) - -The errors raised are instances of 'waflib.Errors.ConfigurationError'. There are no return codes. - -Besides the main parameters, the attributes from c/c++ task generators may be used. 
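Before the fuller example below, here is a minimal sketch exercising the message parameters listed above (the fragment and the messages are illustrative):

[source,python]
---------------
def configure(conf):
    conf.check_cc(
        fragment  = 'int main(void) { return 0; }\n',
        msg       = 'Checking whether the compiler builds a trivial program',
        okmsg     = 'it does',
        errmsg    = 'it does not',
        mandatory = False)
---------------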
Here is a concrete example: - -// cprog_conf -[source,python] ---------------- -def configure(conf): - - conf.check(header_name='time.h', features='c cprogram') <1> - conf.check_cc(function_name='printf', header_name="stdio.h", mandatory=False) <2> - conf.check_cc(fragment='int main() {2+2==4;}\n', define_name="boobah") <3> - conf.check_cc(lib='m', cflags='-Wall', defines=['var=foo', 'x=y'], - uselib_store='M') <4> - conf.check_cxx(lib='linux', use='M', cxxflags='-O2') <5> - - conf.check_cc(fragment=''' - #include - int main() { printf("4"); return 0; } ''', - define_name = "booeah", - execute = True, - define_ret = True, - msg = "Checking for something") <6> - - conf.check(features='c', fragment='int main(){return 0;}') <7> - - conf.write_config_header('config.h') <8> ---------------- - -<1> Try to compile a program using the configuration header time.h, if present on the system, if the test is successful, the define HAVE_TIME_H will be added -<2> Try to compile a program with the function printf, adding the header stdio.h (the header_name may be a list of additional headers). All configuration tests are required by default (@conf methods) and will raise configuration exceptions. To conceal them, set the attribute 'mandatory' to False. -<3> Try to compile a piece of code, and if the test is successful, define the name boobah -<4> Modifications made to the task generator environment are not stored. When the test is successful and when the attribute uselib_store is provided, the names lib, cflags and defines will be converted into _use variables_ LIB_M, CFLAGS_M and DEFINES_M and the flag values are added to the configuration environment. -<5> Try to compile a simple c program against a library called 'linux', and reuse the previous parameters for libm by _use_ -<6> Execute a simple program, collect the output, and put it in a define when successful -<7> The tests create a build with a single task generator. By passing the 'features' attribute directly it is possible to disable the compilation or to create more complicated configuration tests. -<8> After all the tests are executed, write a configuration header in the build directory (optional). The configuration header is used to limit the size of the command-line. - -Here is an example of a +config.h+ produced with the previous test code: - -[source,c] ---------------- -/* Configuration header created by Waf - do not edit */ -#ifndef _CONFIG_H_WAF -#define _CONFIG_H_WAF - -#define HAVE_PRINTF 1 -#define HAVE_TIME_H 1 -#define boobah 1 -#define booeah "4" - -#endif /* _CONFIG_H_WAF */ ---------------- - -The file +_cache.py+ will contain the following variables: - -[source,python] ---------------- -DEFINES_M = ['var=foo', 'x=y'] -CXXFLAGS_M = ['-Wall'] -CFLAGS_M = ['-Wall'] -LIB_M = ['m'] -boobah = 1 -booeah = '4' -defines = {'booeah': '"4"', 'boobah': 1, 'HAVE_TIME_H': 1, 'HAVE_PRINTF': 1} -dep_files = ['config.h'] -waf_config_files = ['/compilation/waf/demos/adv/build/config.h'] ---------------- - -==== Advanced tests - -The methods 'conf.check' create a build context and a task generator internally. This means that the attributes 'includes', 'defines', 'cxxflags' may be used (not all shown here). 
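For instance, a small sketch passing a few of those attributes (the macro, the paths and the flags are illustrative, and a C++ compiler is assumed to be configured):

[source,python]
---------------
def configure(conf):
    conf.check_cxx(
        fragment  = 'int main() { return MY_MACRO; }\n',
        includes  = ['.'],
        defines   = ['MY_MACRO=0'],
        cxxflags  = ['-Wall'],
        msg       = 'Checking a c++ fragment with extra attributes',
        mandatory = False)
---------------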
Advanced tests may be created by passing feature arguments: - -// cprog_cfg_advanced -[source,python] ---------------- -from waflib.TaskGen import feature, before_method - -@feature('special_test') -@before_method('process_source') -def my_special_test(self): - self.bld(rule='touch ${TGT}', target='foo') <1> - self.bld(rule='cp ${SRC} ${TGT}', source='foo', target='bar') - self.source = [] <2> - -def configure(conf): - conf.check_cc(features='special_test', msg='my test!') <3> ---------------- - -<1> Create a task generator from another task generator -<2> Disable the compilation of +test.c+ by setting no source files -<3> Use the feature special_test - -==== Creating configuration headers - -Adding lots of command-line define values increases the size of the command-line and makes it harder to review the flags when errors occur. Besides that, the defines passed on the command-line may fail unexpectedly with different compilers and command execution contexts. For example, define values containing quotes may be misinterpreted in Visual Studio response files. It is therefore a best practice to use configuration headers whenever possible. - -Writing configuration headers can be performed using the following methods: - -[source,python] ---------------- -def configure(conf): - conf.define('NOLIBF', 1) - conf.undefine('NOLIBF') - conf.define('LIBF', 1) - conf.define('LIBF_VERSION', '1.0.2') - conf.write_config_header('config.h') ---------------- - -The code snipped will produce the following 'config.h' in the build directory: - -[source,shishell] ---------------- -build/ -|-- c4che -| |-- build.config.py -| `-- _cache.py -|-- config.log -`-- config.h ---------------- - -The contents of the config.h for this example are: - -[source,c] ---------------- -/* Configuration header created by Waf - do not edit */ -#ifndef _CONFIG_H_WAF -#define _CONFIG_H_WAF - -/* #undef NOLIBF */ -#define LIBF 1 -#define LIBF_VERSION "1.0.2" - -#endif /* _CONFIG_H_WAF */ ---------------- - -NOTE: By default, the defines are moved from the command-line into the configuration header. This means that the attribute _conf.env.DEFINE_ is cleared by this operation. To prevent this behaviour, use 'conf.write_config_header(remove=False)' - -==== Pkg-config - -Instead of duplicating the configuration detection in all dependent projects, configuration files may be written when libraries are installed. To ease the interaction with build systems based on Make (cannot query databases or apis), small applications have been created for reading the cache files and to interpret the parameters (with names traditionally ending in '-config'): http://pkg-config.freedesktop.org/wiki/[pkg-config], wx-config, sdl-config, etc. - -The method 'check_cfg' is provided to ease the interaction with these applications. 
Here are a few examples: - -// cprog_pkgconfig -[source,python] ---------------- -def options(opt): - opt.load('compiler_c') - -def configure(conf): - conf.load('compiler_c') - - conf.check_cfg(atleast_pkgconfig_version='0.0.0') <1> - pango_version = conf.check_cfg(modversion='pango') <2> - - conf.check_cfg(package='pango') <3> - conf.check_cfg(package='pango', uselib_store='MYPANGO', - args=['--cflags', '--libs']) <4> - - conf.check_cfg(package='pango', <5> - args=['pango >= 0.1.0', 'pango < 9.9.9', '--cflags', '--libs'], - msg="Checking for 'pango 0.1.0'") <6> - - conf.check_cfg(path='sdl-config', args='--cflags --libs', - package='', uselib_store='SDL') <7> - conf.check_cfg(path='mpicc', args='--showme:compile --showme:link', - package='', uselib_store='OPEN_MPI', mandatory=False) <8> ---------------- - -<1> Check for the pkg-config version -<2> Retrieve the module version for a package as a string. If there were no errors, 'PANGO_VERSION' is defined. It can be overridden with the attribute _uselib_store='MYPANGO'_. -<3> Check if the pango package is present, and define _HAVE_PANGO_ (calculated automatically from the package name) -<4> Beside defining _HAVE_MYPANGO_, extract and store the relevant flags to the _use variable_ MYPANGO (_LIB_MYPANGO_, _LIBPATH_MYPANGO_, etc) -<5> Like the previous test, but with pkg-config clauses to enforce a particular version number -<6> Display a custom message on the output. The attributes 'okmsg' and 'errmsg' represent the messages to display in case of success and error respectively -<7> Obtain the flags for sdl-config. The example is applicable for other configuration programs such as wx-config, pcre-config, etc -<8> Suppress the configuration error which is raised whenever the program to execute is not found or returns a non-zero exit status - -Due to the amount of flags, the lack of standards between config applications, and to the compiler-dependent flags (-I for gcc, /I for msvc), the pkg-config output is parsed before setting the corresponding _use variables_ in a go. The function 'parse_flags(line, uselib, env)' in the Waf module c_config.py performs the flag extraction. 
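Once stored, these flags are consumed like any other _use variables_. Assuming the checks above succeeded, a build declaration could reference them as follows (the source and target names are illustrative):

[source,python]
---------------
def build(bld):
    # MYPANGO and SDL were filled by check_cfg(..., uselib_store=...) during the configuration
    bld.program(source='main.c', target='app', use='MYPANGO SDL')
---------------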
- -The outputs are written in the build directory into the file 'config.log': - -[source,shishell] ------------------- -# project configured on Tue Aug 31 17:30:21 2010 by -# waf 1.8.8 (abi 98, python 20605f0 on linux2) -# using /home/waf/bin/waf configure -# ---- -Setting top to -/disk/comp/waf/docs/book/examples/cprog_pkgconfig ---- -Setting out to -/disk/comp/waf/docs/book/examples/cprog_pkgconfig/build ---- -Checking for program pkg-config -/usr/bin/pkg-config -find program=['pkg-config'] paths=['/usr/local/bin', '/usr/bin'] var='PKGCONFIG' -> '/usr/bin/pkg-config' ---- -Checking for pkg-config version >= 0.0.0 -['/usr/bin/pkg-config', '--atleast-pkgconfig-version=0.0.0'] -yes -['/usr/bin/pkg-config', '--modversion', 'pango'] -out: 1.28.0 - ---- -Checking for pango -['/usr/bin/pkg-config', 'pango'] -yes ---- -Checking for pango -['/usr/bin/pkg-config', 'pango'] -yes ---- -Checking for pango 0.1.0 -['/usr/bin/pkg-config', 'pango >= 0.1.0', 'pango < 9.9.9', '--cflags', '--libs', 'pango'] -out: -pthread -I/usr/include/pango-1.0 -I/usr/include/glib-2.0 -I/usr/lib64/glib-2.0/include - -pthread -lpango-1.0 -lgobject-2.0 -lgmodule-2.0 -lgthread-2.0 -lrt -lglib-2.0 - -yes ---- -Checking for sdl-config -['sdl-config', '--cflags', '--libs'] -out: -I/usr/include/SDL -D_GNU_SOURCE=1 -D_REENTRANT --L/usr/lib64 -lSDL -lpthread - -yes ---- -Checking for mpicc -['mpicc', '--showme:compile', '--showme:link'] -out: -pthread libtool: link: -pthread -L/usr/lib64 -llammpio -llamf77mpi -lmpi -llam -lutil -ldl ------------------- - -After such a configuration, the configuration set contents will be similar to the following: - -[source,python] ---------------- -'CFLAGS_OPEN_MPI' ['-pthread'] -'CFLAGS_PANGO' ['-pthread'] -'CXXFLAGS_OPEN_MPI' ['-pthread'] -'CXXFLAGS_PANGO' ['-pthread'] -'DEFINES' ['HAVE_PANGO=1', 'HAVE_MYPANGO=1', 'HAVE_SDL=1', 'HAVE_OPEN_MPI=1'] -'DEFINES_SDL' ['_GNU_SOURCE=1', '_REENTRANT'] -'INCLUDES_PANGO' ['/usr/include/pango-1.0', '/usr/include/glib-2.0', '/usr/lib64/glib-2.0/include'] -'INCLUDES_SDL' ['/usr/include/SDL'] -'LIBPATH_OPEN_MPI' ['/usr/lib64'] -'LIBPATH_SDL' ['/usr/lib64'] -'LIB_OPEN_MPI' ['lammpio', 'lamf77mpi', 'mpi', 'lam', 'util', 'dl'] -'LIB_PANGO' ['pango-1.0', 'gobject-2.0', 'gmodule-2.0', 'gthread-2.0', 'rt', 'glib-2.0'] -'LIB_SDL' ['SDL', 'pthread'] -'LINKFLAGS_OPEN_MPI' ['-pthread'] -'LINKFLAGS_PANGO' ['-pthread'] -'PKGCONFIG' '/usr/bin/pkg-config' -'PREFIX' '/usr/local' -'define_key' ['HAVE_PANGO', 'HAVE_MYPANGO', 'HAVE_SDL', 'HAVE_OPEN_MPI'] ---------------- - diff --git a/docs/book/dag_nodes.dot b/docs/book/dag_nodes.dot deleted file mode 100644 index 3f37f81ff7..0000000000 --- a/docs/book/dag_nodes.dot +++ /dev/null @@ -1,12 +0,0 @@ -digraph G { - - foo [label="foo.txt",fillcolor="#aef9a5",style=filled] - bar [label="bar.txt",fillcolor="#aef9a5",style=filled] - wscript [label="wscript",fillcolor="#aef9a5",style=filled] - foobar [label="foobar.txt",fillcolor="#aef9a5",style=filled] - wscript -> foo; - wscript -> bar; - foo -> foobar; - bar -> foobar; -} - diff --git a/docs/book/dag_tasks.dot b/docs/book/dag_tasks.dot deleted file mode 100644 index cf0ec0babe..0000000000 --- a/docs/book/dag_tasks.dot +++ /dev/null @@ -1,9 +0,0 @@ -digraph G { - - A [label="cp: wscript -> foo.txt",fillcolor="#fffea6",style=filled] - B [label="cp: wscript -> bar.txt",fillcolor="#fffea6",style=filled] - C [label="cat: foo.txt, bar.txt -> foobar.txt",fillcolor="#fffea6",style=filled] - A -> C; - B -> C; -} - diff --git a/docs/book/default.style b/docs/book/default.style deleted file 
mode 100644 index 712f0c2f0d..0000000000 --- a/docs/book/default.style +++ /dev/null @@ -1,76 +0,0 @@ -bgcolor "white"; // the background color for documents -context gray; // the color for context lines (when specified with line ranges) - -normal black ; -keyword darkblue; // for language keywords -type darkgreen ; // for basic types -usertype teal ; // for user defined types -string darkgreen ; // for strings and chars -regexp orange f ; // for strings and chars -specialchar pink f ; // for special chars, e.g., \n, \t, \\ -comment gray; // for comments -number black ; // for literal numbers -preproc teal b; // for preproc directives (e.g. #include, import) -symbol black ; // for simbols (e.g. <, >, +) -function black; // for function calls and declarations -cbracket black; // for block brackets (e.g. {, }) -todo bg:cyan b; // for TODO and FIXME -code bg:brightgreen b; // for code snippets - -//Predefined variables and functions (for instance glsl) -predef_var darkblue ; -predef_func darkblue b ; - -// for OOP -classname teal ; // for class names, e.g., in Java and C++ - -// line numbers -linenum black f; - -// Internet related -url blue u, f; - -// other elements for ChangeLog and Log files -date blue b ; -time, file darkblue b ; -ip, name darkgreen ; - -// for Prolog, Perl... -variable darkgreen ; - -// explicit for Latex -italics darkgreen i; -bold darkgreen b; -underline darkgreen u; -fixed green f; -argument darkgreen; -optionalargument purple b; -math orange; -bibtex blue; - -// for diffs -oldfile orange; -newfile darkgreen; -difflines blue; - -// for css -selector purple; -property blue; -value darkgreen i; - -// for oz -atom orange; -meta i; - -// for file system -path orange; - -// for C (or other language) labels -label teal b; - -// for errors -error purple; -warning darkgreen; - - - diff --git a/docs/book/dev.eps b/docs/book/dev.eps deleted file mode 100644 index c945bc2f39..0000000000 --- a/docs/book/dev.eps +++ /dev/null @@ -1,719 +0,0 @@ -%!PS-Adobe-3.0 EPSF-3.0 -%%Creator: inkscape 0.46 -%%Pages: 1 -%%Orientation: Portrait -%%BoundingBox: 20 6 469 366 -%%HiResBoundingBox: 20.371133 6.367207 468.24456 365.78906 -%%EndComments -%%BeginSetup -%%EndSetup -%%Page: 1 1 -0 384 translate -0.8 -0.8 scale -0 0 0 setrgbcolor -[] 0 setdash -1 setlinewidth -0 setlinejoin -0 setlinecap -gsave [1 0 0 1 0 0] concat -0 0 0 setrgbcolor -[] 0 setdash -1 setlinewidth -0 setlinejoin -0 setlinecap -newpath -71.9 415.4 moveto -80.9 415.4 lineto -579.2 415.4 moveto -570.2 415.4 lineto -stroke -gsave [1 0 0 1 63.6 419.9] concat -gsave [1 0 0 1 -6.6737 0.0003662109] concat -gsave [1 0 0 -1 0 0] concat -gsave -/newlatin1font {findfont dup length dict copy dup /Encoding ISOLatin1Encoding put definefont} def -/ArialMT-ISOLatin1 /ArialMT newlatin1font -12 scalefont -setfont -0 0 0 setrgbcolor -newpath -0 0 moveto -(0) show -grestore -grestore -grestore -grestore -0 0 0 setrgbcolor -[] 0 setdash -1 setlinewidth -0 setlinejoin -0 setlinecap -newpath -71.9 345.9 moveto -80.9 345.9 lineto -579.2 345.9 moveto -570.2 345.9 lineto -stroke -gsave [1 0 0 1 63.6 350.4] concat -gsave [1 0 0 1 -6.6737 0.0003662109] concat -gsave [1 0 0 -1 0 0] concat -gsave -/ArialMT-ISOLatin1 findfont -12 scalefont -setfont -0 0 0 setrgbcolor -newpath -0 0 moveto -(1) show -grestore -grestore -grestore -grestore -0 0 0 setrgbcolor -[] 0 setdash -1 setlinewidth -0 setlinejoin -0 setlinecap -newpath -71.9 276.4 moveto -80.9 276.4 lineto -579.2 276.4 moveto -570.2 276.4 lineto -stroke -gsave [1 0 0 1 63.6 280.9] concat -gsave [1 0 0 
1 -6.6737 0.0003662109] concat -gsave [1 0 0 -1 0 0] concat -gsave -/ArialMT-ISOLatin1 findfont -12 scalefont -setfont -0 0 0 setrgbcolor -newpath -0 0 moveto -(2) show -grestore -grestore -grestore -grestore -0 0 0 setrgbcolor -[] 0 setdash -1 setlinewidth -0 setlinejoin -0 setlinecap -newpath -71.9 206.9 moveto -80.9 206.9 lineto -579.2 206.9 moveto -570.2 206.9 lineto -stroke -gsave [1 0 0 1 63.6 211.4] concat -gsave [1 0 0 1 -6.6737 0.0003662109] concat -gsave [1 0 0 -1 0 0] concat -gsave -/ArialMT-ISOLatin1 findfont -12 scalefont -setfont -0 0 0 setrgbcolor -newpath -0 0 moveto -(3) show -grestore -grestore -grestore -grestore -0 0 0 setrgbcolor -[] 0 setdash -1 setlinewidth -0 setlinejoin -0 setlinecap -newpath -71.9 137.4 moveto -80.9 137.4 lineto -579.2 137.4 moveto -570.2 137.4 lineto -stroke -gsave [1 0 0 1 63.6 141.9] concat -gsave [1 0 0 1 -6.6737 0.0004882812] concat -gsave [1 0 0 -1 0 0] concat -gsave -/ArialMT-ISOLatin1 findfont -12 scalefont -setfont -0 0 0 setrgbcolor -newpath -0 0 moveto -(4) show -grestore -grestore -grestore -grestore -0 0 0 setrgbcolor -[] 0 setdash -1 setlinewidth -0 setlinejoin -0 setlinecap -newpath -71.9 67.9 moveto -80.9 67.9 lineto -579.2 67.9 moveto -570.2 67.9 lineto -stroke -gsave [1 0 0 1 63.6 72.4] concat -gsave [1 0 0 1 -6.6737 0.0004882812] concat -gsave [1 0 0 -1 0 0] concat -gsave -/ArialMT-ISOLatin1 findfont -12 scalefont -setfont -0 0 0 setrgbcolor -newpath -0 0 moveto -(5) show -grestore -grestore -grestore -grestore -0 0 0 setrgbcolor -[] 0 setdash -1 setlinewidth -0 setlinejoin -0 setlinecap -newpath -71.9 422.4 moveto -71.9 413.4 lineto -71.9 54 moveto -71.9 63 lineto -stroke -gsave [1 0 0 1 71.9 444.9] concat -gsave [1 0 0 1 -3.337 0.0003662109] concat -gsave [1 0 0 -1 0 0] concat -gsave -/ArialMT-ISOLatin1 findfont -12 scalefont -setfont -0 0 0 setrgbcolor -newpath -0 0 moveto -(0) show -grestore -grestore -grestore -grestore -0 0 0 setrgbcolor -[] 0 setdash -1 setlinewidth -0 setlinejoin -0 setlinecap -newpath -122.6 422.4 moveto -122.6 413.4 lineto -122.6 54 moveto -122.6 63 lineto -stroke -gsave [1 0 0 1 122.6 444.9] concat -gsave [1 0 0 1 -3.3368 0.0003662109] concat -gsave [1 0 0 -1 0 0] concat -gsave -/ArialMT-ISOLatin1 findfont -12 scalefont -setfont -0 0 0 setrgbcolor -newpath -0 0 moveto -(2) show -grestore -grestore -grestore -grestore -0 0 0 setrgbcolor -[] 0 setdash -1 setlinewidth -0 setlinejoin -0 setlinecap -newpath -173.4 422.4 moveto -173.4 413.4 lineto -173.4 54 moveto -173.4 63 lineto -stroke -gsave [1 0 0 1 173.4 444.9] concat -gsave [1 0 0 1 -3.337 0.0003662109] concat -gsave [1 0 0 -1 0 0] concat -gsave -/ArialMT-ISOLatin1 findfont -12 scalefont -setfont -0 0 0 setrgbcolor -newpath -0 0 moveto -(4) show -grestore -grestore -grestore -grestore -0 0 0 setrgbcolor -[] 0 setdash -1 setlinewidth -0 setlinejoin -0 setlinecap -newpath -224.1 422.4 moveto -224.1 413.4 lineto -224.1 54 moveto -224.1 63 lineto -stroke -gsave [1 0 0 1 224.1 444.9] concat -gsave [1 0 0 1 -3.3368 0.0003662109] concat -gsave [1 0 0 -1 0 0] concat -gsave -/ArialMT-ISOLatin1 findfont -12 scalefont -setfont -0 0 0 setrgbcolor -newpath -0 0 moveto -(6) show -grestore -grestore -grestore -grestore -0 0 0 setrgbcolor -[] 0 setdash -1 setlinewidth -0 setlinejoin -0 setlinecap -newpath -274.8 422.4 moveto -274.8 413.4 lineto -274.8 54 moveto -274.8 63 lineto -stroke -gsave [1 0 0 1 274.8 444.9] concat -gsave [1 0 0 1 -3.3372 0.0003662109] concat -gsave [1 0 0 -1 0 0] concat -gsave -/ArialMT-ISOLatin1 findfont -12 scalefont -setfont -0 0 0 
setrgbcolor -newpath -0 0 moveto -(8) show -grestore -grestore -grestore -grestore -0 0 0 setrgbcolor -[] 0 setdash -1 setlinewidth -0 setlinejoin -0 setlinecap -newpath -325.6 422.4 moveto -325.6 413.4 lineto -325.6 54 moveto -325.6 63 lineto -stroke -gsave [1 0 0 1 325.6 444.9] concat -gsave [1 0 0 1 -6.6742 0.0003662109] concat -gsave [1 0 0 -1 0 0] concat -gsave -/ArialMT-ISOLatin1 findfont -12 scalefont -setfont -0 0 0 setrgbcolor -newpath -0 0 moveto -(10) show -grestore -grestore -grestore -grestore -0 0 0 setrgbcolor -[] 0 setdash -1 setlinewidth -0 setlinejoin -0 setlinecap -newpath -376.3 422.4 moveto -376.3 413.4 lineto -376.3 54 moveto -376.3 63 lineto -stroke -gsave [1 0 0 1 376.3 444.9] concat -gsave [1 0 0 1 -6.6741 0.0003662109] concat -gsave [1 0 0 -1 0 0] concat -gsave -/ArialMT-ISOLatin1 findfont -12 scalefont -setfont -0 0 0 setrgbcolor -newpath -0 0 moveto -(12) show -grestore -grestore -grestore -grestore -0 0 0 setrgbcolor -[] 0 setdash -1 setlinewidth -0 setlinejoin -0 setlinecap -newpath -427 422.4 moveto -427 413.4 lineto -427 54 moveto -427 63 lineto -stroke -gsave [1 0 0 1 427 444.9] concat -gsave [1 0 0 1 -6.6738 0.0003662109] concat -gsave [1 0 0 -1 0 0] concat -gsave -/ArialMT-ISOLatin1 findfont -12 scalefont -setfont -0 0 0 setrgbcolor -newpath -0 0 moveto -(14) show -grestore -grestore -grestore -grestore -0 0 0 setrgbcolor -[] 0 setdash -1 setlinewidth -0 setlinejoin -0 setlinecap -newpath -477.7 422.4 moveto -477.7 413.4 lineto -477.7 54 moveto -477.7 63 lineto -stroke -gsave [1 0 0 1 477.7 444.9] concat -gsave [1 0 0 1 -6.6736 0.0003662109] concat -gsave [1 0 0 -1 0 0] concat -gsave -/ArialMT-ISOLatin1 findfont -12 scalefont -setfont -0 0 0 setrgbcolor -newpath -0 0 moveto -(16) show -grestore -grestore -grestore -grestore -0 0 0 setrgbcolor -[] 0 setdash -1 setlinewidth -0 setlinejoin -0 setlinecap -newpath -528.5 422.4 moveto -528.5 413.4 lineto -528.5 54 moveto -528.5 63 lineto -stroke -gsave [1 0 0 1 528.5 444.9] concat -gsave [1 0 0 1 -6.6738 0.0003662109] concat -gsave [1 0 0 -1 0 0] concat -gsave -/ArialMT-ISOLatin1 findfont -12 scalefont -setfont -0 0 0 setrgbcolor -newpath -0 0 moveto -(18) show -grestore -grestore -grestore -grestore -0 0 0 setrgbcolor -[] 0 setdash -1 setlinewidth -0 setlinejoin -0 setlinecap -newpath -579.2 422.4 moveto -579.2 413.4 lineto -579.2 54 moveto -579.2 63 lineto -stroke -gsave [1 0 0 1 579.2 444.9] concat -gsave [1 0 0 1 -6.6736 0.0003662109] concat -gsave [1 0 0 -1 0 0] concat -gsave -/ArialMT-ISOLatin1 findfont -12 scalefont -setfont -0 0 0 setrgbcolor -newpath -0 0 moveto -(20) show -grestore -grestore -grestore -grestore -0 0 0 setrgbcolor -[] 0 setdash -1 setlinewidth -0 setlinejoin -0 setlinecap -newpath -71.9 54 moveto -71.9 422.4 lineto -579.2 422.4 lineto -579.2 54 lineto -71.9 54 lineto -closepath -stroke -gsave [0 -1 1 0 34.2 238.2] concat -gsave [1 0 0 1 -89.054 0.0002441406] concat -gsave [1 0 0 -1 0 0] concat -gsave -/ArialMT-ISOLatin1 findfont -12 scalefont -setfont -0 0 0 setrgbcolor -newpath -0 0 moveto -(Amount of jobs running in parallel) show -grestore -grestore -grestore -grestore -gsave [1 0 0 1 325.5 471.9] concat -gsave [1 0 0 1 -43.4619 0.0003662109] concat -gsave [1 0 0 -1 0 0] concat -gsave -/ArialMT-ISOLatin1 findfont -12 scalefont -setfont -0 0 0 setrgbcolor -newpath -0 0 moveto -(Time in seconds) show -grestore -grestore -grestore -grestore -gsave [1 0 0 1 325.5 31.5] concat -gsave [1 0 0 1 -112.3887 0] concat -gsave [1 0 0 -1 0 0] concat -gsave -/ArialMT-ISOLatin1 findfont -12 
scalefont -setfont -0 0 0 setrgbcolor -newpath -0 0 moveto -(Amount of jobs running in parallel \(waf -j5\)) show -grestore -grestore -grestore -grestore -0 0.50196081 0 setrgbcolor -[] 0 setdash -1 setlinewidth -0 setlinejoin -0 setlinecap -newpath -71.9 415.4 moveto -74.4 415.4 lineto -77 415.4 lineto -79.5 415.4 lineto -82.1 415.4 lineto -84.6 415.4 lineto -87.2 415.4 lineto -90 67.9 lineto -92.5 67.9 lineto -95.1 67.9 lineto -97.7 67.9 lineto -100.2 67.9 lineto -102.9 137.4 lineto -106.1 137.4 lineto -108.7 67.9 lineto -111.3 67.9 lineto -114.6 67.9 lineto -117.1 67.9 lineto -119.7 206.9 lineto -122.2 67.9 lineto -126.1 137.4 lineto -128.6 67.9 lineto -131.6 137.4 lineto -134.1 67.9 lineto -136.9 137.4 lineto -140.1 206.9 lineto -143 67.9 lineto -145.9 67.9 lineto -148.4 206.9 lineto -151 67.9 lineto -154 67.9 lineto -156.8 67.9 lineto -159.7 67.9 lineto -162.9 67.9 lineto -165.6 67.9 lineto -168.2 67.9 lineto -170.7 67.9 lineto -173.5 67.9 lineto -176.1 67.9 lineto -178.7 67.9 lineto -181.2 67.9 lineto -184.4 67.9 lineto -187 67.9 lineto -189.5 67.9 lineto -192.1 67.9 lineto -195.7 67.9 lineto -198.4 67.9 lineto -201.6 67.9 lineto -204.2 206.9 lineto -206.9 137.4 lineto -209.7 67.9 lineto -212.6 137.4 lineto -215.3 206.9 lineto -218.5 137.4 lineto -221 67.9 lineto -223.7 67.9 lineto -226.2 137.4 lineto -228.7 67.9 lineto -231.3 206.9 lineto -233.9 67.9 lineto -236.4 67.9 lineto -239.4 67.9 lineto -242.4 137.4 lineto -245 67.9 lineto -247.6 67.9 lineto -250.1 137.4 lineto -252.7 67.9 lineto -255.2 67.9 lineto -257.8 67.9 lineto -260.3 67.9 lineto -262.9 67.9 lineto -266.1 67.9 lineto -268.7 67.9 lineto -271.8 67.9 lineto -274.5 67.9 lineto -277.4 67.9 lineto -280 67.9 lineto -282.5 67.9 lineto -285 67.9 lineto -288.1 67.9 lineto -290.7 67.9 lineto -294 67.9 lineto -296.5 67.9 lineto -299.1 67.9 lineto -301.7 137.4 lineto -304.2 67.9 lineto -307.3 67.9 lineto -310 137.4 lineto -312.6 137.4 lineto -315.1 67.9 lineto -318.1 67.9 lineto -320.701 67.9 lineto -323.201 67.9 lineto -325.801 137.4 lineto -328.402 67.9 lineto -331.601 67.9 lineto -334.202 415.4 lineto -336.702 415.4 lineto -339.302 415.4 lineto -341.802 415.4 lineto -344.302 415.4 lineto -346.903 415.4 lineto -349.403 67.9 lineto -352.003 67.9 lineto -354.503 67.9 lineto -357.103 67.9 lineto -360.303 67.9 lineto -363.203 137.4 lineto -365.803 137.4 lineto -368.404 67.9 lineto -371.704 137.4 lineto -374.204 67.9 lineto -376.904 137.4 lineto -379.404 67.9 lineto -382.204 67.9 lineto -385.004 67.9 lineto -387.704 67.9 lineto -390.404 67.9 lineto -392.904 67.9 lineto -395.504 67.9 lineto -399.004 67.9 lineto -401.504 67.9 lineto -404.104 137.4 lineto -408.304 137.4 lineto -410.804 67.9 lineto -413.604 67.9 lineto -416.104 67.9 lineto -418.705 67.9 lineto -421.205 67.9 lineto -423.805 67.9 lineto -426.906 67.9 lineto -429.406 67.9 lineto -432.006 67.9 lineto -434.706 276.4 lineto -437.306 67.9 lineto -439.806 67.9 lineto -442.407 67.9 lineto -445.407 67.9 lineto -447.907 276.4 lineto -450.507 67.9 lineto -453.507 67.9 lineto -456.507 67.9 lineto -459.107 67.9 lineto -462.007 67.9 lineto -464.507 206.9 lineto -467.607 206.9 lineto -470.408 137.4 lineto -473.008 67.9 lineto -476.208 67.9 lineto -478.708 67.9 lineto -481.808 67.9 lineto -484.508 67.9 lineto -487.108 67.9 lineto -490.209 67.9 lineto -492.709 67.9 lineto -495.309 67.9 lineto -498.109 67.9 lineto -501.109 67.9 lineto -503.71 67.9 lineto -506.609 67.9 lineto -509.509 137.4 lineto -512.009 67.9 lineto -514.509 67.9 lineto -518.309 137.4 lineto -521.41 67.9 lineto -524.01 
67.9 lineto -526.71 67.9 lineto -529.41 206.9 lineto -532.21 67.9 lineto -534.71 137.4 lineto -537.31 67.9 lineto -539.911 206.9 lineto -542.511 276.4 lineto -545.111 67.9 lineto -547.611 67.9 lineto -550.712 137.4 lineto -553.412 137.4 lineto -stroke -0 0 0 setrgbcolor -[] 0 setdash -1 setlinewidth -0 setlinejoin -0 setlinecap -newpath -71.9 54 moveto -71.9 422.4 lineto -579.2 422.4 lineto -579.2 54 lineto -71.9 54 lineto -closepath -stroke -grestore -showpage -%%EOF diff --git a/docs/book/development.txt b/docs/book/development.txt deleted file mode 100644 index 8cb6031ec5..0000000000 --- a/docs/book/development.txt +++ /dev/null @@ -1,235 +0,0 @@ -== Using the development version - -A few notes on the waf development follow. - -=== Execution traces - -==== Logging - -The generic flags to add more information to the stack traces or to the messages is '-v' (verbosity), it is used to display the command-lines executed during a build: - -[source,shishell] ---------------- -$ waf -v ---------------- - -To display all the traces (useful for bug reports), use the following flag: - -[source,shishell] ---------------- -$ waf -vvv ---------------- - -Debugging information can be filtered easily with the flag 'zones': - -[source,shishell] ---------------- -$ waf --zones=action ---------------- - -Tracing zones must be comma-separated, for example: - -[source,shishell] ---------------- -$ waf --zones=action,envhash,task_gen ---------------- - -The Waf module 'Logs' replaces the Python module logging. In the source code, traces are provided by using the 'debug' function, they must obey the format "zone: message" like in the following: - -[source,python] ---------------- -Logs.debug("task: executing %r - it was never run before or its class changed" % self) ---------------- - -The following zones are used in Waf: - -.Debugging zones -[options="header",cols='1,5'] -|================= -|Zone | Description -|runner | command-lines executed (enabled when -v is provided without debugging zones) -|deps | implicit dependencies found (task scanners) -|task_gen| task creation (from task generators) and task generator method execution -|action | functions to execute for building the targets -|env | environment contents -|envhash | hashes of the environment objects - helps seeing what changes -|build | build context operations such as filesystem access -|preproc | preprocessor execution -|group | groups and task generators -|================= - -WARNING: Debugging information can be displayed only after the command-line has been parsed. For example, no debugging information will be displayed when a waf tool is being by for the command-line options 'opt.load()' or by the global init method function 'init.tool()' - -==== Build visualization - -The Waf tool named _parallel_debug_ is used to inject code in Waf modules and to obtain a detailed execution trace. This module is provided in the folder +waflib/extras+ and must be imported in one's project before use: - -[source,python] ---------------- -def options(ctx): - ctx.load('parallel_debug', tooldir='.') - -def configure(ctx): - ctx.load('parallel_debug', tooldir='.') - -def build(ctx): - bld(rule='touch ${TGT}', target='foo') ---------------- - -The execution will generate a diagram of the tasks executed during the build in the file +pdebug.svg+: - -image::pdebug{PIC}["Parallel execution diagram"{backend@docbook:,width=500:},align="center"] - -The details will be generated in the file +pdebug.dat+ as space-separated values. 
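Before plotting, the raw data may be inspected with a few lines of Python; this sketch only assumes the column layout summarized in the table further below (the seventh column holds the amount of active threads):

[source,python]
---------------
#! /usr/bin/env python
# print the peak amount of active threads observed in pdebug.dat
peak = 0
with open('pdebug.dat') as f:
    for line in f:
        cols = line.split()
        if len(cols) >= 7:
            peak = max(peak, int(cols[6]))
print('peak parallelism: %d' % peak)
---------------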
The file can be processed by other applications such as Gnuplot to obtain other diagrams: - -[source,shishell] ---------------- -#! /usr/bin/env gnuplot -set terminal png -set output "output.png" -set ylabel "Amount of active threads" -set xlabel "Time in seconds" -set title "Active threads on a parallel build (waf -j5)" -unset label -set yrange [-0.1:5.2] -set ytic 1 -plot 'pdebug.dat' using 3:7 with lines title "" lt 2 ---------------- - -image::dev{PIC}["Thread activity during the build"{backend@docbook:,width=410:},align="center"] - -The data file columns are the following: - -.pdebug file format -[options="header", cols="1,2,6"] -|================= -|Column | Type | Description -|1 |int| Identifier of the thread which has started or finished processing a task -|2 |int| Identifier of the task processed -|3 |float| Event time -|4 |string| Type of the task processed -|5 |int| Amount of tasks processed -|6 |int| Amount of tasks waiting to be processed by the task consumers -|7 |int| Amount of active threads -|================= - - -=== Profiling - -==== Benchmark projects - -The script +utils/genbench.py+ is used as a base to create large c-like project files. The habitual use is the following: - -[source,shishell] ---------------- -$ utils/genbench.py /tmp/build 50 100 15 5 -$ cd /tmp/build -$ waf configure -$ waf -p -j2 ---------------- - -The C++ project created will generate 50 libraries from 100 class files for each, each source file having 15 include headers pointing at the same library and 5 headers pointing at other headers randomly chosen. - -The compilation time may be discarded easily by disabling the actual compilation, for example: - -[source,python] ---------------- -def build(bld): - from waflib import Task - def touch_func(task): - for x in task.outputs: - x.write('') - for x in Task.TaskBase.classes.keys(): - cls = Task.TaskBase.classes[x] - cls.func = touch_func - cls.color = 'CYAN' ---------------- - -==== Profile traces - -Profiling information is obtained by calling the module cProfile and by injecting specific code. The most interesting methods to profile is 'waflib.Build.BuildContext.compile'. The amount of function calls is usually a bottleneck, and reducing it results in noticeable speedups. 
Here is an example on the method compile: - -[source,python] ---------------- -from waflib.Build import BuildContext -def ncomp(self): - import cProfile, pstats - cProfile.runctx('self.orig_compile()', {}, {'self': self}, 'profi.txt') - p = pstats.Stats('profi.txt') - p.sort_stats('time').print_stats(45) - -BuildContext.orig_compile = BuildContext.compile -BuildContext.compile = ncomp ---------------- - -Here the output obtained on a benchmark build created as explained in the previous section: - -[source,shishell] ---------------- -Fri Jul 23 15:11:15 2010 profi.txt - - 1114979 function calls (1099879 primitive calls) in 5.768 CPU seconds - - Ordered by: internal time - List reduced from 139 to 45 due to restriction 45 - - ncalls tottime percall cumtime percall filename:lineno(function) - 109500 0.523 0.000 1.775 0.000 /comp/waf/waflib/Node.py:615(get_bld_sig) - 5000 0.381 0.000 1.631 0.000 /comp/waf/waflib/Task.py:475(compute_sig_implicit_deps) - 154550 0.286 0.000 0.286 0.000 {method 'update' of '_hashlib.HASH' objects} - 265350 0.232 0.000 0.232 0.000 {id} -40201/25101 0.228 0.000 0.228 0.000 /comp/waf/waflib/Node.py:319(abspath) - 10000 0.223 0.000 0.223 0.000 {open} - 20000 0.197 0.000 0.197 0.000 {method 'read' of 'file' objects} - 15000 0.193 0.000 0.349 0.000 /comp/waf/waflib/Task.py:270(uid) - 10000 0.189 0.000 0.850 0.000 /comp/waf/waflib/Utils.py:96(h_file) ---------------- - -A few known hot spots are present in the library: - -. The persistence implemented by the cPickle module (the cache file to serialize may take a few megabytes) -. Accessing configuration data from the Environment instances -. Computing implicit dependencies in general - -==== Optimizations tips - -The Waf source code has already been optimized in various ways. In practice, the projects may use additional assumptions to replace certain methods or parameters from its build scripts. For example, if a project is always executed on Windows, then the _framework_ and _rpath_ variables may be removed: - -[source,python] ---------------- -from waflib.Tools.ccroot import USELIB_VARS -USELIB_VARS['cprogram'] = USELIB_VARS['cxxprogram'] = \ - set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'LINKDEPS']) ---------------- - -=== Waf programming - -==== Setting up a Waf directory for development - -Waf is hosted on https://github.com/waf-project/waf[Github], and uses Git for source control. To obtain the development copy, use: - -[source,shishell] ---------------- -$ git clone https://github.com/waf-project/waf.git wafdir -$ cd wafdir -$ ./waf-light --make-waf ---------------- - -To avoid regenerating Waf each time, the environment variable *WAFDIR* should be used to point at the directory containing _waflib_: - -[source,shishell] ---------------- -$ export WAFDIR=/path/to/directory/ ---------------- - -==== Specific guidelines - -Though Waf is written in Python, additional restrictions apply to the source code: - -. Identation is tab-only, and the maximum line length should be about 200 characters -. The development code is kept compatible with Python 2.6, and the code is processed so that it runs on Python 2.5 -. The _waflib_ modules must be insulated from the _Tools_ modules to keep the Waf core small and language independent -. Api compatibility is maintained in the cycle of a minor version (from 1.8.0 to 1.8.n) - -NOTE: More code always means more bugs. Whenever possible, unnecessary code must be removed, and the existing code base should be simplified. 
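The pdebug.dat columns documented earlier in this chapter can also be consumed directly, without going through Gnuplot. The following is a minimal editorial sketch (not part of the original book sources): it assumes a pdebug.dat file produced by the parallel_debug tool is present in the current directory, with column 3 holding the event time and column 7 the number of active threads, as in the table above.

[source,python]
---------------
#! /usr/bin/env python
# Editorial sketch: summarise a pdebug.dat trace produced by parallel_debug.
# Assumes the documented column layout: column 3 = event time (float),
# column 7 = number of active threads (int).

def summarize(path='pdebug.dat'):
    peak = 0
    events = 0
    last_time = 0.0
    with open(path) as f:
        for line in f:
            cols = line.split()
            if len(cols) < 7:
                continue  # skip blank or malformed lines
            events += 1
            last_time = float(cols[2])      # time of the last event seen
            peak = max(peak, int(cols[6]))  # track the peak thread count
    print('%d events over %.3f seconds, peak of %d active threads' % (events, last_time, peak))

if __name__ == '__main__':
    summarize()
---------------

Running it next to the build directory prints a one-line summary; the same columns can of course be fed to any other plotting or reporting tool.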
- diff --git a/docs/book/download.txt b/docs/book/download.txt deleted file mode 100644 index c4f04d03fa..0000000000 --- a/docs/book/download.txt +++ /dev/null @@ -1,152 +0,0 @@ -== Download and installation - -=== Obtaining the Waf file - -The Waf project is located on https://waf.io[waf.io]. -The current Waf version requires an interpreter for the Python programming language such as http://www.python.org[cPython] 2.5 to 3.4, http://pypy.org[Pypy] or http://www.jython.org[Jython] >= 2.5. - -==== Downloading and using the Waf binary - -The Waf binary is a python script which does not require any installation whatsoever. It may be executed directly from a writable folder. Just rename it as +waf+ if necessary: - -[source,shishell] ---------------- -$ wget http://ftp.waf.io/pub/release/waf-1.8.8 -$ mv waf-1.8.8 waf -$ python waf --version -waf 1.8.8 (54dc13ba5f51bfe2ae277451ec5ac1d0a91c7aaf) ---------------- - -The +waf+ file has its own library compressed in a binary stream in the same file. Upon execution, the library is uncompressed in a hidden folder in the current directory. The folder will be re-created if removed. This scheme enables different Waf versions to be executed from the same folders: - -[source,shishell] ---------------- -$ ls -ld .waf* -.waf-1.8.8-2c924e3f453eb715218b9cc852291170 ---------------- - -NOTE: The binary file requires http://docs.python.org/library/bz2.html[bzip2] compression support, which may be unavailable in some self-compiled cPython installations. - -==== Building Waf from the source code - -Building Waf requires a Python interpreter having a version number in the range 2.6-3.4. The source code is then processed to support Python 2.5. - -[source,shishell] ---------------- -$ wget http://ftp.waf.io/pub/release/waf-1.8.8.tar.bz2 -$ tar xjvf waf-1.8.8.tar.bz2 -$ cd waf-1.8.8 -$ python waf-light -Configuring the project -'build' finished successfully (0.001s) -Checking for program python : /usr/bin/python -Checking for python version : (2, 6, 5, 'final', 0) -'configure' finished successfully (0.176s) -Waf: Entering directory `/waf-1.8.8/build' -[1/1] create_waf: -> waf -Waf: Leaving directory `/waf-1.8.8/build' -'build' finished successfully (2.050s) ---------------- - -For older interpreters, it is possible to build the +waf+ file with gzip compression instead of bzip2: - -[source,shishell] ---------------- -$ python waf-light --zip-type=gz ---------------- - -The files present in the folder _waflib/extras_ represent extensions (Waf tools) that are in a testing phase. They may be added to the Waf binary by using the _--tools_ switch: - -[source,shishell] ---------------- -$ python waf-light --tools=compat15,swig,doxygen ---------------- - -The tool _compat15_ is required to provide some compatibility with previous Waf versions. -To remove it, it is necessary to modify the initialization by changing the _--prelude_ switch: - -[source,shishell] ---------------- -$ python waf-light --make-waf --prelude='' --tools=swig ---------------- - -Finally, here is how to import an external tool and load it in the initialization. 
Assuming the file `aba.py` is present in the current directory: - -[source,python] ---------------- -def foo(): - from waflib.Context import WAFVERSION - print("This is Waf %s" % WAFVERSION) ---------------- - -The following will create a custom waf file which will import and execute 'foo' whenever it is executed: - -[source,shishell] ---------------- -$ python waf-light --make-waf --tools=compat15,$PWD/aba.py - --prelude=$'\tfrom waflib.extras import aba\n\taba.foo()' -$ ./waf --help -This is Waf 1.8.8 -[...] ---------------- - -Foreign files to add into the folder 'extras' must be given by absolute paths in the _--tools_ switch. -Such files do not have to be Python files, yet, a typical scenario is to add an initializer to modify existing -functions and classes from the Waf modules. Various from the https://github.com/waf-project/waf/tree/master/build_system_kit/[build system kit] illustrate how to create custom -build systems derived from Waf. - -=== Using the Waf file - -==== Permissions and aliases - -Because the waf script is a python script, it is usually executed by calling +python+ on it: - -[source,shishell] ---------------- -$ python waf ---------------- - -On unix-like systems, it is usually much more convenient to set the executable permissions and avoid calling +python+ each time: - -[source,shishell] ---------------- -$ chmod 755 waf -$ ./waf --version -waf 1.8.8 (54dc13ba5f51bfe2ae277451ec5ac1d0a91c7aaf) ---------------- - -If the command-line interpreter supports aliases, it is recommended to set the alias once: - -[source,shishell] ---------------- -$ alias waf=$PWD/waf -$ waf --version -waf 1.8.8 (54dc13ba5f51bfe2ae277451ec5ac1d0a91c7aaf) ---------------- - -Or, the execution path may be modified to point at the location of the waf binary: - -[source,shishell] ---------------- -$ export PATH=$PWD:$PATH -$ waf --version -waf 1.8.8 (54dc13ba5f51bfe2ae277451ec5ac1d0a91c7aaf) ---------------- - -In the next sections of the book, we assume that either an alias or the execution path have been set in a way that +waf+ may be called directly. - -==== Local waflib folders - -Although the waf library is unpacked automatically from the waf binary file, it is sometimes necessary to keep the files in a visible folder, which may even be kept in a source control tool (subversion, git, etc). For example, the +waf-light+ script does not contain the waf library, yet it is used to create the +waf+ script by using the directory +waflib+. - -The following diagram represents the process used to find the +waflib+ directory: - -image::waflib{PIC}["Waflib discovery"{backend@docbook:,width=450:},align="center"] - - -==== Portability concerns - -By default, the recommended Python interpreter is cPython, for which the supported versions are 2.5 to 3.4. For maximum convenience for the user, a copy of the http://www.jython.org[Jython] interpreter (version >= 2.5) could be redistributed along with a copy of the Waf executable. - -WARNING: The 'waf' script must reside in a writable folder to unpack its cache files. - diff --git a/docs/book/examples/advbuild_cmdtool/some_tool.py b/docs/book/examples/advbuild_cmdtool/some_tool.py deleted file mode 100644 index aad90b7859..0000000000 --- a/docs/book/examples/advbuild_cmdtool/some_tool.py +++ /dev/null @@ -1,10 +0,0 @@ -#! 
/usr/bin/env python - -from waflib import Context - -def cnt(ctx): - """do something""" - print('just a test') - -Context.g_module.__dict__['cnt'] = cnt - diff --git a/docs/book/examples/advbuild_cmdtool/wscript b/docs/book/examples/advbuild_cmdtool/wscript deleted file mode 100644 index c2001ea70c..0000000000 --- a/docs/book/examples/advbuild_cmdtool/wscript +++ /dev/null @@ -1,17 +0,0 @@ -#! /usr/bin/env python - -""" -Execute -$ 'waf cnt' -the cnt context is defined in 'some_tool.py' -""" - -top = '.' -out = 'build' - -def options(opt): - opt.load('some_tool', tooldir='.') - -def configure(conf): - pass - diff --git a/docs/book/examples/advbuild_composition/wscript b/docs/book/examples/advbuild_composition/wscript deleted file mode 100644 index 1f6caf9f11..0000000000 --- a/docs/book/examples/advbuild_composition/wscript +++ /dev/null @@ -1,17 +0,0 @@ -#! /usr/bin/env python - -""" -Calling 'waf clean build' can be shortened to 'waf cleanbuild' -The cleanbuild command is defined below -""" - -def configure(ctx): - pass - -def build(ctx): - pass - -def cleanbuild(ctx): - from waflib import Options - Options.commands = ['clean', 'build'] + Options.commands - diff --git a/docs/book/examples/advbuild_confdata/wscript b/docs/book/examples/advbuild_confdata/wscript deleted file mode 100644 index 9ec249dda8..0000000000 --- a/docs/book/examples/advbuild_confdata/wscript +++ /dev/null @@ -1,20 +0,0 @@ -#! /usr/bin/env python - -""" -the command 'foo' uses a build context subclass internally -try calling 'waf configure foo' -""" - -def configure(ctx): - ctx.env.FOO = 'some data' - -def build(ctx): - print('build command') - -def foo(ctx): - print(ctx.env.FOO) - -from waflib.Build import BuildContext -class one(BuildContext): - cmd = 'foo' - fun = 'foo' diff --git a/docs/book/examples/advbuild_subclass/wscript b/docs/book/examples/advbuild_subclass/wscript deleted file mode 100644 index 985076e362..0000000000 --- a/docs/book/examples/advbuild_subclass/wscript +++ /dev/null @@ -1,27 +0,0 @@ -#! /usr/bin/env python - -""" -Context commands usually derive from different classe. -The command 'foo' uses a normal context, while bar uses -a different class. Try executing -$ waf configure foo bar tak -""" - -def configure(ctx): - print(type(ctx)) - -def foo(ctx): - print(type(ctx)) - -def bar(ctx): - print(type(ctx)) - -from waflib.Context import Context - -class one(Context): - cmd = 'foo' - -class two(Context): - cmd = 'tak' - fun = 'bar' - diff --git a/docs/book/examples/advbuild_testcase/wscript b/docs/book/examples/advbuild_testcase/wscript deleted file mode 100644 index c2f8130aeb..0000000000 --- a/docs/book/examples/advbuild_testcase/wscript +++ /dev/null @@ -1,36 +0,0 @@ -#! 
/usr/bin/env python - -""" -Commands may aggregate other commands - -Try for example -$ waf configure -$ waf test -""" - -def options(ctx): - ctx.load('compiler_c') - -def configure(ctx): - ctx.load('compiler_c') - -def setup(ctx): - n = ctx.path.make_node('main.c') - n.write('#include "foo.h"\nint main() {return 0;}\n') - - global v - m = ctx.path.make_node('foo.h') - m.write('int k = %d;\n' % v) - v += 1 - -def build(ctx): - ctx.program(source='main.c', target='app') - -def test(ctx): - global v - v = 12 - - from waflib import Options - lst = ['configure', 'setup', 'build', 'setup', 'build'] - Options.commands = lst + Options.commands - diff --git a/docs/book/examples/advbuild_variant/wscript b/docs/book/examples/advbuild_variant/wscript deleted file mode 100644 index 3a102ba5f5..0000000000 --- a/docs/book/examples/advbuild_variant/wscript +++ /dev/null @@ -1,20 +0,0 @@ -#! /usr/bin/env python - -""" -Typical projects are build by using: -$ waf configure build -Other build commands may build the same project in a different output directory -$ waf configure debug -""" - -def configure(ctx): - pass - -def build(ctx): - ctx(rule='touch ${TGT}', target=ctx.cmd + '.txt') - -from waflib.Build import BuildContext -class debug(BuildContext): - cmd = 'debug' - variant = 'debug' - diff --git a/docs/book/examples/advbuild_variant_env/wscript b/docs/book/examples/advbuild_variant_env/wscript deleted file mode 100644 index abcc7104a2..0000000000 --- a/docs/book/examples/advbuild_variant_env/wscript +++ /dev/null @@ -1,43 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2010 (ita) - -""" -Define build commands for several variants at once -Try executing -$ waf clean_debug build_debug clean_release build_release -""" - -VERSION='0.0.1' -APPNAME='cc_test' - -top = '.' -out = 'build' - -def options(opt): - opt.load('compiler_c') - -def configure(conf): - conf.setenv('debug') - conf.load('compiler_c') - conf.env.CFLAGS = ['-g'] - - conf.setenv('release') - conf.load('compiler_c') - conf.env.CFLAGS = ['-O2'] - -def build(bld): - if not bld.variant: - bld.fatal('call "waf build_debug" or "waf build_release", and try "waf --help"') - bld.program(source='main.c', target='app', includes='.') - -from waflib.Build import BuildContext, CleanContext, \ - InstallContext, UninstallContext - -for x in 'debug release'.split(): - for y in (BuildContext, CleanContext, InstallContext, UninstallContext): - name = y.__name__.replace('Context','').lower() - class tmp(y): - cmd = name + '_' + x - variant = x - diff --git a/docs/book/examples/advbuild_waflock/wscript b/docs/book/examples/advbuild_waflock/wscript deleted file mode 100644 index 74f5ed4aa2..0000000000 --- a/docs/book/examples/advbuild_waflock/wscript +++ /dev/null @@ -1,18 +0,0 @@ -#! 
/usr/bin/env python - -""" -Here we do not define the top or the out folders -The build directory may be controlled by setting -the WAFLOCK variable with the name convention .lock-wafNAME -where NAME will be the build directory - -WAFLOCK=.lock-waffoo waf configure build -WAFLOCK=.lock-wafbar waf configure build -""" - -def configure(conf): - pass - -def build(bld): - bld(rule='touch ${TGT}', target='foo.txt') - diff --git a/docs/book/examples/architecture_link/faa.ext b/docs/book/examples/architecture_link/faa.ext deleted file mode 100644 index 2b3145e1ae..0000000000 --- a/docs/book/examples/architecture_link/faa.ext +++ /dev/null @@ -1,2 +0,0 @@ -gibberish there - diff --git a/docs/book/examples/architecture_link/foo.ext b/docs/book/examples/architecture_link/foo.ext deleted file mode 100644 index 83599ef3c1..0000000000 --- a/docs/book/examples/architecture_link/foo.ext +++ /dev/null @@ -1 +0,0 @@ -gibberish here diff --git a/docs/book/examples/architecture_link/wscript b/docs/book/examples/architecture_link/wscript deleted file mode 100644 index b3eb15f433..0000000000 --- a/docs/book/examples/architecture_link/wscript +++ /dev/null @@ -1,36 +0,0 @@ -#! /usr/bin/env python - -""" -This example demonstrates how to create custom compilation and link tasks - -NOTE: to avoid redefining the method call_apply_link, you could use this: -import waflib.TaskGen -waflib.TaskGen.feats['mylink'] = ['apply_link'] -""" - -def configure(ctx): - pass - -def build(ctx): - ctx(features='mylink', source='foo.ext faa.ext', target='bingo') - -from waflib.Task import Task -from waflib.TaskGen import feature, extension, after_method -from waflib.Tools import ccroot - -@after_method('process_source') -@feature('mylink') -def call_apply_link(self): - self.apply_link() - -class mylink(ccroot.link_task): - run_str = 'cat ${SRC} > ${TGT}' - -class ext2o(Task): - run_str = 'cp ${SRC} ${TGT}' - -@extension('.ext') -def process_ext(self, node): - self.create_compiled_task('ext2o', node) - - diff --git a/docs/book/examples/architecture_subcontext/wscript b/docs/book/examples/architecture_subcontext/wscript deleted file mode 100644 index 0c26b64a26..0000000000 --- a/docs/book/examples/architecture_subcontext/wscript +++ /dev/null @@ -1,43 +0,0 @@ -#! /usr/bin/env python - -""" -Task generators may create any kind of task - -In this example, the tasks create configuration contexts -and execute configuration tests. - -Execute -$ waf configure build -""" - -import os -from waflib.Configure import conf, ConfigurationContext -from waflib import Task, Build, Logs - -def run_test(self): - top = self.generator.bld.srcnode.abspath() - out = self.generator.bld.bldnode.abspath() - - ctx = ConfigurationContext(top_dir=top, out_dir=out) - ctx.init_dirs() - - ctx.in_msg = 1 - ctx.env = self.env.derive() - - header = self.generator.header_name - logfile = self.generator.path.get_bld().abspath() + os.sep \ - + header + '.log' - ctx.logger = Logs.make_logger(logfile, header) - - ctx.check(header_name=header) - -def options(ctx): - ctx.load('compiler_c') - -def configure(ctx): - ctx.load('compiler_c') - -def build(ctx): - ctx(rule=run_test, always=True, header_name='stdio.h') - ctx(rule=run_test, always=True, header_name='unistd.h') - diff --git a/docs/book/examples/build_lazy_tg/wscript b/docs/book/examples/build_lazy_tg/wscript deleted file mode 100644 index cdc0f73f2b..0000000000 --- a/docs/book/examples/build_lazy_tg/wscript +++ /dev/null @@ -1,20 +0,0 @@ -#! 
/usr/bin/env python - -""" -Task generators do not create their tasks immediately -Here is an illustration - -$ waf configure clean build -""" - -def configure(ctx): - pass - -def build(ctx): - tg = ctx(rule='touch ${TGT}', target='foo') - print(type(tg)) - print(tg.tasks) - tg.post() - print(tg.tasks) - print(type(tg.tasks[0])) - diff --git a/docs/book/examples/build_list/wscript b/docs/book/examples/build_list/wscript deleted file mode 100644 index c036015e1a..0000000000 --- a/docs/book/examples/build_list/wscript +++ /dev/null @@ -1,20 +0,0 @@ -#! /usr/bin/env python - -""" -To list the targets to build, use 'waf list' - -Try the following -$ waf configure clean build -$ waf list -$ waf clean build --targets=bar -""" - -top = '.' -out = 'build' - -def configure(ctx): - pass - -def build(ctx): - ctx(source='wscript', target='foo.txt', rule='cp ${SRC} ${TGT}') - ctx(target='bar.txt', rule='touch ${TGT}', name='bar') diff --git a/docs/book/examples/build_manual_tasks/wscript b/docs/book/examples/build_manual_tasks/wscript deleted file mode 100644 index b511cccc27..0000000000 --- a/docs/book/examples/build_manual_tasks/wscript +++ /dev/null @@ -1,53 +0,0 @@ -#! /usr/bin/env python - -""" -Task objects may be created manually -This is tedious and leads to spaghetti code - -Yet, it it interesting to see this to understand -why the task generator abstraction is necessary - -$ waf configure build -""" - -def configure(ctx): - pass - -from waflib.Task import Task -class cp(Task): - def run(self): - return self.exec_command('cp %s %s' % ( - self.inputs[0].abspath(), - self.outputs[0].abspath() - ) - ) - -class cat(Task): - def run(self): - return self.exec_command('cat %s %s > %s' % ( - self.inputs[0].abspath(), - self.inputs[1].abspath(), - self.outputs[0].abspath() - ) - ) - -def build(ctx): - - cp_1 = cp(env=ctx.env) - cp_1.set_inputs(ctx.path.find_resource('wscript')) - cp_1.set_outputs(ctx.path.find_or_declare('foo.txt')) - ctx.add_to_group(cp_1) - - cp_2 = cp(env=ctx.env) - cp_2.set_inputs(ctx.path.find_resource('wscript')) - cp_2.set_outputs(ctx.path.find_or_declare('bar.txt')) - ctx.add_to_group(cp_2) - - cat_1 = cat(env=ctx.env) - cat_1.set_inputs(cp_1.outputs + cp_2.outputs) - cat_1.set_outputs(ctx.path.find_or_declare('foobar.txt')) - ctx.add_to_group(cat_1) - - cat_1.set_run_after(cp_1) - cat_1.set_run_after(cp_2) - diff --git a/docs/book/examples/build_manual_tasks/wscript-noshell b/docs/book/examples/build_manual_tasks/wscript-noshell deleted file mode 100644 index 8502b6428e..0000000000 --- a/docs/book/examples/build_manual_tasks/wscript-noshell +++ /dev/null @@ -1,42 +0,0 @@ -#! /usr/bin/env python - -""" -The wscript file in this directory uses external commands in the -task execution. We may as well use node objects to write to them -directly. 
-""" - -def configure(ctx): - pass - -from waflib.Task import Task -class cp(Task): - def run(self): - txt = self.inputs[0].read() - self.outputs[0].write(txt) - -class cat(Task): - def run(self): - txt = self.inputs[0].read() + self.inputs[1].read() - self.outputs[0].write(txt) - -def build(ctx): - - cp_1 = cp(env=ctx.env) - cp_1.set_inputs(ctx.path.find_resource('wscript')) - cp_1.set_outputs(ctx.path.find_or_declare('foo.txt')) - ctx.add_to_group(cp_1) - - cp_2 = cp(env=ctx.env) - cp_2.set_inputs(ctx.path.find_resource('wscript')) - cp_2.set_outputs(ctx.path.find_or_declare('bar.txt')) - ctx.add_to_group(cp_2) - - cat_1 = cat(env=ctx.env) - cat_1.set_inputs(cp_1.outputs + cp_2.outputs) - cat_1.set_outputs(ctx.path.find_or_declare('foobar.txt')) - ctx.add_to_group(cat_1) - - cat_1.set_run_after(cp_1) - cat_1.set_run_after(cp_2) - diff --git a/docs/book/examples/build_pre_post/wscript b/docs/book/examples/build_pre_post/wscript deleted file mode 100644 index 73a4d16912..0000000000 --- a/docs/book/examples/build_pre_post/wscript +++ /dev/null @@ -1,35 +0,0 @@ -#! /usr/bin/env python - -""" -Demonstrate how to bind functions to be executed before or after the build - -Try executing: -$ waf configure clean build -""" - -top = '.' -out = 'build' - -def options(ctx): - ctx.add_option('--exe', action='store_true', default=False, - help='execute the program after it is built') - -def configure(ctx): - pass - -def pre(ctx): - print('before the build is started') - -def post(ctx): - print('after the build is complete') - if ctx.cmd == 'install': - from waflib import Options, Utils - if Options.options.exe: - ctx.exec_command('/sbin/ldconfig') - -def build(ctx): - ctx(rule='touch ${TGT}', target='bar.txt', always=True) - - ctx.add_pre_fun(pre) - ctx.add_post_fun(post) - diff --git a/docs/book/examples/build_task_gen/wscript b/docs/book/examples/build_task_gen/wscript deleted file mode 100644 index 0d26ba6e20..0000000000 --- a/docs/book/examples/build_task_gen/wscript +++ /dev/null @@ -1,17 +0,0 @@ -#! /usr/bin/env python - -""" -Create task generators to create tasks when necessary, -it is equivalent to the example 'build_manual_tasks' - -$ waf configure clean build -""" - -def configure(ctx): - pass - -def build(ctx): - ctx(rule='cp ${SRC} ${TGT}', source='wscript', target='foo.txt') - ctx(rule='cp ${SRC} ${TGT}', source='wscript', target='bar.txt') - ctx(rule='cat ${SRC} > ${TGT}', source='foo.txt bar.txt', target='foobar.txt') - diff --git a/docs/book/examples/configuration_build/wscript b/docs/book/examples/configuration_build/wscript deleted file mode 100644 index 5a065ec8a7..0000000000 --- a/docs/book/examples/configuration_build/wscript +++ /dev/null @@ -1,24 +0,0 @@ -#! /usr/bin/env python - -""" -The variable conf.env.TOUCH (set by conf.find_program) is re-used during the build - -Try: -$ waf configure clean build -""" - -top = '.' -out = 'build' - -def options(ctx): - ctx.add_option('--foo', action='store', default=False, help='Silly test') - -def configure(ctx): - ctx.env.FOO = ctx.options.foo - ctx.find_program('touch', var='TOUCH', mandatory=True) # a configuration helper - -def build(ctx): - print(ctx.env.TOUCH) - print(ctx.env.FOO) - ctx(rule='${TOUCH} ${TGT}', target='bar.txt') - diff --git a/docs/book/examples/configuration_copysets/wscript b/docs/book/examples/configuration_copysets/wscript deleted file mode 100644 index 97328aa8e7..0000000000 --- a/docs/book/examples/configuration_copysets/wscript +++ /dev/null @@ -1,24 +0,0 @@ -#! 
/usr/bin/env python - -""" -The conf.env object is an instance of waflib.ConfigSet.ConfigSet - -It is used as a serializable dict to hold any kind of useful data -""" - -top = '.' -out = 'build' - -def configure(ctx): - ctx.env.FOO = 'TEST' - node = ctx.path.make_node('test.txt') - - env_copy = ctx.env.derive() - env_copy.store(node.abspath()) - - from waflib.ConfigSet import ConfigSet - env2 = ConfigSet() - env2.load(node.abspath()) - - print(node.read()) - diff --git a/docs/book/examples/configuration_dang/dang.py b/docs/book/examples/configuration_dang/dang.py deleted file mode 100644 index 4c7f6ea5fd..0000000000 --- a/docs/book/examples/configuration_dang/dang.py +++ /dev/null @@ -1,21 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -print('→ loading the dang tool') - -from waflib.Configure import conf - -def options(opt): - opt.add_option('--dang', action='store', default='', dest='dang') - -@conf -def read_dang(ctx): - ctx.start_msg('Checking for DANG') - if ctx.options.dang: - ctx.env.DANG = ctx.options.dang - ctx.end_msg(ctx.env.DANG) - else: - ctx.end_msg('DANG is not set') - -def configure(ctx): - ctx.read_dang() diff --git a/docs/book/examples/configuration_dang/wscript b/docs/book/examples/configuration_dang/wscript deleted file mode 100644 index 8ed9577c75..0000000000 --- a/docs/book/examples/configuration_dang/wscript +++ /dev/null @@ -1,23 +0,0 @@ -#! /usr/bin/env python - -""" -The context methods may execute the same methods from waf tools - -observe how the dang.py file is used and compare - -$ waf configure build -$ waf configure build --dang=test -""" - -top = '.' -out = 'build' - -def options(ctx): - ctx.load('dang', tooldir='.') - -def configure(ctx): - ctx.load('dang', tooldir='.') - -def build(ctx): - print(ctx.env.DANG) - diff --git a/docs/book/examples/configuration_deco/wscript b/docs/book/examples/configuration_deco/wscript deleted file mode 100644 index e4edf743f3..0000000000 --- a/docs/book/examples/configuration_deco/wscript +++ /dev/null @@ -1,25 +0,0 @@ -#! /usr/bin/env python - -""" -The @conf is a python decorator - decorators are used to replace functions - -This particular decorator will bind the function to -the configuration context - -Try: -$ waf configure -""" - -top = '.' -out = 'build' - -from waflib.Configure import conf - -@conf -def hi(ctx): - print('→ hello, world!') - -# hi = conf(hi) - -def configure(ctx): - ctx.hi() diff --git a/docs/book/examples/configuration_exception/wscript b/docs/book/examples/configuration_exception/wscript deleted file mode 100644 index 1df947e343..0000000000 --- a/docs/book/examples/configuration_exception/wscript +++ /dev/null @@ -1,15 +0,0 @@ -#! /usr/bin/env python - -""" -Tests should not rely on return codes and will return exceptions -in case of errors -""" - -top = '.' -out = 'build' - -def configure(ctx): - try: - ctx.find_program('some_app') - except ctx.errors.ConfigurationError: - ctx.to_log('some_app was not found (ignoring)') diff --git a/docs/book/examples/configuration_methods/wscript b/docs/book/examples/configuration_methods/wscript deleted file mode 100644 index b82debedec..0000000000 --- a/docs/book/examples/configuration_methods/wscript +++ /dev/null @@ -1,17 +0,0 @@ -#! /usr/bin/env python - -""" -This example illustrates several configuration methods: -. find_program for finding executables -. check_waf_version to throw a configuration error if the waf version is too old/too new -. find_file for finding files in particular folders -""" - -top = '.' 
-out = 'build' - -def configure(ctx): - ctx.find_program('touch', var='TOUCH') - ctx.check_waf_version(mini='1.6.0') - ctx.find_file('fstab', ['/opt/', '/etc']) - diff --git a/docs/book/examples/configuration_sets/wscript b/docs/book/examples/configuration_sets/wscript deleted file mode 100644 index ebed2344a7..0000000000 --- a/docs/book/examples/configuration_sets/wscript +++ /dev/null @@ -1,25 +0,0 @@ -#! /usr/bin/env python - -""" -The configuration set such as conf.env behave like dicts -Lists are usually stored in them, and may be shared by several -configuration sets. - -For this reason, the methods append_unique, append_value -and prepend_value should be used whenever possible -""" - -top = '.' -out = 'build' - -def configure(ctx): - ctx.env['CFLAGS'] = ['-g'] - ctx.env.CFLAGS = ['-g'] - ctx.env.append_value('CXXFLAGS', ['-O2', '-g']) - ctx.env.append_unique('CFLAGS', ['-g', '-O2']) - ctx.env.prepend_value('CFLAGS', ['-O3']) - - print(type(ctx.env)) - print(ctx.env) - print(ctx.env.FOO) - diff --git a/docs/book/examples/cprog_attributes/wscript b/docs/book/examples/cprog_attributes/wscript deleted file mode 100644 index 83a0fd2ff8..0000000000 --- a/docs/book/examples/cprog_attributes/wscript +++ /dev/null @@ -1,35 +0,0 @@ -#! /usr/bin/env python - -""" -This example demonstrates a few attributes that may be passed to -bld.program, bld.shlib and bld.stlib -""" - -def options(opt): - opt.load('compiler_c') - -def configure(conf): - conf.load('compiler_c') - -def build(bld): - bld.program( - source = 'main.c', - target = 'appname', - features = ['more', 'features', 'here'], - - includes = ['.'], - defines = ['LINUX=1', 'BIDULE'], - - lib = ['m'], - libpath = ['/usr/lib64'], - stlib = ['dl'], - stlibpath = ['/usr/local/lib'], - linkflags = ['-g'], - - install_path = '${SOME_PATH}/bin', # None to disable - vnum = '1.2.3', - ccflags = ['-O2', '-Wall'], - cxxflags = ['-O3'], - rpath = ['/opt/kde/lib'] - ) - diff --git a/docs/book/examples/cprog_cfg_advanced/wscript b/docs/book/examples/cprog_cfg_advanced/wscript deleted file mode 100644 index 155df9cc7c..0000000000 --- a/docs/book/examples/cprog_cfg_advanced/wscript +++ /dev/null @@ -1,22 +0,0 @@ -#! /usr/bin/env python - -""" -The configuration tests such as conf.check* do not have to execute -c/c++ tests. By changing the features, it is possible to turn -almost any kind of build into a configuration test - -Try 'waf configure' and look at the config.log file -""" - -from waflib.TaskGen import feature, before_method - -@feature('special_test') -@before_method('process_source') -def my_special_test(self): - self.bld(rule='touch ${TGT}', target='foo') - self.bld(rule='cp ${SRC} ${TGT}', source='foo', target='bar') - self.source = [] - -def configure(conf): - conf.check_cc(features='special_test', msg='my test!') - diff --git a/docs/book/examples/cprog_conf/wscript b/docs/book/examples/cprog_conf/wscript deleted file mode 100644 index ad569088e8..0000000000 --- a/docs/book/examples/cprog_conf/wscript +++ /dev/null @@ -1,22 +0,0 @@ -#! 
/usr/bin/env python - -def options(opt): - opt.load('compiler_c') - -def configure(conf): - conf.load('compiler_c') - conf.check(header_name='time.h', features='c cprogram') - conf.check_cc(function_name='printf', header_name="stdio.h") - conf.check_cc(fragment='int main() {2+2==4;}\n', define_name="boobah") - conf.check_cc(lib='m', cflags='-Wall', defines=['var=foo', 'x=y'], uselib_store='M') - #conf.check_cxx(lib='linux', use='M', cxxflags='-O2') - - conf.check_cc(fragment=''' - #include - int main() { printf("4"); return 0; } ''', - define_name = "booeah", - execute = True, - define_ret = True, - msg = "Checking for something") - - conf.write_config_header('config.h') diff --git a/docs/book/examples/cprog_fakelibs/main.c b/docs/book/examples/cprog_fakelibs/main.c deleted file mode 100644 index cb3f7482fa..0000000000 --- a/docs/book/examples/cprog_fakelibs/main.c +++ /dev/null @@ -1,3 +0,0 @@ -int main() { - return 0; -} diff --git a/docs/book/examples/cprog_fakelibs/wscript b/docs/book/examples/cprog_fakelibs/wscript deleted file mode 100644 index c6770df7ff..0000000000 --- a/docs/book/examples/cprog_fakelibs/wscript +++ /dev/null @@ -1,18 +0,0 @@ -#! /usr/bin/env python - -""" -Use a system library as if it were part of the project -Among others: -- it may be present in the 'use' argument -- the program will be rebuilt if the library changes (hash) -""" - -def options(opt): - opt.load('compiler_c') - -def configure(conf): - conf.load('compiler_c') - -def build(bld): - bld.read_shlib('m', paths=['/usr/lib64']) - bld.program(source='main.c', target='app', use='m') diff --git a/docs/book/examples/cprog_incdirs/a.c b/docs/book/examples/cprog_incdirs/a.c deleted file mode 100644 index a91cc3b130..0000000000 --- a/docs/book/examples/cprog_incdirs/a.c +++ /dev/null @@ -1,5 +0,0 @@ -#include "a.h" - -int foo() { - return ke; -} diff --git a/docs/book/examples/cprog_incdirs/main.c b/docs/book/examples/cprog_incdirs/main.c deleted file mode 100644 index cb3f7482fa..0000000000 --- a/docs/book/examples/cprog_incdirs/main.c +++ /dev/null @@ -1,3 +0,0 @@ -int main() { - return 0; -} diff --git a/docs/book/examples/cprog_incdirs/src/a.h b/docs/book/examples/cprog_incdirs/src/a.h deleted file mode 100644 index 6082b66c61..0000000000 --- a/docs/book/examples/cprog_incdirs/src/a.h +++ /dev/null @@ -1 +0,0 @@ -int ke = 55; diff --git a/docs/book/examples/cprog_incdirs/wscript b/docs/book/examples/cprog_incdirs/wscript deleted file mode 100644 index c34c8a7ddb..0000000000 --- a/docs/book/examples/cprog_incdirs/wscript +++ /dev/null @@ -1,29 +0,0 @@ -#! /usr/bin/env python - -""" -include paths added by export_includes are propagated to other targets -""" - -def options(opt): - opt.load('compiler_c') - -def configure(conf): - conf.load('compiler_c') - -def build(bld): - bld( - includes = '. src', - export_includes = 'src', - name = 'com_includes') - - bld.stlib( - source = 'a.c', - target = 'shlib1', - use = 'com_includes') - - bld.program( - source = 'main.c', - target = 'app', - use = 'shlib1', - ) - diff --git a/docs/book/examples/cprog_objects/a.c b/docs/book/examples/cprog_objects/a.c deleted file mode 100644 index d4e4ecb632..0000000000 --- a/docs/book/examples/cprog_objects/a.c +++ /dev/null @@ -1 +0,0 @@ -int k = 44; diff --git a/docs/book/examples/cprog_objects/wscript b/docs/book/examples/cprog_objects/wscript deleted file mode 100644 index 4095250f64..0000000000 --- a/docs/book/examples/cprog_objects/wscript +++ /dev/null @@ -1,28 +0,0 @@ -#! 
/usr/bin/env python - -""" -Compile some files with -O2 and others with -O3 -""" - -def options(opt): - opt.load('compiler_c') - -def configure(conf): - conf.load('compiler_c') - -def build(bld): - bld.objects( - source = 'test.c', - ccflags = '-O3', - target = 'my_objs') - - bld.shlib( - source = 'a.c', - ccflags = '-O2', - target = 'lib1', - use = 'my_objs') - - bld.program( - source = 'main.c', - target = 'test_c_program', - use = 'lib1') diff --git a/docs/book/examples/cprog_pkgconfig/wscript b/docs/book/examples/cprog_pkgconfig/wscript deleted file mode 100644 index db6220bcc8..0000000000 --- a/docs/book/examples/cprog_pkgconfig/wscript +++ /dev/null @@ -1,24 +0,0 @@ -#! /usr/bin/env python - -def options(opt): - opt.load('compiler_c') - -def configure(conf): - conf.load('compiler_c') - - conf.check_cfg(atleast_pkgconfig_version='0.0.0') - pango_version = conf.check_cfg(modversion='pango') - - conf.check_cfg(package='pango') - conf.check_cfg(package='pango', uselib_store='MYPANGO', - args=['--cflags', '--libs']) - - conf.check_cfg(package='pango', - args=['pango >= 0.1.0', 'pango < 9.9.9', '--cflags', '--libs'], - msg="Checking for 'pango 0.1.0'") - - conf.check_cfg(path='sdl-config', args='--cflags --libs', - package='', uselib_store='SDL') - conf.check_cfg(path='mpicc', args='--showme:compile --showme:link', - package='', uselib_store='OPEN_MPI', mandatory=False) - diff --git a/docs/book/examples/cprog_propagation/a.c b/docs/book/examples/cprog_propagation/a.c deleted file mode 100644 index d4e4ecb632..0000000000 --- a/docs/book/examples/cprog_propagation/a.c +++ /dev/null @@ -1 +0,0 @@ -int k = 44; diff --git a/docs/book/examples/cprog_propagation/c.c b/docs/book/examples/cprog_propagation/c.c deleted file mode 100644 index 8a50df4448..0000000000 --- a/docs/book/examples/cprog_propagation/c.c +++ /dev/null @@ -1 +0,0 @@ -int kde = 4.5; diff --git a/docs/book/examples/cprog_propagation/main.c b/docs/book/examples/cprog_propagation/main.c deleted file mode 100644 index a23183e1e9..0000000000 --- a/docs/book/examples/cprog_propagation/main.c +++ /dev/null @@ -1,5 +0,0 @@ -int k; - -int main() { - return k; -} diff --git a/docs/book/examples/cprog_propagation/wscript b/docs/book/examples/cprog_propagation/wscript deleted file mode 100644 index ecc5203d81..0000000000 --- a/docs/book/examples/cprog_propagation/wscript +++ /dev/null @@ -1,50 +0,0 @@ -#! /usr/bin/env python - -""" -The program below will link against all other libraries (except the static one) -""" - -def options(opt): - opt.load('compiler_c') - -def configure(conf): - conf.load('compiler_c') - -def build(bld): - bld.shlib( - source = 'a.c', - target = 'lib1') - - bld.stlib( - source = 'b.c', - use = 'cshlib', # add the shared library flags - target = 'lib2') - - bld.shlib( - source = 'c.c', - target = 'lib3', - use = 'lib1 lib2') - - bld.program( - source = 'main.c', - target = 'app', - use = 'lib3') - - """ - The static library from this example is completely useless, and will add the -fPIC - flags to the program which might be annoying. 
It will be much better - to get rid of those static libraries but if you cannot live without them, use the following: - """ - - from waflib.TaskGen import feature, after_method - @feature('c', 'cxx') - @after_method('propagate_uselib_vars', 'process_use') - def skip_cshlib_or_cxxshlib(self): - self.uselib = self.to_list(getattr(self, 'uselib', [])) - self.use = self.to_list(getattr(self, 'use', [])) - for x in ('cshlib', 'cxxshlib', 'dshlib'): - while x in self.uselib: - self.uselib.remove(x) - while x in self.use: - self.use.remove(x) - diff --git a/docs/book/examples/cprog_system/test_staticlib.c b/docs/book/examples/cprog_system/test_staticlib.c deleted file mode 100644 index e590264df4..0000000000 --- a/docs/book/examples/cprog_system/test_staticlib.c +++ /dev/null @@ -1 +0,0 @@ -int k = 334; diff --git a/docs/book/examples/cprog_system/wscript b/docs/book/examples/cprog_system/wscript deleted file mode 100644 index fae5244b3a..0000000000 --- a/docs/book/examples/cprog_system/wscript +++ /dev/null @@ -1,34 +0,0 @@ -#! /usr/bin/env python - -""" -System libraries may be linked by setting flags into the variables XYZ_NAME -where NAME is added to the use keyword, and XYZ can be DEFINES, INCLUDES, LINKFLAGS, etc -""" - -import sys - -def options(opt): - opt.load('compiler_c') - -def configure(conf): - conf.load('compiler_c') - - conf.env.INCLUDES_TEST = ['/usr/include'] - - if sys.platform != 'win32': - conf.env.DEFINES_TEST = ['TEST'] - conf.env.CFLAGS_TEST = ['-O0'] - conf.env.LIB_TEST = ['m'] - conf.env.LIBPATH_TEST = ['/usr/lib'] - conf.env.LINKFLAGS_TEST = ['-g'] - conf.env.INCLUDES_TEST = ['/opt/gnome/include'] - -def build(bld): - mylib = bld.stlib( - source = 'test_staticlib.c', - target = 'teststaticlib', - use = 'TEST') - - if mylib.env.CC_NAME == 'gcc': - mylib.cxxflags = ['-O2'] - diff --git a/docs/book/examples/cprog_use/main.c b/docs/book/examples/cprog_use/main.c deleted file mode 100644 index a23183e1e9..0000000000 --- a/docs/book/examples/cprog_use/main.c +++ /dev/null @@ -1,5 +0,0 @@ -int k; - -int main() { - return k; -} diff --git a/docs/book/examples/cprog_use/test_staticlib.c b/docs/book/examples/cprog_use/test_staticlib.c deleted file mode 100644 index 75b2934197..0000000000 --- a/docs/book/examples/cprog_use/test_staticlib.c +++ /dev/null @@ -1 +0,0 @@ -int k = 32; diff --git a/docs/book/examples/cprog_use/wscript b/docs/book/examples/cprog_use/wscript deleted file mode 100644 index 484b551e26..0000000000 --- a/docs/book/examples/cprog_use/wscript +++ /dev/null @@ -1,23 +0,0 @@ -#! 
/usr/bin/env python - -""" -Link against existing libraries from the project -""" - -def options(opt): - opt.load('compiler_c') - -def configure(conf): - conf.load('compiler_c') - -def build(bld): - bld.stlib( - source = 'test_staticlib.c', - target = 'mylib', - name = 'stlib1') - - bld.program( - source = 'main.c', - target = 'app', - includes = '.', - use = ['stlib1']) diff --git a/docs/book/examples/cprog_wrappers/a.c b/docs/book/examples/cprog_wrappers/a.c deleted file mode 100644 index 2b64da6be9..0000000000 --- a/docs/book/examples/cprog_wrappers/a.c +++ /dev/null @@ -1 +0,0 @@ -int a = 32; diff --git a/docs/book/examples/cprog_wrappers/b.c b/docs/book/examples/cprog_wrappers/b.c deleted file mode 100644 index bf54fb27a5..0000000000 --- a/docs/book/examples/cprog_wrappers/b.c +++ /dev/null @@ -1 +0,0 @@ -int b = 32; diff --git a/docs/book/examples/cprog_wrappers/c.c b/docs/book/examples/cprog_wrappers/c.c deleted file mode 100644 index aa920a1203..0000000000 --- a/docs/book/examples/cprog_wrappers/c.c +++ /dev/null @@ -1 +0,0 @@ -int c = 32; diff --git a/docs/book/examples/cprog_wrappers/main.c b/docs/book/examples/cprog_wrappers/main.c deleted file mode 100644 index cb3f7482fa..0000000000 --- a/docs/book/examples/cprog_wrappers/main.c +++ /dev/null @@ -1,3 +0,0 @@ -int main() { - return 0; -} diff --git a/docs/book/examples/cprog_wrappers/wscript b/docs/book/examples/cprog_wrappers/wscript deleted file mode 100644 index c28478aefb..0000000000 --- a/docs/book/examples/cprog_wrappers/wscript +++ /dev/null @@ -1,20 +0,0 @@ -#! /usr/bin/env python - -""" -The 4 wrappers program, stlib, shlib and objects are aliases for bld(features='..', ..) -where the features can be -c, cshlib, cstlib, cprogram, cxx, cxxshlib, cxxstlib, cxxprogram, d, dshlib, ... -""" - -def options(opt): - opt.load('compiler_c') - -def configure(conf): - conf.load('compiler_c') - -def build(bld): - bld.program(source='main.c', target='app', use='myshlib mystlib') - bld.stlib(source='a.c', target='mystlib') - bld.shlib(source='b.c', target='myshlib', use='myobjects') - bld.objects(source='c.c', target='myobjects') - diff --git a/docs/book/examples/execution_build/wscript b/docs/book/examples/execution_build/wscript deleted file mode 100644 index 9de0436a47..0000000000 --- a/docs/book/examples/execution_build/wscript +++ /dev/null @@ -1,19 +0,0 @@ -#! /usr/bin/env python - -""" -A simple build - the file foo.txt is used both as target and as source -Note that the correct build order is computed automatically - -$ waf configure clean build -""" - -top = '.' -out = 'build_directory' - -def configure(ctx): - pass - -def build(ctx): - ctx(rule='touch ${TGT}', target='foo.txt') - ctx(rule='cp ${SRC} ${TGT}', source='foo.txt', target='bar.txt') - diff --git a/docs/book/examples/execution_cmd/wscript b/docs/book/examples/execution_cmd/wscript deleted file mode 100644 index e0facb7680..0000000000 --- a/docs/book/examples/execution_cmd/wscript +++ /dev/null @@ -1,19 +0,0 @@ -#! /usr/bin/env python - -""" -Several build commands are defined by default, for example: -$ waf configure clean build list install uninstall -""" - -top = '.' 
-out = 'build_directory' - -def configure(ctx): - print(ctx.cmd) - -def build(ctx): - if ctx.cmd == 'clean': - print('cleaning!') - else: - print(ctx.cmd) - diff --git a/docs/book/examples/execution_configure/src/wscript b/docs/book/examples/execution_configure/src/wscript deleted file mode 100644 index 981fe128ba..0000000000 --- a/docs/book/examples/execution_configure/src/wscript +++ /dev/null @@ -1,4 +0,0 @@ -#! /usr/bin/env python - -def ping(ctx): - print('→ ping from ' + ctx.path.abspath()) diff --git a/docs/book/examples/execution_configure/wscript b/docs/book/examples/execution_configure/wscript deleted file mode 100644 index d1cbb33621..0000000000 --- a/docs/book/examples/execution_configure/wscript +++ /dev/null @@ -1,15 +0,0 @@ -#! /usr/bin/env python - -""" -$ waf configure ping -""" - -top = '.' -out = 'build_directory' - -def configure(ctx): - print('→ configuring the project in ' + ctx.path.abspath()) - -def ping(ctx): - print('→ ping from ' + ctx.path.abspath()) - ctx.recurse('src') diff --git a/docs/book/examples/execution_dist/wscript b/docs/book/examples/execution_dist/wscript deleted file mode 100644 index c288934028..0000000000 --- a/docs/book/examples/execution_dist/wscript +++ /dev/null @@ -1,22 +0,0 @@ -#! /usr/bin/env python - -""" -$ waf configure dist -""" - -APPNAME = 'webe' -VERSION = '1.0' - -top = '.' -out = 'build_directory' - -def configure(ctx): - print('→ configuring the project in ' + ctx.path.abspath()) - -def dist(ctx): - ctx.base_name = 'foo_2.0' - #ctx.base_path = ctx.path.find_node('build_directory') - ctx.algo = 'zip' - ctx.excl = ' **/.waf-1* **/*~ **/*.pyc **/*.swp **/.lock-w*' - ctx.files = ctx.path.ant_glob('**/wscript') - diff --git a/docs/book/examples/execution_hello/wscript b/docs/book/examples/execution_hello/wscript deleted file mode 100644 index f2c1a8db8e..0000000000 --- a/docs/book/examples/execution_hello/wscript +++ /dev/null @@ -1,10 +0,0 @@ -#! /usr/bin/env python - -""" -Move the folder to /tmp for example, and execute -$ waf hello -""" - -def hello(ctx): - print('hello, world!') - diff --git a/docs/book/examples/execution_ping/wscript b/docs/book/examples/execution_ping/wscript deleted file mode 100644 index f2971e55b4..0000000000 --- a/docs/book/examples/execution_ping/wscript +++ /dev/null @@ -1,14 +0,0 @@ -#! /usr/bin/env python - -""" -Move the folder to /tmp for example and call -$ waf ping pong - -The context objects 'ctx' are different instances -""" - -def ping(ctx): - print(' ping! %d' % id(ctx)) - -def pong(ctx): - print(' pong! %d' % id(ctx)) diff --git a/docs/book/examples/execution_recurse/src/wscript b/docs/book/examples/execution_recurse/src/wscript deleted file mode 100644 index c2857340c2..0000000000 --- a/docs/book/examples/execution_recurse/src/wscript +++ /dev/null @@ -1,4 +0,0 @@ -#! /usr/bin/env python - -def ping(ctx): - print('→ ping from ' + ctx.path.abspath()) diff --git a/docs/book/examples/execution_recurse/wscript b/docs/book/examples/execution_recurse/wscript deleted file mode 100644 index d53e019a9e..0000000000 --- a/docs/book/examples/execution_recurse/wscript +++ /dev/null @@ -1,16 +0,0 @@ -#! /usr/bin/env python - -""" -The following command: -$ waf ping -Will not give the expected result because this folder belongs to a bigger project. 
-Call 'configure' first: -$ waf configure -Then after: -$ waf ping -""" - -def ping(ctx): - print('→ ping from ' + ctx.path.abspath()) - ctx.recurse('src') - diff --git a/docs/book/examples/nodes_ant_glob/wscript b/docs/book/examples/nodes_ant_glob/wscript deleted file mode 100644 index 628a639e1d..0000000000 --- a/docs/book/examples/nodes_ant_glob/wscript +++ /dev/null @@ -1,19 +0,0 @@ -#! /usr/bin/env python - -""" -A few examples of ant_glob. Try -$ waf configure dosomething -""" - -top = '.' -out = 'build' - -def configure(ctx): - pass - -def dosomething(ctx): - print(ctx.path.ant_glob('wsc*')) - print(ctx.path.ant_glob('w?cr?p?')) - print(ctx.root.ant_glob('usr/include/**/zlib*', dir=False, src=True)) - print(ctx.path.ant_glob(['**/*py'], excl=['**/default*'])) - diff --git a/docs/book/examples/nodes_cache/wscript b/docs/book/examples/nodes_cache/wscript deleted file mode 100644 index 6472dc7d38..0000000000 --- a/docs/book/examples/nodes_cache/wscript +++ /dev/null @@ -1,18 +0,0 @@ -#! /usr/bin/env python - -""" -The node objects behave like singletons (one node <-> one path) - -Try: -$ waf configure dosomething -""" - -top = '.' -out = 'build' - -def configure(ctx): - pass - -def dosomething(ctx): - print(ctx.root.children) - diff --git a/docs/book/examples/nodes_search/wscript b/docs/book/examples/nodes_search/wscript deleted file mode 100644 index eacf402689..0000000000 --- a/docs/book/examples/nodes_search/wscript +++ /dev/null @@ -1,33 +0,0 @@ -#! /usr/bin/env python - -""" -find_node -> create or return a node corresponding to an existing path -make_node -> create a node object, even if there is no folder or file -search -> search for a node in the existing node hierarchy (do not ask the file system) - -Try: -$ waf configure dosomething -""" - -top = '.' -out = 'build' - -def configure(ctx): - pass - -def dosomething(ctx): - print(ctx.path.find_node('wscript')) - - nd1 = ctx.path.make_node('foo.txt') - print(nd1) - - nd2 = ctx.path.search('foo.txt') - print(nd2) - - nd3 = ctx.path.search('bar.txt') - print(nd3) - - nd2.write('some text') - print(nd2.read()) - - print(ctx.path.listdir()) diff --git a/docs/book/examples/nodes_tree/wscript b/docs/book/examples/nodes_tree/wscript deleted file mode 100644 index e8f4f252fb..0000000000 --- a/docs/book/examples/nodes_tree/wscript +++ /dev/null @@ -1,21 +0,0 @@ -#! /usr/bin/env python - -""" -This example illustrates how to navigate in the node tree - -Try: -$ waf configure dosomething -""" - -top = '.' -out = 'build' - -def configure(ctx): - pass - -def dosomething(ctx): - print(ctx.path.abspath()) - print(ctx.root.abspath()) - print("ctx.path contents %r" % (ctx.path.children)) - print("ctx.path parent %r" % ctx.path.parent.abspath()) - print("ctx.root parent %r" % ctx.root.parent) diff --git a/docs/book/examples/rules_function/wscript b/docs/book/examples/rules_function/wscript deleted file mode 100644 index 84f623193e..0000000000 --- a/docs/book/examples/rules_function/wscript +++ /dev/null @@ -1,32 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -""" -Task generators with a rule= attribute will create a single task -that will execute the corresponding function. The rule below is -equivalent to rule='cp ${SRC} ${TGT}' - -Try: -$ waf configure clean build -""" - -top = '.' 
-out = 'build' - -def configure(conf): - pass - -def build(bld): - def run(task): - src = task.inputs[0].abspath() - tgt = task.outputs[0].abspath() - cmd = 'cp %s %s' % (src, tgt) - print(cmd) - return task.generator.bld.exec_command(cmd) - - bld( - rule = run, - source = 'wscript', - target = 'same.txt', - ) - diff --git a/docs/book/examples/rules_simple/wscript b/docs/book/examples/rules_simple/wscript deleted file mode 100644 index 66d2e7794a..0000000000 --- a/docs/book/examples/rules_simple/wscript +++ /dev/null @@ -1,23 +0,0 @@ -#! /usr/bin/env python - -""" -A task generator that will copy a file from the source directory -to another file in the build directory - -Try: -$ waf configure clean build -""" - -top = '.' -out = 'build' - -def configure(conf): - pass - -def build(bld): - bld( - rule = 'cp ${SRC} ${TGT}', - source = 'wscript', - target = 'foobar.txt', - ) - diff --git a/docs/book/examples/scenarios_compiler/a.src b/docs/book/examples/scenarios_compiler/a.src deleted file mode 100644 index 75b2934197..0000000000 --- a/docs/book/examples/scenarios_compiler/a.src +++ /dev/null @@ -1 +0,0 @@ -int k = 32; diff --git a/docs/book/examples/scenarios_compiler/comp.cpp b/docs/book/examples/scenarios_compiler/comp.cpp deleted file mode 100644 index 13520919ca..0000000000 --- a/docs/book/examples/scenarios_compiler/comp.cpp +++ /dev/null @@ -1,40 +0,0 @@ -#include -#include - -using namespace std; - -int main(int argc, char**argv) -{ - if (argc != 3) { - cout<<"usage ./comp in out"< -#include - -void ping() { - printf("Project compiled: %s %s\n", __DATE__, __TIME__); -} diff --git a/docs/book/examples/scenarios_end/main.c b/docs/book/examples/scenarios_end/main.c deleted file mode 100644 index f4be97cfb3..0000000000 --- a/docs/book/examples/scenarios_end/main.c +++ /dev/null @@ -1,8 +0,0 @@ -#include "a.h" - -int main() { - ping(); - return 0; -} - - diff --git a/docs/book/examples/scenarios_end/test_staticlib.c b/docs/book/examples/scenarios_end/test_staticlib.c deleted file mode 100644 index a2449ef559..0000000000 --- a/docs/book/examples/scenarios_end/test_staticlib.c +++ /dev/null @@ -1,3 +0,0 @@ -void foo() {} - - diff --git a/docs/book/examples/scenarios_end/wscript b/docs/book/examples/scenarios_end/wscript deleted file mode 100644 index 09fa1fec0e..0000000000 --- a/docs/book/examples/scenarios_end/wscript +++ /dev/null @@ -1,62 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2006-2010 (ita) - -""" -Compile 'about.c' after all other c tasks have been compiled - -$ waf configure clean build -""" - -VERSION='1.0.1' -APPNAME='cc_test' - -top = '.' 
-out = 'build' - -def options(opt): - opt.load('compiler_c') - -def configure(conf): - conf.load('compiler_c') - -def build(bld): - bld.program( - source = 'main.c about.c', - target = 'app', - includes = '.', - use = 'my_static_lib') - - bld.stlib( - source = 'test_staticlib.c', - target = 'my_static_lib') - -import os -from waflib import Task -def runnable_status(self): - if self.inputs[0].name == 'about.c': - h = 0 - for g in self.generator.bld.groups: - for tg in g: - if isinstance(tg, Task.TaskBase): - continue - h = hash((self.generator.bld.hash_env_vars(self.generator.env, ['LINKFLAGS']), h)) - for tsk in getattr(tg, 'compiled_tasks', []): # all .c or .cpp compilations - if id(tsk) == id(self): - # but not 'about.c' (skip other tasks too if necessary) - continue - if not tsk.hasrun: - return Task.ASK_LATER - h = hash((tsk.signature(), h)) - self.env.CCDEPS = h - - try: - os.stat(self.generator.link_task.outputs[0].abspath()) - except: - return Task.RUN_ME - - return Task.Task.runnable_status(self) - -from waflib.Tools.c import c -c.runnable_status = runnable_status - diff --git a/docs/book/examples/scenarios_expansion/wscript b/docs/book/examples/scenarios_expansion/wscript deleted file mode 100644 index cec56c81fb..0000000000 --- a/docs/book/examples/scenarios_expansion/wscript +++ /dev/null @@ -1,44 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2006-2010 (ita) - -""" -Add a task generator method that will expand ${} expressons from various -attributes such as includes, target, source, etc -""" - -VERSION='0.0.1' -APPNAME='cc_test' - -top = '.' -out = 'build' - -def options(opt): - opt.load('compiler_c') - -def configure(conf): - conf.load('compiler_c') - -def build(bld): - bld.env.FOO = '/usr/includes' - bld.env.MAIN = 'main.c' - bld( - features = 'c cprogram', - source = '${MAIN}', - target = 'app', - includes = '. ${FOO}') - -from waflib import Utils, TaskGen -@TaskGen.feature('*') -@TaskGen.before('process_source', 'process_rule', 'apply_incpaths', 'apply_link') -def transform_strings(self): - for x in 'includes target source use libpath linkflags'.split(): - val = getattr(self, x, None) - if val: - if isinstance(val, str): - setattr(self, x, Utils.subst_vars(val, self.env)) - elif isinstance(val, list): - for i in range(len(val)): - if isinstance(val[i], str): - val[i] = Utils.subst_vars(val[i], self.env) - diff --git a/docs/book/examples/scenarios_idl/foo.idl b/docs/book/examples/scenarios_idl/foo.idl deleted file mode 100644 index c2b636b906..0000000000 --- a/docs/book/examples/scenarios_idl/foo.idl +++ /dev/null @@ -1,3 +0,0 @@ -// not a real idl file -int k = 31492; - diff --git a/docs/book/examples/scenarios_idl/main.cpp b/docs/book/examples/scenarios_idl/main.cpp deleted file mode 100644 index b1f1eb6efd..0000000000 --- a/docs/book/examples/scenarios_idl/main.cpp +++ /dev/null @@ -1,6 +0,0 @@ -#include "foo.hpp" - -int main() { - return 0; -} - diff --git a/docs/book/examples/scenarios_idl/wscript b/docs/book/examples/scenarios_idl/wscript deleted file mode 100644 index c8abcf999b..0000000000 --- a/docs/book/examples/scenarios_idl/wscript +++ /dev/null @@ -1,35 +0,0 @@ -#! /usr/bin/env python - -""" -An idl task -$ waf configure clean build -""" - -top = '.' -out = 'build' - -def configure(conf): - conf.load('g++') - -def build(bld): - bld.program( - source = 'foo.idl main.cpp', - target = 'myapp', - includes = '.' 
- ) - -from waflib.Task import Task -from waflib.TaskGen import extension - -class idl(Task): - run_str = 'cp ${SRC} ${TGT[0].abspath()} && touch ${TGT[1].abspath()}' - color = 'BLUE' - ext_out = ['.h'] - -@extension('.idl') -def process_idl(self, node): - cpp_node = node.change_ext('.cpp') - hpp_node = node.change_ext('.hpp') - self.create_task('idl', node, [cpp_node, hpp_node]) - self.source.append(cpp_node) - diff --git a/docs/book/examples/scenarios_idl2/foo.idl b/docs/book/examples/scenarios_idl2/foo.idl deleted file mode 100644 index c2b636b906..0000000000 --- a/docs/book/examples/scenarios_idl2/foo.idl +++ /dev/null @@ -1,3 +0,0 @@ -// not a real idl file -int k = 31492; - diff --git a/docs/book/examples/scenarios_idl2/main.cpp b/docs/book/examples/scenarios_idl2/main.cpp deleted file mode 100644 index b1f1eb6efd..0000000000 --- a/docs/book/examples/scenarios_idl2/main.cpp +++ /dev/null @@ -1,6 +0,0 @@ -#include "foo.hpp" - -int main() { - return 0; -} - diff --git a/docs/book/examples/scenarios_idl2/wscript b/docs/book/examples/scenarios_idl2/wscript deleted file mode 100644 index bfc608eca5..0000000000 --- a/docs/book/examples/scenarios_idl2/wscript +++ /dev/null @@ -1,48 +0,0 @@ -#! /usr/bin/env python - -""" -share the idl outputs by other task generators -$ waf configure clean build -""" - -top = '.' -out = 'build' - -def configure(ctx): - ctx.load('g++') - -def build(ctx): - ctx( - source = 'foo.idl', - name = 'idl_gen') - - ctx.program( - source = ['main.cpp'], - target = 'testprog', - includes = '.', - add_idl = 'idl_gen') - -from waflib.Task import Task -from waflib.TaskGen import feature, before_method, extension - -class idl(Task): - run_str = 'cp ${SRC} ${TGT[0].abspath()} && touch ${TGT[1].abspath()}' - color = 'BLUE' - ext_out = ['.h'] - -@extension('.idl') -def process_idl(self, node): - cpp_node = node.change_ext('.cpp') - hpp_node = node.change_ext('.hpp') - self.create_task('idl', node, [cpp_node, hpp_node]) - self.more_source = [cpp_node] - -@feature('*') -@before_method('process_source') -def process_add_source(self): - for x in self.to_list(getattr(self, 'add_idl', [])): - y = self.bld.get_tgen_by_name(x) - y.post() - if getattr(y, 'more_source', None): - self.source.extend(y.more_source) - diff --git a/docs/book/examples/scenarios_impfile/wscript b/docs/book/examples/scenarios_impfile/wscript deleted file mode 100644 index da4348b956..0000000000 --- a/docs/book/examples/scenarios_impfile/wscript +++ /dev/null @@ -1,22 +0,0 @@ -#! /usr/bin/env python - -""" -Create a file in the build directory before the build starts -""" - -cfg_file = 'somedir/foo.txt' - -def configure(conf): - - orig = conf.root.find_node('/etc/fstab') - txt = orig.read() - - dest = conf.bldnode.make_node(cfg_file) - dest.parent.mkdir() - dest.write(txt) - - conf.env.append_value('cfg_files', dest.abspath()) - -def build(ctx): - ctx(rule='cp ${SRC} ${TGT}', source=cfg_file, target='bar.txt') - diff --git a/docs/book/examples/scenarios_incflags/main.cpp b/docs/book/examples/scenarios_incflags/main.cpp deleted file mode 100644 index a46866d92e..0000000000 --- a/docs/book/examples/scenarios_incflags/main.cpp +++ /dev/null @@ -1,4 +0,0 @@ -int main() -{ - return 0; -} diff --git a/docs/book/examples/scenarios_incflags/wscript b/docs/book/examples/scenarios_incflags/wscript deleted file mode 100644 index 1fa37e1935..0000000000 --- a/docs/book/examples/scenarios_incflags/wscript +++ /dev/null @@ -1,29 +0,0 @@ -#! 
/usr/bin/env python - -""" -special include flags -$ waf configure clean build -""" - -top = '.' -out = 'build' - -def configure(conf): - conf.load('g++') - -def build(bld): - bld.program(features='cxx cxxprogram', source='main.cpp', target='test') - -from waflib.TaskGen import after, feature - -@feature('cxx') -@after('apply_incpaths') -def insert_blddir(self): - self.env.prepend_value('INCPATHS', '.') - -@feature('cxx') -@after('apply_incpaths', 'insert_blddir') -def insert_srcdir(self): - path = self.bld.srcnode.abspath() - self.env.prepend_value('INCPATHS', path) - diff --git a/docs/book/examples/scenarios_unknown/evil_comp.py b/docs/book/examples/scenarios_unknown/evil_comp.py deleted file mode 100755 index 84769daf5e..0000000000 --- a/docs/book/examples/scenarios_unknown/evil_comp.py +++ /dev/null @@ -1,42 +0,0 @@ -#! /usr/bin/env python - -""" -example of an ill-behaving compiler -* the output files cannot be known in advance -* the output file names are written to stdout -""" - -import sys, os - -def write_file(filename, contents): - a_file = None - try: - a_file = open(filename, 'w') - a_file.write(contents) - finally: - if a_file: - a_file.close() - -name = sys.argv[1] -file = open(name, 'r') -txt = file.read() -file.close() - -lst = txt.splitlines() -for line in lst: - source_filename = line.strip() - if not source_filename: continue - (dirs, name) = os.path.split(source_filename) - try: - os.makedirs(dirs) - except: - pass - - header_filename = os.path.splitext(source_filename)[0] + '.h' - varname = name.replace('.', '_') - write_file(header_filename, 'int %s=4;\n' % varname) - write_file(source_filename, '#include "%s"\nint get_%s() {return %s;}\n' % (os.path.split(header_filename)[1], varname, varname)) - - print (source_filename) - print (header_filename) - diff --git a/docs/book/examples/scenarios_unknown/foo.src b/docs/book/examples/scenarios_unknown/foo.src deleted file mode 100644 index ef4f68cda4..0000000000 --- a/docs/book/examples/scenarios_unknown/foo.src +++ /dev/null @@ -1,2 +0,0 @@ -shpip/a12.c -shpop/a13.c diff --git a/docs/book/examples/scenarios_unknown/mytool.py b/docs/book/examples/scenarios_unknown/mytool.py deleted file mode 100644 index 252261cbb1..0000000000 --- a/docs/book/examples/scenarios_unknown/mytool.py +++ /dev/null @@ -1,65 +0,0 @@ -#! 
/usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2008-2010 (ita) - -import os -from waflib import Task, Utils, Context -from waflib.Utils import subprocess -from waflib.TaskGen import extension - -@extension('.src') -def process_shpip(self, node): - self.create_task('src2c', node) - -class src2c(Task.Task): - color = 'PINK' - quiet = 1 - ext_out = ['.h'] - - def run(self): - cmd = '%s %s' % (self.env.COMP, self.inputs[0].abspath()) - cwd = self.inputs[0].parent.get_bld().abspath() - out = self.generator.bld.cmd_and_log(cmd, cwd=cwd, quiet=Context.STDOUT) - - out = Utils.to_list(out) - self.outputs = [self.generator.path.find_or_declare(x) for x in out] - self.generator.bld.raw_deps[self.uid()] = [self.signature()] + self.outputs - self.add_c_tasks(self.outputs) - - def add_c_tasks(self, lst): - self.more_tasks = [] - for node in lst: - if node.name.endswith('.h'): - continue - tsk = self.generator.create_compiled_task('c', node) - self.more_tasks.append(tsk) - - tsk.env.append_value('INCPATHS', [node.parent.abspath()]) - - if getattr(self.generator, 'link_task', None): - self.generator.link_task.set_run_after(tsk) - self.generator.link_task.inputs.append(tsk.outputs[0]) - self.generator.link_task.inputs.sort(key=lambda x: x.abspath()) - - def runnable_status(self): - - ret = super(src2c, self).runnable_status() - if ret == Task.SKIP_ME: - - lst = self.generator.bld.raw_deps[self.uid()] - if lst[0] != self.signature(): - return Task.RUN_ME - - nodes = lst[1:] - for x in nodes: - try: - os.stat(x.abspath()) - except: - return Task.RUN_ME - - nodes = lst[1:] - self.set_outputs(nodes) - self.add_c_tasks(nodes) - - return ret - diff --git a/docs/book/examples/scenarios_unknown/wscript b/docs/book/examples/scenarios_unknown/wscript deleted file mode 100644 index 6bf400fffb..0000000000 --- a/docs/book/examples/scenarios_unknown/wscript +++ /dev/null @@ -1,20 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2008-2010 (ita) - -""" -a compiler that creates files unknown in advance -see mytool.py -""" - -top = '.' -out = 'build' - -def configure(conf): - conf.load('gcc') - conf.load('mytool', tooldir='.') - -def build(bld): - bld.env.COMP = bld.path.find_resource('evil_comp.py').abspath() - bld.stlib(source='x.c foo.src', target='astaticlib') - diff --git a/docs/book/examples/scenarios_unknown/x.c b/docs/book/examples/scenarios_unknown/x.c deleted file mode 100644 index e5588a62fa..0000000000 --- a/docs/book/examples/scenarios_unknown/x.c +++ /dev/null @@ -1 +0,0 @@ -int k=3; diff --git a/docs/book/examples/tasks_copy/wscript b/docs/book/examples/tasks_copy/wscript deleted file mode 100644 index 0cd3350c7b..0000000000 --- a/docs/book/examples/tasks_copy/wscript +++ /dev/null @@ -1,22 +0,0 @@ -#! /usr/bin/env python - -""" -Simple copy task - -The attribute 'run_str' is compiled into the task method 'run' by a metaclass -""" - -def configure(ctx): - pass - -def build(ctx): - from waflib import Task - class copy(Task.Task): - run_str = 'cp ${SRC} ${TGT}' - copy = Task.always_run(copy) - - tsk = copy(env=ctx.env) - tsk.set_inputs(ctx.path.find_resource('wscript')) - tsk.set_outputs(ctx.path.find_or_declare('b.out')) - ctx.add_to_group(tsk) - diff --git a/docs/book/examples/tasks_groups/wscript b/docs/book/examples/tasks_groups/wscript deleted file mode 100644 index 1c58d20fb4..0000000000 --- a/docs/book/examples/tasks_groups/wscript +++ /dev/null @@ -1,38 +0,0 @@ -#! 
/usr/bin/env python - -""" -waf configure clean build -j4 --dwidth=800 --dtitle='Parallel build representation for "waf -j4"' - -for this to work, make sure to either use waf git or to create waf with './waf-light --make-waf --tools=compat15,parallel_debug' -""" - -def options(ctx): - ctx.load('parallel_debug') - -def configure(ctx): - ctx.load('parallel_debug') - -def build(ctx): - for i in range(8): - ctx(rule='cp ${SRC} ${TGT}', source='wscript', target='wscript_a_%d' % i, - color='YELLOW', name='tasks a') - ctx(rule='cp ${SRC} ${TGT}', source='wscript_a_%d' % i, target='wscript_b_%d' % i, - color='GREEN', name='tasks b') - ctx.add_group() - for i in range(8): - ctx(rule='cp ${SRC} ${TGT}', source='wscript', target='wscript_c_%d' % i, - color='BLUE', name='tasks c') - ctx(rule='cp ${SRC} ${TGT}', source='wscript_c_%d' % i, target='wscript_d_%d' % i, - color='PINK', name='tasks d') - -# just to make the diagrams more interesting, shuffle the tasks - -from waflib import Task - -old = Task.set_file_constraints -def meth(lst): - import random - random.shuffle(lst) - old(lst) -Task.set_file_constraints = meth - diff --git a/docs/book/examples/tasks_groups2/wscript b/docs/book/examples/tasks_groups2/wscript deleted file mode 100644 index dbe0403ce5..0000000000 --- a/docs/book/examples/tasks_groups2/wscript +++ /dev/null @@ -1,32 +0,0 @@ -#! /usr/bin/env python - -""" -waf configure clean build -j4 --dwidth=800 --dtitle='Parallel build representation for "waf -j4"' - -for this to work, make sure to either use waf git or to create waf with './waf-light --make-waf --tools=compat15,parallel_debug' -""" - -def options(ctx): - ctx.load('parallel_debug') - -def configure(ctx): - ctx.load('parallel_debug') - -def build(ctx): - - ctx.add_group('group1') - ctx.add_group('group2') - - for i in range(8): - ctx.set_group('group1') - ctx(rule='cp ${SRC} ${TGT}', source='wscript', target='wscript_a_%d' % i, - color='YELLOW', name='tasks a') - ctx(rule='cp ${SRC} ${TGT}', source='wscript_a_%d' % i, target='wscript_b_%d' % i, - color='GREEN', name='tasks b') - - ctx.set_group('group2') - ctx(rule='cp ${SRC} ${TGT}', source='wscript', target='wscript_c_%d' % i, - color='BLUE', name='tasks c') - ctx(rule='cp ${SRC} ${TGT}', source='wscript_c_%d' % i, target='wscript_d_%d' % i, - color='PINK', name='tasks d') - diff --git a/docs/book/examples/tasks_manual_deps/testfile b/docs/book/examples/tasks_manual_deps/testfile deleted file mode 100644 index 163b4d3f45..0000000000 --- a/docs/book/examples/tasks_manual_deps/testfile +++ /dev/null @@ -1 +0,0 @@ - diff --git a/docs/book/examples/tasks_manual_deps/wscript b/docs/book/examples/tasks_manual_deps/wscript deleted file mode 100644 index 205734998c..0000000000 --- a/docs/book/examples/tasks_manual_deps/wscript +++ /dev/null @@ -1,15 +0,0 @@ -#! 
/usr/bin/env python - -""" -ctx.add_manual_dependency - -change the file 'testfile' and see how it affects the rebuild of 'somecopy' -""" - -def configure(ctx): - pass - -def build(ctx): - ctx(rule='cp ${SRC} ${TGT}', source='wscript', target='somecopy') - ctx.add_manual_dependency(ctx.path.find_node('wscript'), ctx.path.find_node('testfile')) - diff --git a/docs/book/examples/tasks_scan/a.in b/docs/book/examples/tasks_scan/a.in deleted file mode 100644 index 8d1c8b69c3..0000000000 --- a/docs/book/examples/tasks_scan/a.in +++ /dev/null @@ -1 +0,0 @@ - diff --git a/docs/book/examples/tasks_scan/wscript b/docs/book/examples/tasks_scan/wscript deleted file mode 100644 index a36db1742e..0000000000 --- a/docs/book/examples/tasks_scan/wscript +++ /dev/null @@ -1,37 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -""" -A simple scanner method -""" - -import time -from waflib.Task import Task -class copy(Task): - - def scan(self): - print('→ calling the scanner method') - node = self.inputs[0].parent.find_resource('wscript') - return ([node], time.time()) - - def run(self): - return self.exec_command('cp %s %s' % (self.inputs[0].abspath(), self.outputs[0].abspath()) - ) - - def runnable_status(self): - ret = super(copy, self).runnable_status() - bld = self.generator.bld - print('nodes: %r' % bld.node_deps[self.uid()]) - print('custom data: %r' % bld.raw_deps[self.uid()]) - return ret - -def configure(ctx): - pass - -def build(ctx): - tsk = copy(env=ctx.env) - tsk.set_inputs(ctx.path.find_resource('a.in')) - tsk.set_outputs(ctx.path.find_or_declare('b.out')) - ctx.add_to_group(tsk) - - diff --git a/docs/book/examples/tasks_update_outputs/wscript b/docs/book/examples/tasks_update_outputs/wscript deleted file mode 100644 index 4745086817..0000000000 --- a/docs/book/examples/tasks_update_outputs/wscript +++ /dev/null @@ -1,16 +0,0 @@ -#! /usr/bin/env python - -def configure(ctx): - pass - -def build(ctx): - ctx( - rule = 'touch ${TGT}', - source = 'wscript', - target = ctx.path.make_node('wscript2'), - ) - ctx( - rule = 'cp ${SRC} ${TGT}', - source = ctx.path.make_node('wscript2'), - target = ctx.path.make_node('wscript3') - ) diff --git a/docs/book/examples/tasks_values/wscript b/docs/book/examples/tasks_values/wscript deleted file mode 100644 index 85e89d1527..0000000000 --- a/docs/book/examples/tasks_values/wscript +++ /dev/null @@ -1,25 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -""" -The variables from cls.vars are then used to compute the task signature -and may trigger rebuilds -""" - -from waflib.Task import Task -class foo(Task): - vars = ['FLAGS'] - def run(self): - print('the flags are %r' % self.env.FLAGS) - -def options(ctx): - ctx.add_option('--flags', default='-f', dest='flags', type='string') - -def configure(ctx): - pass - -def build(ctx): - ctx.env.FLAGS = ctx.options.flags - tsk = foo(env=ctx.env) - ctx.add_to_group(tsk) - diff --git a/docs/book/examples/tasks_values2/wscript b/docs/book/examples/tasks_values2/wscript deleted file mode 100644 index 560796fa79..0000000000 --- a/docs/book/examples/tasks_values2/wscript +++ /dev/null @@ -1,23 +0,0 @@ -#! 
/usr/bin/env python -# encoding: utf-8 - -""" -The attribute run_str is assumed to be a string and will be compiled into -a method 'run' by a metaclass (that is, when the class is defined) -""" - -def configure(ctx): - ctx.env.COPY = '/bin/cp' - ctx.env.COPYFLAGS = ['-f'] - -def build(ctx): - from waflib.Task import Task - class copy(Task): - run_str = '${COPY} ${COPYFLAGS} ${SRC} ${TGT}' - print(copy.vars) - - tsk = copy(env=ctx.env) - tsk.set_inputs(ctx.path.find_resource('wscript')) - tsk.set_outputs(ctx.path.find_or_declare('b.out')) - ctx.add_to_group(tsk) - diff --git a/docs/book/examples/tasks_weak/wscript b/docs/book/examples/tasks_weak/wscript deleted file mode 100644 index 78159d8c70..0000000000 --- a/docs/book/examples/tasks_weak/wscript +++ /dev/null @@ -1,30 +0,0 @@ -#! /usr/bin/env python - -""" -Illustrate the effect of weak order constraints for the build (generates diagrams) - -waf configure clean build -j2 --dwidth=800 --dtitle='No particular order for "waf -j2"' --dmaxtime=7.08 -waf configure clean build -j2 --dwidth=800 --dtitle='Weak order for "waf -j2"' --dmaxtime=7.08 -""" - -def options(ctx): - ctx.load('parallel_debug') - -def configure(ctx): - ctx.load('parallel_debug') - -def build(ctx): - for x in range(5): - ctx(rule='sleep 1', color='GREEN', name='short task') - ctx(rule='sleep 5', color='RED', name='long task') - -import random -from waflib import Task - -old = Task.set_file_constraints -def meth(lst): - lst.sort(cmp=lambda x, y: cmp(x.__class__.__name__, y.__class__.__name__)) - #random.shuffle(lst) - old(lst) -Task.set_file_constraints = meth - diff --git a/docs/book/examples/tasks_weak2/wscript b/docs/book/examples/tasks_weak2/wscript deleted file mode 100644 index aa744b5cd8..0000000000 --- a/docs/book/examples/tasks_weak2/wscript +++ /dev/null @@ -1,47 +0,0 @@ -#! /usr/bin/env python - -""" -Illustrate the effect of weak order constraints for the build (generates diagrams) - -waf configure clean build -j2 --dwidth=800 --dtitle='Default constraints on "waf -j2"' --dmaxtime=15 -waf configure clean build -j2 --dwidth=800 --dtitle='Additional constraints on "waf -j2"' --dmaxtime=15 -""" - -def options(ctx): - ctx.load('parallel_debug') - -def configure(ctx): - ctx.load('parallel_debug') - -def build(ctx): - ctx(rule='sleep 1', color='BLUE', name='blue') - for x in range(80): - ctx(rule='sleep 0.1', color='GREEN', name='green', always=True) - ctx(rule='sleep 6', color='RED', name='red', after='blue') - - -from waflib import Task, Runner - -old = Task.set_file_constraints -def bluefirst(lst): - lst.sort(cmp=lambda x, y: cmp(x.__class__.__name__, y.__class__.__name__)) - old(lst) -Task.set_file_constraints = bluefirst - -def get_out(self): - tsk = self.prev_get_out() - if tsk.__class__.__name__ == 'blue': - def remove_red(lst): - reds = [] - lst.reverse() - for tsk in lst: - if tsk.__class__.__name__ == 'red': - lst.remove(tsk) - reds.append(tsk) - lst.reverse() - return reds - self.outstanding = remove_red(self.outstanding) + remove_red(self.frozen) + self.outstanding - return tsk -Runner.Parallel.prev_get_out = Runner.Parallel.get_out -Runner.Parallel.get_out = get_out - diff --git a/docs/book/execution.txt b/docs/book/execution.txt deleted file mode 100644 index c3e15fec85..0000000000 --- a/docs/book/execution.txt +++ /dev/null @@ -1,583 +0,0 @@ -== Projects and commands - -The _waf_ script is meant to build software projects, and is of little use when taken alone. 
This chapter describes what is necessary to set up a waf project and how to use the _waf_ script. - -=== Waf commands - -Waf projects use description files of the name _wscript_ which are python scripts containing functions and variables that may be used by Waf. Particular functions named _waf commands_ may be used by Waf on the command-line. - -==== Declaring Waf commands - -Waf commands are really simple functions and may execute arbitrary python code such as calling other functions. -They take a single parameter as input and do not have to return any particular value as in the following example: - -// execution_hello -[source,python] ---------------- -#! /usr/bin/env python -# encoding: utf-8 - -def <1> hello(ctx <2>): - print('hello world') ---------------- - -<1> The _waf command_ *hello* -<2> A waf context, used to share data between scripts - -And here is how to have +waf+ call the function hello from the command-line: - -[source,shishell] ---------------- -$ waf hello -hello world -'hello' finished successfully (0.001s) ---------------- - -==== Chaining Waf commands - -Several commands may be declared in the same _wscript_ file: - -// execution_ping -[source,python] ---------------- -def ping(ctx): - print(' ping! %d' % id(ctx)) - -def pong(ctx): - print(' pong! %d' % id(ctx)) ---------------- - -And may be chained for execution by Waf: - -[source,shishell] ---------------- -$ waf ping pong ping ping - ping! 140704847272272 -'ping' finished successfully (0.001s) - pong! 140704847271376 -'pong' finished successfully (0.001s) - ping! 140704847272336 -'ping' finished successfully (0.001s) - ping! 140704847272528 -'ping' finished successfully (0.001s) ---------------- - -NOTE: The context parameter is a new object for each command executed. The classes are also different: ConfigureContext for configure, BuildContext for build, OptionContext for option, and Context for any other command. - -==== Using several scripts and folders - -Although a Waf project must contain a top-level _wscript_ file, the contents may be split into several sub-project files. We will now illustrate this concept on a small project: - -[source,shishell] ---------------- -$ tree -|-- src -| `-- wscript -`-- wscript ---------------- - -The commands in the top-level _wscript_ will call the same commands from a subproject _wscript_ file by calling a context method named _recurse_: - -// execution_recurse -[source,python] ---------------- -def ping(ctx): - print('→ ping from ' + ctx.path.abspath()) - ctx.recurse('src') ---------------- - -And here is the contents of 'src/wscript' - -[source,python] ---------------- -def ping(ctx): - print('→ ping from ' + ctx.path.abspath()) ---------------- - -Upon execution, the results will be: - -[source,shishell] ---------------- -$ cd /tmp/execution_recurse - -$ waf ping -→ ping from /tmp/execution_recurse -→ ping from /tmp/execution_recurse/src -'ping' finished successfully (0.002s) - -$ cd src - -$ waf ping -→ ping from /tmp/execution_recurse/src -'ping' finished successfully (0.001s) ---------------- - -NOTE: The method _recurse_, and the attribute _path_ are available on all waf context classes - -=== Waf project definition - -==== Configuring a project (the _configure_ command) - -Although Waf may be called from any folder containing a 'wscript' file, it is usually a good idea to have a single entry point in the scripts. -Besides ensuring a consistent behaviour, it also saves the redefinition of the same imports and function redefinitions in all wscript files. 
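As a minimal sketch of this single-entry-point layout (the 'src' sub-folder and the target name below are hypothetical), the top-level wscript can simply recurse into the sub-project for each command, so the sub-script never repeats imports or option definitions:

[source,python]
---------------
# top-level wscript: the single entry point (hypothetical layout with one sub-folder 'src')
top = '.'
out = 'build_directory'

def configure(ctx):
    ctx.recurse('src')

def build(ctx):
    ctx.recurse('src')
---------------

[source,python]
---------------
# src/wscript: defines only the functions it actually needs
def configure(ctx):
    pass

def build(ctx):
    ctx(rule='touch ${TGT}', target='generated.txt')
---------------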
-The following concepts help to structure a Waf project: - -. Project directory: directory containing the source files that will be packaged and redistributed to other developers or to end users -. Build directory: directory containing the files generated by the project (configuration sets, build files, logs, etc) -. System files: files and folders which do not belong to the project (operating system files, etc) - -The predefined command named _configure_ is used to gather and store the information about these folders. -We will now extend the example from the previous section with the following top-level wscript file: - -// execution_configure -[source,python] ---------------- -top = '.' <1> -out = 'build_directory' <2> - -def configure(ctx): <3> - print('→ configuring the project in ' + ctx.path.abspath()) - -def ping(ctx): - print('→ ping from ' + ctx.path.abspath()) - ctx.recurse('src') ---------------- - -<1> string representing the project directory. In general, top is set to '.', except for some proprietary projects where the wscript cannot be added to the top-level, top may be set to '../..' or even some other folder such as '/checkout/perforce/project' -<2> string representing the build directory. In general, it is set to 'build', except for some proprietary projects where the build directory may be set to an absolute path such as '/tmp/build'. It is important to be able to remove the build directory safely, so it should never be given as '.' or '..'. -<3> the _configure_ function is called by the 'configure' command - -The script in 'src/wscript' is left unchanged: - -[source,python] ---------------- -def ping(ctx): - print('→ ping from ' + ctx.path.abspath()) ---------------- - -The execution output will be the following: - -//// -$ waf ping -→ ping from /tmp/execution_configure -→ ping from /tmp/execution_configure/src -'ping' finished successfully (0.001s) - -$ cd src - -$ waf ping -→ ping from /tmp/execution_configure/src -'ping' finished successfully (0.001s) - -$ cd .. 
- -//// - -[source,shishell] ---------------- -$ cd /tmp/execution_configure <1> -$ tree -|-- src -| `-- wscript -`-- wscript - -$ waf configure <2> -→ configuring the project in /tmp/execution_configure -'configure' finished successfully (0.021s) - -$ tree -a -|-- build_directory/ <3> -| |-- c4che/ <4> -| | |-- build.config.py <5> -| | `-- _cache.py <6> -| `-- config.log <7> -|--.lock-wafbuild <8> -|-- src -| `-- wscript -`-- wscript - -$ waf ping -→ ping from /tmp/execution_configure -→ ping from /tmp/execution_configure/src -'ping' finished successfully (0.001s) - -$ cd src -$ waf ping <9> -→ ping from /tmp/execution_configure -→ ping from /tmp/execution_configure/src -'ping' finished successfully (0.001s) ---------------- - -<1> To configure the project, change to the directory containing the top-level project file -<2> The execution is called by calling _waf configure_ -<3> The build directory was created -<4> The configuration data is stored in the folder 'c4che/' -<5> The command-line options and environment variables in use are stored in 'build.config.py' -<6> The user configuration set is stored in '_cache.py' -<7> Configuration log (duplicate of the output generated during the configuration) -<8> Hidden file pointing at the relevant project file and build directory -<9> Calling _waf_ from a subfolder will execute the commands from the same wscript file used for the configuration - -NOTE: _waf configure_ is always called from the directory containing the wscript file - -==== Removing generated files (the _distclean_ command) - -A command named _distclean_ is provided to remove the build directory and the lock file created during the configuration. On the example from the previous section: - -[source,shishell] ---------------- -$ waf configure -→ configuring the project in /tmp/execution_configure -'configure' finished successfully (0.001s) - -$ tree -a -|-- build_directory/ -| |-- c4che/ -| | |-- build.config.py -| | `-- _cache.py -| `-- config.log -|--.lock-wafbuild -`-- wscript - -$ waf distclean <1> -'distclean' finished successfully (0.001s) - -$ tree <2> -|-- src -| `-- wscript -`-- wscript ---------------- - -<1> The _distclean_ command definition is implicit (no declaration in the wscript file) -<2> The tree is reverted to its original state: no build directory and no lock file - -The behaviour of _distclean_ is fairly generic and the corresponding function does not have to be defined in the wscript files. It may be defined to alter its behaviour though, see for example the following: - -[source,python] ---------------- -top = '.' -out = 'build_directory' - -def configure(ctx): - print('→ configuring the project') - -def distclean(ctx): - print(' Not cleaning anything!') ---------------- - -Upon execution: - -[source,shishell] ---------------- -$ waf distclean - Not cleaning anything! -'distclean' finished successfully (0.000s) ---------------- - -==== Packaging the project sources (the _dist_ command) - -The _dist_ command is provided to create an archive of the project. By using the script presented previously: - -// execution_dist - -[source,python] ---------------- -top = '.' 
-out = 'build_directory' - -def configure(ctx): - print('→ configuring the project in ' + ctx.path.abspath()) ---------------- - -Execute the _dist_ command to get: - -[source,shishell] ---------------- -$ cd /tmp/execution_dist - -$ waf configure -→ configuring the project in /tmp/execution_dist -'configure' finished successfully (0.005s) - -$ waf dist -New archive created: noname-1.0.tar.bz2 (sha='a4543bb438456b56d6c89a6695f17e6cb69061f5') -'dist' finished successfully (0.035s) ---------------- - -By default, the project name and version are set to 'noname' and '1.0'. To change them, it is necessary to provide two additional variables in the top-level project file: - -[source,python] ---------------- -APPNAME = 'webe' -VERSION = '2.0' - -top = '.' -out = 'build_directory' - -def configure(ctx): - print('→ configuring the project in ' + ctx.path.abspath()) ---------------- - -Because the project was configured once, it is not necessary to configure it once again: - -[source,shishell] ---------------- -$ waf dist -New archive created: webe-2.0.tar.bz2 (sha='7ccc338e2ff99b46d97e5301793824e5941dd2be') -'dist' finished successfully (0.006s) ---------------- - -More parameters may be given to alter the archive by adding a function 'dist' in the script: - -[source,python] ---------------- -def dist(ctx): - ctx.base_name = 'foo_2.0' <1> - ctx.algo = 'zip' <2> - ctx.excl = ' **/.waf-1* **/*~ **/*.pyc **/*.swp **/.lock-w*' <3> - ctx.files = ctx.path.ant_glob('**/wscript') <4> ---------------- - -<1> The archive name may be given directly instead of computing from 'APPNAME' and 'VERSION' -<2> The default compression format is 'tar.bz2'. Other valid formats are 'zip' and 'tar.gz' -<3> Exclude patterns passed to give to 'ctx.path.ant_glob()' which is used to find the files -<4> The files to add to the archive may be given as Waf node objects ('excl' is therefore ignored) - -==== Defining command-line options (the _options_ command) - -The Waf script provides various default command-line options, which may be consulted by executing +waf --help+: - -[source,shishell] ---------------- -$ waf --help -waf [command] [options] - -Main commands (example: ./waf build -j4) - build : executes the build - clean : cleans the project - configure: configures the project - dist : makes a tarball for redistributing the sources - distcheck: checks if the project compiles (tarball from 'dist') - distclean: removes the build directory - install : installs the targets on the system - list : lists the targets to execute - step : executes tasks in a step-by-step fashion, for debugging - uninstall: removes the targets installed - -Options: - --version show program's version number and exit - -h, --help show this help message and exit - -j JOBS, --jobs=JOBS amount of parallel jobs (2) - -k, --keep keep running happily even if errors are found - -v, --verbose verbosity level -v -vv or -vvv [default: 0] - --zones=ZONES debugging zones (task_gen, deps, tasks, etc) - - configure options: - -o OUT, --out=OUT build dir for the project - -t TOP, --top=TOP src dir for the project - --prefix=PREFIX installation prefix [default: '/usr/local/'] - --download try to download the tools if missing - - build and install options: - -p, --progress -p: progress bar; -pp: ide output - --targets=TARGETS task generators, e.g. "target1,target2" - - step options: - --files=FILES files to process, by regexp, e.g. 
"*/main.c,*/test/main.o" - - install/uninstall options: - --destdir=DESTDIR installation root [default: ''] - -f, --force force file installation ---------------- - -Accessing a command-line option is possible from any command. Here is how to access the value _prefix_: - -[source,python] ---------------- -top = '.' -out = 'build_directory' - -def configure(ctx): - print('→ prefix is ' + ctx.options.prefix) ---------------- - -Upon execution, the following will be observed: - -[source,shishell] ---------------- -$ waf configure -→ prefix is /usr/local/ -'configure' finished successfully (0.001s) ---------------- - -To define project command-line options, a special command named _options_ may be defined in user scripts. This command will be called once before any other command executes. - -[source,python] ---------------- -top = '.' -out = 'build_directory' - -def options(ctx): - ctx.add_option('--foo', action='store', default=False, help='Silly test') - -def configure(ctx): - print('→ the value of foo is %r' % ctx.options.foo) ---------------- - -Upon execution, the following will be observed: - -[source,shishell] ---------------- -$ waf configure --foo=test -→ the value of foo is 'test' -'configure' finished successfully (0.001s) ---------------- - -The command context for options is a shortcut to access the optparse functionality. For more information on the optparse module, consult the http://docs.python.org/library/optparse.html[Python documentation] - - -=== The _build_ commands - -==== Building targets (the _build_ command) - -The 'build' command is used for building targets. We will now create a new project in '/tmp/execution_build/', and add a script to create an empty file +foo.txt+ and then copy it into another file +bar.txt+: - -// execution_build -[source,python] ---------------- -top = '.' -out = 'build_directory' - -def configure(ctx): - pass - -def build(ctx): - ctx(rule='touch ${TGT}', target='foo.txt') - ctx(rule='cp ${SRC} ${TGT}', source='foo.txt', target='bar.txt') ---------------- - -Calling _waf build_ directly results in an error: - -[source,shishell] ---------------- -$ cd /tmp/execution_build/ - -$ waf build -The project was not configured: run "waf configure" first! ---------------- - -The build requires a configured folder to know where to look for source files and where to output the created files. 
Let's try again: - -[source,shishell] ---------------- -$ waf configure build -'configure' finished successfully (0.007s) -Waf: Entering directory `/tmp/execution_build/build_directory' -[1/2] foo.txt: -> build_directory/foo.txt <1> -[2/2] bar.txt: build_directory/foo.txt -> build_directory/bar.txt -Waf: Leaving directory `/tmp/examples/execution_build/build_directory' -'build' finished successfully (0.041s) - -$ tree -a -|-- build_directory/ -| |-- bar.txt <2> -| |-- c4che/ -| | |-- build.config.py -| | `-- _cache.py -| |-- foo.txt -| |-- config.log -| `-- .wafpickle <3> -|--.lock-wafbuild -`-- wscript - -$ waf build -Waf: Entering directory `/tmp/execution_build/build_directory' -Waf: Leaving directory `/tmp/execution_build/build_directory' -'build' finished successfully (0.008s) <4> ---------------- - -<1> Note that the build _deduced_ that +bar.txt+ has to be created after +foo.txt+ -<2> The targets are created in the build directory -<3> A pickle file is used to store the information about the targets -<4> Since the targets are up-to-date, they do not have to be created once again - -Since the command _waf build_ is usually executed very often, a shortcut is provided to call it implicitly: - -[source,shishell] ---------------- -$ waf -Waf: Entering directory `/tmp/execution_build/build_directory' -Waf: Leaving directory `/tmp/execution_build/build_directory' ---------------- - -==== Cleaning the targets (the _clean_ command) - -The _clean_ command is used to remove the information about the files and targets created during the build. It uses the same function _build_ from the wscript files so there is no need to add a function named _clean_ in the wscript file. - -After cleaning, the targets will be created once again even if they were up-to-date: - -[source,shishell] ---------------- -$ waf clean build -v -'clean' finished successfully (0.003s) -Waf: Entering directory `/tmp/execution_build/build_directory' <1> -[1/2] foo.txt: -> build_directory/foo.txt <2> -14:58:34 runner 'touch foo.txt' <3> -[2/2] bar.txt: build_directory/foo.txt -> build_directory/bar.txt -14:58:34 runner 'cp foo.txt bar.txt' -Waf: Leaving directory `/tmp/execution_build/build_directory' -'build' finished successfully (0.040s) ---------------- - -<1> All commands are executed from the build directory by default -<2> The information about the files +foo.txt+ was lost so it is rebuilt -<3> By using the _-v_ flag, the command-lines executed are displayed - -==== More build commands - -The following commands all use the same function _build_ from the wscript file: - -. +build:+ process the source code to create the object files -. +clean:+ remove the object files that were created during a build (unlike distclean, do not remove the configuration) -. +install:+ check that all object files have been generated and copy them on the system (programs, libraries, data files, etc) -. +uninstall:+ undo the installation, remove the object files from the system without touching the ones in the build directory -. +list:+ list the task generators in the build section (to use with waf --targets=name) -. +step:+ force the rebuild of particular files for debugging purposes - -The attribute 'cmd' holds the name of the command being executed: - -// execution_cmd -[source,python] ---------------- -top = '.' 
-out = 'build_directory' - -def configure(ctx): - print(ctx.cmd) - -def build(ctx): - if ctx.cmd == 'clean': - print('cleaning!') - else: - print(ctx.cmd) ---------------- - -The execution will produce the following output: - -[source,shishell] ---------------- -$ waf configure clean build -Setting top to : /tmp/execution_cmd -Setting out to : /tmp/execution_cmd/build_directory -configure -'configure' finished successfully (0.002s) -cleaning! -'clean' finished successfully (0.002s) -Waf: Entering directory `/tmp/execution_cmd/build_directory' -build -Waf: Leaving directory `/tmp/execution_cmd/build_directory' -'build' finished successfully (0.001s) ---------------- - -The build command usage will be described in details in the next chapters. - diff --git a/docs/book/glossary.txt b/docs/book/glossary.txt deleted file mode 100644 index ee86eaefdd..0000000000 --- a/docs/book/glossary.txt +++ /dev/null @@ -1,20 +0,0 @@ - -== Glossary - -[glossary] -Build Order:: - The build order is the sequence in which tasks must be executed. Because tasks can be executed in parallel, several build orders can be computed depending on the constraints between the tasks. When a build order cannot be computed (usually by contradictory order constraints), the build is said to be in a deadlock. -Dependency:: - A dependency represents the conditions by which a task can be considered up-to-date or not (execution status). The dependencies can be explicit (file inputs and outputs) or abstract (dependency on a value for example). -Task generator:: - A task generator is an object instance of the class Task.task_gen. The task generators encapsulate the creation of various task instances at a time, and simplify the creation of ordering constraints between them (for example, compilation tasks are executed before link tasks). -Task:: - A Waf task is an object instance of the class Task.TaskBase. Waf tasks may be simple (Task.TaskBase) or related to the filesystem (Task.Task). Tasks represent the production of something during the build (files in general), and may be executed in sequence (with ordering constraints) or in parallel. -Tool:: - A Waf tool is a python module containing Waf-specific extensions. The Waf tools are located in the folder +waflib/Tools/+ and usually contain a global variable 'configure' which may reference functions to execute in the configuration. -Node:: - The Node class is a data structure used to represent the filesystem in an efficient manner. The node objects may represent files or folders. File nodes are associated to signatures objects. The signature can be hashes of the file contents (source files) or task signatures (build files). -Command:: - Function present in the top-level project file (wscript) and accepting a 'waflib.Context.Context' instance as unique input parameter. The function is executed when its name is given on the command-line (for example running 'waf configure' will execute the function 'configure') -Variant:: - Additional output directory used to enable several (build) commands to create the same targets with different compilation flags. 
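The 'Variant' entry above can be made concrete with a small, hedged sketch (the command name 'debug' and the flags are arbitrary choices, not part of the glossary): an additional build command is declared by subclassing BuildContext, and its outputs land in a sub-directory of the build directory named after the variant.

[source,python]
---------------
top = '.'
out = 'build'

from waflib.Build import BuildContext

class debug(BuildContext):
    """executes 'waf debug': same build() function, outputs under build/debug"""
    cmd = 'debug'
    variant = 'debug'

def configure(ctx):
    pass

def build(bld):
    # bld.variant is '' for the plain 'build' command and 'debug' for 'waf debug';
    # different flags per variant, as described in the glossary entry
    flags = '-g' if bld.variant == 'debug' else '-O2'
    bld(rule='echo %s > ${TGT}' % flags, target='flags.txt')
---------------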
diff --git a/docs/book/lang.map b/docs/book/lang.map deleted file mode 100644 index bd587565e5..0000000000 --- a/docs/book/lang.map +++ /dev/null @@ -1,146 +0,0 @@ -java = java.lang -moc = cpp.lang -cpp = cpp.lang -c = c.lang -C = cpp.lang -cc = cpp.lang -cs = csharp.lang -csharp = csharp.lang -h = cpp.lang -hh = cpp.lang -H = cpp.lang -hpp = cpp.lang -javascript = javascript.lang -js = javascript.lang -prolog = prolog.lang -pl = prolog.lang -perl = perl.lang -pm = perl.lang -php3 = php.lang -php4 = php.lang -php5 = php.lang -php = php.lang -ctp = php.lang -protobuf = proto.lang -proto = proto.lang -python = python.lang -py = python.lang -ruby = ruby.lang -rb = ruby.lang -flex = flex.lang -lex = flex.lang -l = flex.lang -ll = flex.lang -bison = bison.lang -yacc = bison.lang -y = bison.lang -yy = bison.lang -changelog = changelog.lang -lua = lua.lang -ml = caml.lang -caml = caml.lang -mli = caml.lang -sml = sml.lang -sig = sml.lang -syslog = log.lang -log = log.lang -pas = pascal.lang -pascal = pascal.lang -fortran = fortran.lang -free-fortran = fortran.lang -fixed-fortran = fixed-fortran.lang -html = html.lang -htm = html.lang -tex = latex.lang -latex = latex.lang -cls = latex.lang -sty = latex.lang -dtx = latex.lang -lgt = logtalk.lang -logtalk = logtalk.lang -diff = diff.lang -patch = diff.lang -lang = langdef.lang -langdef = langdef.lang -outlang = outlang.lang -style = style.lang -ps = postscript.lang -eps = postscript.lang -postscript = postscript.lang -kcfg = xml.lang -ui = xml.lang -kdevelop = xml.lang -rc = xml.lang -docbook = xml.lang -kidl = xml.lang -xml = xml.lang -xhtml = xml.lang -bash = sh.lang -sh = sh.lang -csh = sh.lang -ksh = sh.lang -tcsh = sh.lang -shell = sh.lang -tcl = tcl.lang -tk = tcl.lang -txt = nohilite.lang -sql = sql.lang -bib = bib.lang -makefile = makefile.lang -make = makefile.lang -am = makefile.lang -in = makefile.lang -css = css.lang -m4 = m4.lang -ac = m4.lang -autoconf = m4.lang -sl = slang.lang -slsh = slang.lang -slang = slang.lang -properties = properties.lang -desktop = desktop.lang -ini = desktop.lang -conf = conf.lang -lsm = lsm.lang -spec = spec.lang -haxe = haxe.lang -hx = haxe.lang -ldap = ldap.lang -ldif = ldap.lang -glsl = glsl.lang -xorg = xorg.lang -scala = scala.lang -ada = ada.lang -adb = ada.lang -pc = pc.lang -pkgconfig = pc.lang -oz = oz.lang -texinfo = texinfo.lang -texi = texinfo.lang -hs = haskell.lang -hs = haskell.lang -lhs = haskell_literate.lang -haskell = haskell.lang -manifest = manifest.lang -mf = manifest.lang -asm = asm.lang -s = asm.lang -applescript = applescript.lang -scpt = applescript.lang -vbscript = vbscript.lang -vbs = vbscript.lang -awk = awk.lang -bat = bat.lang -batch = bat.lang -clipper = clipper.lang -prg = clipper.lang -cbl = cobol.lang -cobol = cobol.lang -dmd = d.lang -d = d.lang -errors = errors.lang -erl = erlang.lang -erlang = erlang.lang -vala = vala.lang -lisp = lisp.lang -shishell = shishell.lang diff --git a/docs/book/make_like_rules.txt b/docs/book/make_like_rules.txt deleted file mode 100644 index 4c3ea53c58..0000000000 --- a/docs/book/make_like_rules.txt +++ /dev/null @@ -1,406 +0,0 @@ - -== Task generators - -=== Rule-based task generators (Make-like) - -This chapter illustrates the use of rule-based task generators for building simple targets. - -==== Declaration and usage - -Rule-based task generators are a particular category of task generators producing exactly one task. 
- -The following example shows a task generator producing the file 'foobar.txt' from the project file 'wscript' by executing the command _cp_ to perform a copy: - -// rule_simple -[source,python] ---------------- -top = '.' -out = 'build' - -def configure(conf): - pass - -def build(bld): - bld( <1> - rule = 'cp ${SRC} ${TGT}', <2> - source = 'wscript', <3> - target = 'foobar.txt', <4> - ) ---------------- - -<1> To instantiate a new task generator, remember that all arguments have the form 'key=value' -<2> The attribute _rule_ represents the command to execute in a readable manner (more on this in the next chapters). -<3> Source files, either in a space-delimited string, or in a list of python strings -<4> Target files, either in a space-delimited string, or in a list of python strings - -Upon execution, the following output will be observed: - -// rules_simple -[source,shishell] ---------------- -$ waf distclean configure build -v -'distclean' finished successfully (0.000s) -'configure' finished successfully (0.021s) -Waf: Entering directory `/tmp/rules_simple/build' -[1/1] foobar.txt: wscript -> build/foobar.txt <1> -10:57:33 runner 'cp ../wscript foobar.txt' <2> -Waf: Leaving directory `/tmp/rules_simple/build' -'build' finished successfully (0.016s) - -$ tree -. -|-- build -| |-- c4che -| | |-- build.config.py -| | `-- _cache.py -| |-- config.log -| `-- foobar.txt -`-- wscript - -$ waf <3> -Waf: Entering directory `/tmp/rules_simple/build' -Waf: Leaving directory `/tmp/rules_simple/build' -'build' finished successfully (0.006s) - -$ echo " " >> wscript <4> - -$ waf -Waf: Entering directory `/tmp/rules_simple/build' -[1/1] foobar.txt: wscript → build/foobar.txt <5> -Waf: Leaving directory `/tmp/rules_simple/build' -'build' finished successfully (0.013s) ---------------- - -<1> In the first execution, the target is correctly created -<2> Command-lines are only displayed in 'verbose mode' by using the option '-v' -<3> The target is up-to-date, so the task is not executed -<4> Modify the source file in place by appending a space character -<5> Since the source has changed, the target is created once again - -The string for the rule also enters in the dependency calculation. If the rule changes, then the task will be recompiled. - -==== Rule functions - -Rules may be given as expression strings or as python function. The function is assigned to the task class created: - -// rule_function -[source,python] ---------------- -top = '.' -out = 'build' - -def configure(conf): - pass - -def build(bld): - def run(task): <1> - src = task.inputs[0].abspath() <2> - tgt = task.outputs[0].abspath() <3> - cmd = 'cp %s %s' % (src, tgt) - print(cmd) - return task.exec_command(cmd) <4> - - bld( - rule = run, <5> - source = 'wscript', - target = 'same.txt', - ) ---------------- - -<1> Rule functions take the task instance as parameter. -<2> Sources and targets are represented internally as Node objects bound to the task instance. -<3> Commands are executed from the root of the build directory. Node methods such as 'bldpath' ease the command line creation. -<4> The task class holds a wrapper around subprocess.Popen(...) to execute commands. 
-<5> Use a function instead of a string expression - -The execution trace will be similar to the following: - -[source,shishell] ---------------- -$ waf distclean configure build -'distclean' finished successfully (0.001s) -'configure' finished successfully (0.001s) -Waf: Entering directory `/tmp/rule_function/out' -[1/1] same.txt: wscript -> out/same.txt -cp /tmp/rule_function/wscript /tmp/rule_function/build/same.txt -Waf: Leaving directory `/tmp/rule_function/out' -'build' finished successfully (0.010s) ---------------- - -The rule function must return a null value (0, None or False) to indicate success, and must generate the files corresponding to the outputs. The rule function is executed by threads internally so it is important to write thread-safe code (cannot search or create node objects). - -Unlike string expressions, functions may execute several commands at once. - -==== Shell usage - -The attribute 'shell' is used to enable the system shell for command execution. A few points are worth keeping in mind when declaring rule-based task generators: - -. The Waf tools do not use the shell for executing commands -. The shell is used by default for user commands and custom task generators -. String expressions containing the following symbols `>', `<' or `&' cannot be transformed into functions to execute commands without a shell, even if told to -. In general, it is better to avoid the shell whenever possible to avoid quoting problems (paths having blank characters in the name for example) -. The shell is creating a performance penalty which is more visible on win32 systems. - -Here is an example: - -[source,python] ---------------- -top = '.' -out = 'build' - -def configure(conf): - pass - -def build(bld): - bld(rule='cp ${SRC} ${TGT}', source='wscript', target='f1.txt', shell=False) - bld(rule='cp ${SRC} ${TGT}', source='wscript', target='f2.txt', shell=True) ---------------- - -Upon execution, the results will be similar to the following: - -[source,shishell] ---------------- -$ waf distclean configure build --zones=runner,action -'distclean' finished successfully (0.004s) -'configure' finished successfully (0.001s) -Waf: Entering directory `/tmp/rule/out' -23:11:23 action <1> -def f(task): - env = task.env - wd = getattr(task, 'cwd', None) - def to_list(xx): - if isinstance(xx, str): return [xx] - return xx - lst = [] - lst.extend(['cp']) - lst.extend([a.srcpath(env) for a in task.inputs]) - lst.extend([a.bldpath(env) for a in task.outputs]) - lst = [x for x in lst if x] - return task.exec_command(lst, cwd=wd) - -23:11:23 action -def f(task): - env = task.env - wd = getattr(task, 'cwd', None) - p = env.get_flat - cmd = ''' cp %s %s ''' % (" ".join([a.srcpath(env) for a in task.inputs]), <2> - " ".join([a.bldpath(env) for a in task.outputs])) - return task.exec_command(cmd, cwd=wd) - -[1/2] f1.txt: wscript -> out/f1.txt -23:11:23 runner system command -> ['cp', '../wscript', 'f1.txt'] <3> -[2/2] f2.txt: wscript -> out/f2.txt -23:11:23 runner system command -> cp ../wscript f2.txt -Waf: Leaving directory `/tmp/rule/out' -'build' finished successfully (0.017s) ---------------- - -<1> String expressions are converted to functions (here, without the shell). -<2> Command execution by the shell. Notice the heavy use of string concatenation. -<3> Commands to execute are displayed by calling 'waf --zones=runner'. When called without the shell, the arguments are displayed as a list. 
- -NOTE: For performance and maintainability, try avoiding the shell whenever possible - -==== Inputs and outputs - -Source and target arguments are optional for make-like task generators, and may point at one or several files at once. Here are a few examples: - -[source,python] ---------------- -top = '.' -out = 'build' - -def configure(conf): - pass - -def build(bld): - bld( <1> - rule = 'cp ${SRC} ${TGT[0].abspath()} && cp ${SRC} ${TGT[1].abspath()}', - source = 'wscript', - target = 'f1.txt f2.txt', - shell = True - ) - - bld( <2> - source = 'wscript', - rule = 'echo ${SRC}' - ) - - bld( <3> - target = 'test.k3', - rule = 'echo "test" > ${TGT}', - ) - - bld( <4> - rule = 'echo 1337' - ) - - bld( <5> - rule = "echo 'task always run'", - always = True - ) ---------------- - -<1> Generate 'two files' whenever the input or the rule change. Likewise, a rule-based task generator may have multiple input files. -<2> The command is executed whenever the input or the rule change. There are no declared outputs. -<3> No input, the command is executed whenever it changes -<4> No input and no output, the command is executed only when the string expression changes -<5> No input and no output, the command is executed each time the build is called - -For the record, here is the output of the build: - -[source,shishell] ---------------- -$ waf distclean configure build -'distclean' finished successfully (0.002s) -'configure' finished successfully (0.093s) -Waf: Entering directory `/tmp/rule/out' -[1/5] echo 1337: -1337 -[2/5] echo 'task always run': -[3/5] echo ${SRC}: wscript -../wscript -[4/5] f1.txt f2.txt: wscript -> out/f1.txt out/f2.txt -task always run -[5/5] test.k3: -> out/test.k3 -Waf: Leaving directory `/tmp/rule/out' -'build' finished successfully (0.049s) - -$ waf -Waf: Entering directory `/tmp/rule/out' -[2/5] echo 'task always run': -task always run -Waf: Leaving directory `/tmp/rule/out' -'build' finished successfully (0.014s) ---------------- - -==== Dependencies on file contents - -As a second example, we will create a file named 'r1.txt' from the current date. It will be updated each time the build is executed. A second file named 'r2.txt' will be created from 'r1.txt'. - -[source,python] ---------------- -top = '.' -out = 'build' - -def configure(conf): - pass - -def build(bld): - bld( - name = 'r1', <1> - target = 'r1.txt', - rule = '(date > ${TGT}) && cat ${TGT}', <2> - always = True, <3> - ) - - bld( - name = 'r2', <4> - target = 'r2.txt', - rule = 'cp ${SRC} ${TGT}', - source = 'r1.txt', <5> - after = 'r1', <6> - ) ---------------- - -<1> Give the task generator a name, it will create a task class of the same name to execute the command -<2> Create 'r1.txt' with the date -<3> There is no source file to depend on and the rule never changes. The task is then set to be executed each time the build is started by using the attribute 'always' -<4> If no name is provided, the rule is used as a name for the task class -<5> Use 'r1.txt' as a source for 'r2.txt'. Since 'r1.txt' was declared before, the dependency will be added automatically ('r2.txt' will be re-created whenever 'r1.txt' changes) -<6> Set the command generating 'r2.txt' to be executed after the command generating 'r1.txt'. The attribute 'after' references task class names, not task generators. 
Here it will work because rule-based task generator tasks inherit the 'name' attribute - -The execution output will be the following: - -[source,shishell] ---------------- -$ waf distclean configure build -v -'distclean' finished successfully (0.003s) -'configure' finished successfully (0.001s) -Waf: Entering directory `/tmp/rule/out' -[1/2] r1: -> out/r1.txt -16:44:39 runner system command -> (date > r1.txt) && cat r1.txt -dom ene 31 16:44:39 CET 2010 -[2/2] r2: out/r1.txt -> out/r2.txt -16:44:39 runner system command -> cp r1.txt r2.txt -Waf: Leaving directory `/tmp/rule/out' -'build' finished successfully (0.021s) - -$ waf -v -Waf: Entering directory `/tmp/rule/out' -[1/2] r1: -> out/r1.txt -16:44:41 runner system command -> (date > r1.txt) && cat r1.txt -dom ene 31 16:44:41 CET 2010 -Waf: Leaving directory `/tmp/rule/out' -'build' finished successfully (0.016s) ---------------- - -Although r2 *depends* on 'r1.txt', r2 was not executed in the second build. As a matter of fact, the signature of the task r1 has not changed, and r1 was only set to be executed each time, regardless of its signature. Since the signature of the 'r1.txt' does not change, the signature of r2 will not change either, and 'r2.txt' is considered up-to-date. - -We will now illustrate how to make certain that the outputs reflect the file contents and trigger the rebuild for dependent tasks by enabling the attribute 'on_results': - -[source,python] ---------------- -top = '.' -out = 'build' - -def configure(conf): - pass - -def build(bld): - bld( - name = 'r1', - target = 'r1.txt', - rule = '(date > ${TGT}) && cat ${TGT}', - always = True, - on_results = True, - ) - - bld( - target = 'r2.txt', - rule = 'cp ${SRC} ${TGT}', - source = 'r1.txt', - after = 'r1', - ) ---------------- - -Here 'r2.txt' will be re-created each time: - -[source,shishell] ---------------- -$ waf distclean configure build -v -'distclean' finished successfully (0.003s) -'configure' finished successfully (0.001s) -Waf: Entering directory `/tmp/rule/out' -[1/2] r1: -> out/r1.txt -16:59:49 runner system command -> (date > r1.txt) && cat r1.txt <1> -dom ene 31 16:59:49 CET 2010 <2> -[2/2] r2: out/r1.txt -> out/r2.txt -16:59:49 runner system command -> cp r1.txt r2.txt -Waf: Leaving directory `/tmp/rule/out' -'build' finished successfully (0.020s) - -$ waf -v -Waf: Entering directory `/tmp/rule/out' -[1/2] r1: -> out/r1.txt -16:59:49 runner system command -> (date > r1.txt) && cat r1.txt -dom ene 31 16:59:49 CET 2010 <3> -Waf: Leaving directory `/tmp/rule/out' -'build' finished successfully (0.016s) - -$ waf -v -Waf: Entering directory `/tmp/rule/out' -[1/2] r1: -> out/r1.txt -16:59:53 runner system command -> (date > r1.txt) && cat r1.txt -dom ene 31 16:59:53 CET 2010 <4> -[2/2] r2: out/r1.txt -> out/r2.txt -16:59:53 runner system command -> cp r1.txt r2.txt -Waf: Leaving directory `/tmp/rule/out' -'build' finished successfully (0.022s) ---------------- - -<1> Start with a clean build, both 'r1.txt' and 'r2.txt' are created -<2> Notice the date and time -<3> The second build was executed at the same date and time, so 'r1.txt' has not changed, therefore 'r2.txt' is up to date -<4> The third build is executed at another date and time. 
Since 'r1.txt' has changed, 'r2.txt' is created once again - diff --git a/docs/book/nodes.txt b/docs/book/nodes.txt deleted file mode 100644 index 8e3e302aa8..0000000000 --- a/docs/book/nodes.txt +++ /dev/null @@ -1,341 +0,0 @@ -== Node objects - -Node objects represent files or folders and are used to ease the operations dealing with the file system. This chapter provides an overview of their usage. - -=== Design of the node class - -==== The node tree - -The Waf nodes inherit the class _waflib.Node.Node_ and provide a tree structure to represent the file system: - -. *parent*: parent node -. *children*: folder contents - or empty if the node is a file - -In practice, the reference to the filesystem tree is bound to the context classes for access from Waf commands. Here is an illustration: - -// nodes_tree -[source,python] ---------------- -top = '.' -out = 'build' - -def configure(ctx): - pass - -def dosomething(ctx): - print(ctx.path.abspath()) <1> - print(ctx.root.abspath()) <2> - print("ctx.path contents %r" % ctx.path.children) - print("ctx.path parent %r" % ctx.path.parent.abspath()) - print("ctx.root parent %r" % ctx.root.parent) ---------------- - -<1> *ctx.path* represents the path to the +wscript+ file being executed -<2> *ctx.root* is the root of the file system or the folder containing the drive letters (win32 systems) - -The execution output will be the following: - -[source,shishell] ---------------- -$ waf configure dosomething -Setting top to : /tmp/node_tree -Setting out to : /tmp/node_tree/build -'configure' finished successfully (0.007s) -/tmp/node_tree <1> -/ -ctx.path contents {'wscript': /tmp/node_tree/wscript} <2> -ctx.path parent '/tmp' <3> -ctx.root parent None <4> -'dosomething' finished successfully (0.001s) ---------------- - -<1> Absolute paths are used frequently -<2> The folder contents are stored in the dict _children_ which maps names to node objects -<3> Each node keeps reference to his _parent_ node -<4> The root node has no _parent_ - -NOTE: There is a strict correspondance between nodes and filesystem elements: a node represents exactly one file or one folder, and only one node can represent a file or a folder. - -==== Node caching - -By default, only the necessary nodes are created: - -// nodes_cache -[source,python] ---------------- -def configure(ctx): - pass - -def dosomething(ctx): - print(ctx.root.children) ---------------- - -The filesystem root appears to only contain one node, although the real filesystem root contains more folders than just +/tmp+: - -[source,shishell] ---------------- -$ waf configure dosomething -Setting top to : /tmp/nodes_cache -Setting out to : /tmp/nodes_cache/build -'configure' finished successfully (0.086s) -{'tmp': /tmp} -'dosomething' finished successfully (0.001s) - -$ ls / -bin boot dev etc home tmp usr var ---------------- - -This means in particular that some nodes may have to be read from the file system or created before being used. - -// ==== nodes and signatures TODO - - -=== General usage - -==== Searching and creating nodes - -Nodes may be created manually or read from the file system. 
Three methods are provided for this purpose: - -// nodes_search -[source,python] ---------------- -def configure(ctx): - pass - -def dosomething(ctx): - print(ctx.path.find_node('wscript')) <1> - - nd1 = ctx.path.make_node('foo.txt') <2> - print(nd1) - - nd2 = ctx.path.search('foo.txt') <3> - print(nd2) - - nd3 = ctx.path.search('bar.txt') <4> - print(nd3) - - nd2.write('some text') <5> - print(nd2.read()) - - print(ctx.path.listdir()) ---------------- - -<1> Search for a node by reading the file system -<2> Search for a node or create it if it does not exist -<3> Search for a node but do not try to create it -<4> Search for a file which does not exist -<5> Write to the file pointed to by the node, creating or overwriting the file - -The output will be the following: - -[source,shishell] ---------------- -$ waf distclean configure dosomething -'distclean' finished successfully (0.005s) -Setting top to : /tmp/nodes_search -Setting out to : /tmp/nodes_search/build -'configure' finished successfully (0.006s) -wscript -foo.txt -foo.txt -None -some text -['.lock-wafbuild', 'foo.txt', 'build', 'wscript', '.git'] ---------------- - -NOTE: More methods may be found in the https://waf.io/apidocs/index.html[API documentation] - -WARNING: The Node methods are not meant to be safe for concurrent access. The code executed in parallel (the run() method of task objects, for example) must avoid modifying the Node object data structure. - -WARNING: The Node methods read/write must be used instead of plain open/read/write to prevent file handle inheritance issues on win32 systems. Such problems arise when spawning processes during parallel builds. - -==== Listing files and folders - -The method *ant_glob* is used to list files and folders recursively: - -// nodes_ant_glob -[source,python] ---------------- -top = '.' -out = 'build' - -def configure(ctx): - pass - -def dosomething(ctx): - print(ctx.path.ant_glob('wsc*')) <1> - print(ctx.path.ant_glob('w?cr?p?')) <2> - print(ctx.root.ant_glob('usr/include/**/zlib*', <3> dir=False, src=True)) <4> - print(ctx.path.ant_glob(['**/*py', '**/*p'], excl=['**/default*'])) <5> ---------------- - -<1> The method ant_glob is called on a node object, not on the build context; it returns only files by default -<2> Patterns may contain wildcards such as '*' or '?', but they are http://ant.apache.org/manual/dirtasks.html[Ant patterns], not regular expressions -<3> The symbol '**' enables recursion. Complex folder hierarchies may take a lot of time, so use with care. -<4> Even though recursion is enabled, only files are returned by default. To turn directory listing on, use 'dir=True' -<5> Patterns are either lists of strings or space-delimited values. Patterns to exclude are defined in 'waflib.Node.exclude_regs'. - -The execution output will be the following: - -[source,shishell] ---------------- -$ waf configure dosomething -Setting top to : /tmp/nodes_ant_glob -Setting out to : /tmp/nodes_ant_glob/build -'configure' finished successfully (0.006s) -[/tmp/nodes_ant_glob/wscript] -[/tmp/nodes_ant_glob/wscript] -[/usr/include/zlib.h] -[/tmp/nodes_ant_glob/build/c4che/build.config.py] ---------------- - -The sequence '..' represents exactly two dot characters, and not the parent directory. This is used to guarantee that the search will terminate, and that the same files will not be listed multiple times.
Consider the following: - -[source,python] ---------------- -ctx.path.ant_glob('../wscript') <1> -ctx.path.parent.ant_glob('wscript') <2> ---------------- - -<1> Invalid, this pattern will never return anything -<2> Call 'ant_glob' from the parent directory - -==== Path manipulation: abspath, path_from - -The following example illustrates a few ways of obtaining absolute and relative paths: - -[source,python] ---------------- -top = '.' -out = 'build' - -def configure(conf): - pass - -def build(ctx): - dir = ctx.path <1> - src = ctx.path.find_resource('wscript') - bld = ctx.path.find_or_declare('out.out') - - print(src.abspath()) <2> - print(bld.abspath()) - print(dir.abspath()) - print(src.path_from(dir.parent)) <3> - print(ctx.root.path_from(src)) <4> ---------------- - -<1> Directory node, source node and build node -<2> Print the absolute path -<3> Compute the path relative to another node -<4> Compute the relative path in reverse order - -Here is the execution trace on a unix-like system: - -[source,shishell] ---------------- -$ waf distclean configure build -'distclean' finished successfully (0.002s) -'configure' finished successfully (0.005s) -Waf: Entering directory `/tmp/nested/build' -/tmp/nested/wscript -/tmp/nested/build/out.out -/tmp/nested/build/ -/tmp/nested -nested/wscript -../../../.. -Waf: Leaving directory `/tmp/nested/build' -'build' finished successfully (0.003s) ---------------- - -=== BuildContext-specific methods - -==== Source and build nodes - -Although the _sources_ and _targets_ in the +wscript+ files are declared as if they were in the current directory, the target files are output into the build directory. To enable this behaviour, the directory structure below the _top_ directory must be replicated in the _out_ directory. For example, the folder *program* from +demos/c+ has its equivalent in the build directory: - -[source,shishell] ---------------- -$ cd demos/c -$ tree -. -|-- build -| |-- c4che -| | |-- build.config.py -| | `-- _cache.py -| |-- config.h -| |-- config.log -| `-- program -| |-- main.c.0.o -| `-- myprogram -|-- program -| |-- a.h -| |-- main.c -| `-- wscript_build -`-- wscript ---------------- - -To support this, the build context provides two additional nodes: - -. srcnode: node representing the top-level directory -. bldnode: node representing the build directory - -To obtain a build node from a src node and vice-versa, the following methods may be used: - -. Node.get_src() -. Node.get_bld() - -==== Using Nodes during the build phase - -Although using _srcnode_ and _bldnode_ directly is possible, the three following wrapper methods are much easier to use. They accept a string representing the target as input and return a single node: - -. *find_dir*: returns a node or None if the folder cannot be found on the system. -. *find_resource*: returns a node under the source directory, a node under the corresponding build directory, or None if no such node exists. If the file is not in the build directory, the node signature is computed and put into a cache (file contents hash). -. *find_or_declare*: returns a node or creates the corresponding node in the build directory. - -Besides, they all use _find_dir_ internally, which will create the required directory structure in the build directory.
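-For instance, assuming a hypothetical file name 'data.txt', these wrappers may be combined with a rule-based task generator in the following way (a minimal sketch): - -[source,python] ---------------- -def build(bld): - src = bld.path.find_resource('data.txt') <1> - out = bld.path.find_or_declare('data.out') <2> - bld(rule='cp ${SRC} ${TGT}', source=[src], target=[out]) ---------------- - -<1> Node of an existing file, or None if 'data.txt' cannot be found -<2> Node created in the build directory if it does not exist yet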
Because the folders may be replicated in the build directory before the build starts, it is recommended to use it whenever possible: - -[source,python] ---------------- -def build(bld): - p = bld.path.parent.find_dir('src') <1> - p = bld.path.find_dir('../src') <2> ---------------- - -<1> Not recommended, use _find_dir_ instead -<2> Path separators are converted automatically according to the platform. - -==== Nodes, tasks, and task generators - -As seen in the previous chapter, Task objects can process files represented as lists of input and output nodes. The task generators -will usually process the input files given as strings to obtain such nodes and bind them to the tasks. - -Because the build directory can be enabled or disabled, the following file copy would be ambiguous: footnote:[When file copies cannot be avoided, the best practice is to change the file names] - -[source,python] ---------------- -def build(bld): - bld(rule='cp ${SRC} ${TGT}', source='foo.txt', target='foo.txt') ---------------- - -To actually copy a file into the corresponding build directory with the same name, the ambiguity must be removed: - -[source,python] ---------------- -def build(bld): - bld( - rule = 'cp ${SRC} ${TGT}', - source = bld.path.make_node('foo.txt'), - target = bld.path.get_bld().make_node('foo.txt') - ) ---------------- - -In practice, it is easier to use a wrapper that conceals these details (more examples can be found in +demos/subst+): - -[source,python] ---------------- -def build(bld): - bld(features='subst', source='wscript', target='wscript', is_copy=True) ---------------- - -// ==== Serialization concerns - diff --git a/docs/book/pdebug.eps b/docs/book/pdebug.eps deleted file mode 100644 index fe7afdb0bb..0000000000 --- a/docs/book/pdebug.eps +++ /dev/null @@ -1,5787 +0,0 @@ -%!PS-Adobe-3.0 EPSF-3.0 -%%Creator: cairo 1.8.10 (http://cairographics.org) -%%CreationDate: Fri Jul 23 18:31:56 2010 -%%Pages: 1 -%%BoundingBox: 0 0 640 167 -%%DocumentData: Clean7Bit -%%LanguageLevel: 2 -%%EndComments -%%BeginProlog -/cairo_eps_state save def -/dict_count countdictstack def -/op_count count 1 sub def -userdict begin -/q { gsave } bind def -/Q { grestore } bind def -/cm { 6 array astore concat } bind def -/w { setlinewidth } bind def -/J { setlinecap } bind def -/j { setlinejoin } bind def -/M { setmiterlimit } bind def -/d { setdash } bind def -/m { moveto } bind def -/l { lineto } bind def -/c { curveto } bind def -/h { closepath } bind def -/re { exch dup neg 3 1 roll 5 3 roll moveto 0 rlineto - 0 exch rlineto 0 rlineto closepath } bind def -/S { stroke } bind def -/f { fill } bind def -/f* { eofill } bind def -/B { fill stroke } bind def -/B* { eofill stroke } bind def -/n { newpath } bind def -/W { clip } bind def -/W* { eoclip } bind def -/BT { } bind def -/ET { } bind def -/pdfmark where { pop globaldict /?pdfmark /exec load put } - { globaldict begin /?pdfmark /pop load def /pdfmark - /cleartomark load def end } ifelse -/BDC { mark 3 1 roll /BDC pdfmark } bind def -/EMC { mark /EMC pdfmark } bind def -/cairo_store_point { /cairo_point_y exch def /cairo_point_x exch def } def -/Tj { show currentpoint cairo_store_point } bind def -/TJ { - { - dup - type /stringtype eq - { show } { -0.001 mul 0 cairo_font_matrix dtransform rmoveto } ifelse - } forall - currentpoint cairo_store_point -} bind def -/cairo_selectfont { cairo_font_matrix aload pop pop pop 0 0 6 array astore - cairo_font exch selectfont cairo_point_x cairo_point_y moveto } bind def -/Tf { pop /cairo_font exch def 
[remaining cairo-generated PostScript drawing data of the deleted image docs/book/pdebug.eps omitted]
l 150.598 130.495 l -150.598 148.096 l h -150.598 148.096 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -150.598 18 m 157.035 18 l 157.035 35.602 l 150.598 35.602 l 150.598 18 -l h -150.598 18 m S Q -0.301961 0.654902 0.301961 rg -153.41 95.295 m 160.547 95.295 l 160.547 77.698 l 153.41 77.698 l -153.41 95.295 l h -153.41 95.295 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -153.41 70.801 m 160.547 70.801 l 160.547 88.398 l 153.41 88.398 l -153.41 70.801 l h -153.41 70.801 m S Q -0.301961 0.654902 0.301961 rg -153.719 165.698 m 159.973 165.698 l 159.973 148.096 l 153.719 148.096 l -153.719 165.698 l h -153.719 165.698 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -153.719 0.398 m 159.973 0.398 l 159.973 18 l 153.719 18 l 153.719 0.398 -l h -153.719 0.398 m S Q -0.301961 0.654902 0.301961 rg -154.188 112.897 m 160.879 112.897 l 160.879 95.295 l 154.188 95.295 l -154.188 112.897 l h -154.188 112.897 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -154.188 53.199 m 160.879 53.199 l 160.879 70.801 l 154.188 70.801 l -154.188 53.199 l h -154.188 53.199 m S Q -0.301961 0.654902 0.301961 rg -155.195 130.495 m 161.508 130.495 l 161.508 112.897 l 155.195 112.897 l -155.195 130.495 l h -155.195 130.495 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -155.195 35.602 m 161.508 35.602 l 161.508 53.199 l 155.195 53.199 l -155.195 35.602 l h -155.195 35.602 m S Q -0.301961 0.654902 0.301961 rg -157.039 148.096 m 162.434 148.096 l 162.434 130.495 l 157.039 130.495 l -157.039 148.096 l h -157.039 148.096 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -157.039 18 m 162.434 18 l 162.434 35.602 l 157.039 35.602 l 157.039 18 -l h -157.039 18 m S Q -0.301961 0.654902 0.301961 rg -159.977 165.698 m 165.613 165.698 l 165.613 148.096 l 159.977 148.096 l -159.977 165.698 l h -159.977 165.698 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -159.977 0.398 m 165.613 0.398 l 165.613 18 l 159.977 18 l 159.977 0.398 -l h -159.977 0.398 m S Q -0.301961 0.654902 0.301961 rg -160.555 95.295 m 165.785 95.295 l 165.785 77.698 l 160.555 77.698 l -160.555 95.295 l h -160.555 95.295 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -160.555 70.801 m 165.785 70.801 l 165.785 88.398 l 160.555 88.398 l -160.555 70.801 l h -160.555 70.801 m S Q -0.301961 0.654902 0.301961 rg -160.883 112.897 m 165.289 112.897 l 165.289 95.295 l 160.883 95.295 l -160.883 112.897 l h -160.883 112.897 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -160.883 53.199 m 165.289 53.199 l 165.289 70.801 l 160.883 70.801 l -160.883 53.199 l h -160.883 53.199 m S Q -0.301961 0.654902 0.301961 rg -161.512 130.495 m 168.016 130.495 l 168.016 112.897 l 161.512 112.897 l -161.512 130.495 l h -161.512 130.495 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -161.512 35.602 m 168.016 35.602 l 168.016 53.199 l 161.512 53.199 l -161.512 35.602 l h -161.512 35.602 m S Q -0.301961 0.654902 0.301961 rg -162.438 148.096 m 166.648 148.096 l 166.648 130.495 l 162.438 130.495 l -162.438 148.096 l h -162.438 148.096 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -162.438 18 m 166.648 18 l 166.648 35.602 l 162.438 35.602 l 162.438 18 -l h -162.438 18 m S Q -0.301961 0.654902 0.301961 rg -165.297 112.897 m 170.445 112.897 l 170.445 95.295 l 165.297 95.295 l -165.297 112.897 l h -165.297 112.897 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -165.297 53.199 m 170.445 53.199 l 170.445 70.801 l 165.297 70.801 l -165.297 53.199 l h -165.297 53.199 m S Q -0.301961 0.654902 0.301961 rg -165.617 165.698 m 170.777 165.698 l 170.777 148.096 l 165.617 148.096 l -165.617 165.698 l h -165.617 165.698 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -165.617 0.398 m 170.777 0.398 l 170.777 18 l 165.617 18 l 165.617 0.398 -l h -165.617 
0.398 m S Q -0.301961 0.654902 0.301961 rg -165.809 95.295 m 169.406 95.295 l 169.406 77.698 l 165.809 77.698 l -165.809 95.295 l h -165.809 95.295 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -165.809 70.801 m 169.406 70.801 l 169.406 88.398 l 165.809 88.398 l -165.809 70.801 l h -165.809 70.801 m S Q -0.301961 0.654902 0.301961 rg -166.977 148.096 m 173.207 148.096 l 173.207 130.495 l 166.977 130.495 l -166.977 148.096 l h -166.977 148.096 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -166.977 18 m 173.207 18 l 173.207 35.602 l 166.977 35.602 l 166.977 18 -l h -166.977 18 m S Q -0.301961 0.654902 0.301961 rg -168.02 130.495 m 173.055 130.495 l 173.055 112.897 l 168.02 112.897 l -168.02 130.495 l h -168.02 130.495 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -168.02 35.602 m 173.055 35.602 l 173.055 53.199 l 168.02 53.199 l -168.02 35.602 l h -168.02 35.602 m S Q -0.301961 0.654902 0.301961 rg -169.5 95.295 m 175.043 95.295 l 175.043 77.698 l 169.5 77.698 l 169.5 -95.295 l h -169.5 95.295 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -169.5 70.801 m 175.043 70.801 l 175.043 88.398 l 169.5 88.398 l 169.5 -70.801 l h -169.5 70.801 m S Q -0.301961 0.654902 0.301961 rg -170.453 112.897 m 175.188 112.897 l 175.188 95.295 l 170.453 95.295 l -170.453 112.897 l h -170.453 112.897 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -170.453 53.199 m 175.188 53.199 l 175.188 70.801 l 170.453 70.801 l -170.453 53.199 l h -170.453 53.199 m S Q -0.301961 0.654902 0.301961 rg -170.785 165.698 m 175.719 165.698 l 175.719 148.096 l 170.785 148.096 l -170.785 165.698 l h -170.785 165.698 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -170.785 0.398 m 175.719 0.398 l 175.719 18 l 170.785 18 l 170.785 0.398 -l h -170.785 0.398 m S Q -0.301961 0.654902 0.301961 rg -173.062 130.495 m 177.137 130.495 l 177.137 112.897 l 173.062 112.897 l -173.062 130.495 l h -173.062 130.495 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -173.062 35.602 m 177.137 35.602 l 177.137 53.199 l 173.062 53.199 l -173.062 35.602 l h -173.062 35.602 m S Q -0.301961 0.654902 0.301961 rg -173.359 148.096 m 178.574 148.096 l 178.574 130.495 l 173.359 130.495 l -173.359 148.096 l h -173.359 148.096 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -173.359 18 m 178.574 18 l 178.574 35.602 l 173.359 35.602 l 173.359 18 -l h -173.359 18 m S Q -0.301961 0.654902 0.301961 rg -175.051 95.295 m 180.414 95.295 l 180.414 77.698 l 175.051 77.698 l -175.051 95.295 l h -175.051 95.295 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -175.051 70.801 m 180.414 70.801 l 180.414 88.398 l 175.051 88.398 l -175.051 70.801 l h -175.051 70.801 m S Q -0.301961 0.654902 0.301961 rg -175.207 112.897 m 180.898 112.897 l 180.898 95.295 l 175.207 95.295 l -175.207 112.897 l h -175.207 112.897 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -175.207 53.199 m 180.898 53.199 l 180.898 70.801 l 175.207 70.801 l -175.207 53.199 l h -175.207 53.199 m S Q -0.301961 0.654902 0.301961 rg -175.723 165.698 m 181.039 165.698 l 181.039 148.096 l 175.723 148.096 l -175.723 165.698 l h -175.723 165.698 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -175.723 0.398 m 181.039 0.398 l 181.039 18 l 175.723 18 l 175.723 0.398 -l h -175.723 0.398 m S Q -0.301961 0.654902 0.301961 rg -177.141 130.495 m 183.777 130.495 l 183.777 112.897 l 177.141 112.897 l -177.141 130.495 l h -177.141 130.495 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -177.141 35.602 m 183.777 35.602 l 183.777 53.199 l 177.141 53.199 l -177.141 35.602 l h -177.141 35.602 m S Q -0.301961 0.654902 0.301961 rg -178.578 148.096 m 183.199 148.096 l 183.199 130.495 l 178.578 130.495 l -178.578 148.096 l h -178.578 148.096 m f* -0 g -q 1 0 0 -1 0 166.0961 
cm -178.578 18 m 183.199 18 l 183.199 35.602 l 178.578 35.602 l 178.578 18 -l h -178.578 18 m S Q -0.301961 0.654902 0.301961 rg -180.422 95.295 m 185.637 95.295 l 185.637 77.698 l 180.422 77.698 l -180.422 95.295 l h -180.422 95.295 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -180.422 70.801 m 185.637 70.801 l 185.637 88.398 l 180.422 88.398 l -180.422 70.801 l h -180.422 70.801 m S Q -0.301961 0.654902 0.301961 rg -180.906 112.897 m 185.953 112.897 l 185.953 95.295 l 180.906 95.295 l -180.906 112.897 l h -180.906 112.897 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -180.906 53.199 m 185.953 53.199 l 185.953 70.801 l 180.906 70.801 l -180.906 53.199 l h -180.906 53.199 m S Q -0.301961 0.654902 0.301961 rg -181.199 165.698 m 186.617 165.698 l 186.617 148.096 l 181.199 148.096 l -181.199 165.698 l h -181.199 165.698 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -181.199 0.398 m 186.617 0.398 l 186.617 18 l 181.199 18 l 181.199 0.398 -l h -181.199 0.398 m S Q -0.301961 0.654902 0.301961 rg -183.207 148.096 m 188.664 148.096 l 188.664 130.495 l 183.207 130.495 l -183.207 148.096 l h -183.207 148.096 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -183.207 18 m 188.664 18 l 188.664 35.602 l 183.207 35.602 l 183.207 18 -l h -183.207 18 m S Q -0.301961 0.654902 0.301961 rg -183.785 130.495 m 188.062 130.495 l 188.062 112.897 l 183.785 112.897 l -183.785 130.495 l h -183.785 130.495 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -183.785 35.602 m 188.062 35.602 l 188.062 53.199 l 183.785 53.199 l -183.785 35.602 l h -183.785 35.602 m S Q -0.301961 0.654902 0.301961 rg -185.645 95.295 m 190.102 95.295 l 190.102 77.698 l 185.645 77.698 l -185.645 95.295 l h -185.645 95.295 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -185.645 70.801 m 190.102 70.801 l 190.102 88.398 l 185.645 88.398 l -185.645 70.801 l h -185.645 70.801 m S Q -0.301961 0.654902 0.301961 rg -185.961 112.897 m 191.109 112.897 l 191.109 95.295 l 185.961 95.295 l -185.961 112.897 l h -185.961 112.897 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -185.961 53.199 m 191.109 53.199 l 191.109 70.801 l 185.961 70.801 l -185.961 53.199 l h -185.961 53.199 m S Q -0.301961 0.654902 0.301961 rg -186.625 165.698 m 191.461 165.698 l 191.461 148.096 l 186.625 148.096 l -186.625 165.698 l h -186.625 165.698 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -186.625 0.398 m 191.461 0.398 l 191.461 18 l 186.625 18 l 186.625 0.398 -l h -186.625 0.398 m S Q -0.301961 0.654902 0.301961 rg -188.066 130.495 m 193.305 130.495 l 193.305 112.897 l 188.066 112.897 l -188.066 130.495 l h -188.066 130.495 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -188.066 35.602 m 193.305 35.602 l 193.305 53.199 l 188.066 53.199 l -188.066 35.602 l h -188.066 35.602 m S Q -0.301961 0.654902 0.301961 rg -188.668 148.096 m 194.469 148.096 l 194.469 130.495 l 188.668 130.495 l -188.668 148.096 l h -188.668 148.096 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -188.668 18 m 194.469 18 l 194.469 35.602 l 188.668 35.602 l 188.668 18 -l h -188.668 18 m S Q -0.301961 0.654902 0.301961 rg -190.109 95.295 m 197.328 95.295 l 197.328 77.698 l 190.109 77.698 l -190.109 95.295 l h -190.109 95.295 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -190.109 70.801 m 197.328 70.801 l 197.328 88.398 l 190.109 88.398 l -190.109 70.801 l h -190.109 70.801 m S Q -0.301961 0.654902 0.301961 rg -191.113 112.897 m 196.43 112.897 l 196.43 95.295 l 191.113 95.295 l -191.113 112.897 l h -191.113 112.897 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -191.113 53.199 m 196.43 53.199 l 196.43 70.801 l 191.113 70.801 l -191.113 53.199 l h -191.113 53.199 m S Q -0.301961 0.654902 0.301961 rg -191.469 165.698 m 195.23 165.698 l 
195.23 148.096 l 191.469 148.096 l -191.469 165.698 l h -191.469 165.698 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -191.469 0.398 m 195.23 0.398 l 195.23 18 l 191.469 18 l 191.469 0.398 l -h -191.469 0.398 m S Q -0.301961 0.654902 0.301961 rg -193.309 130.495 m 199.141 130.495 l 199.141 112.897 l 193.309 112.897 l -193.309 130.495 l h -193.309 130.495 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -193.309 35.602 m 199.141 35.602 l 199.141 53.199 l 193.309 53.199 l -193.309 35.602 l h -193.309 35.602 m S Q -0.301961 0.654902 0.301961 rg -194.477 148.096 m 200.102 148.096 l 200.102 130.495 l 194.477 130.495 l -194.477 148.096 l h -194.477 148.096 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -194.477 18 m 200.102 18 l 200.102 35.602 l 194.477 35.602 l 194.477 18 -l h -194.477 18 m S Q -0.301961 0.654902 0.301961 rg -195.234 165.698 m 199.715 165.698 l 199.715 148.096 l 195.234 148.096 l -195.234 165.698 l h -195.234 165.698 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -195.234 0.398 m 199.715 0.398 l 199.715 18 l 195.234 18 l 195.234 0.398 -l h -195.234 0.398 m S Q -0.301961 0.654902 0.301961 rg -196.434 112.897 m 201.898 112.897 l 201.898 95.295 l 196.434 95.295 l -196.434 112.897 l h -196.434 112.897 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -196.434 53.199 m 201.898 53.199 l 201.898 70.801 l 196.434 70.801 l -196.434 53.199 l h -196.434 53.199 m S Q -0.301961 0.654902 0.301961 rg -197.332 95.295 m 202.449 95.295 l 202.449 77.698 l 197.332 77.698 l -197.332 95.295 l h -197.332 95.295 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -197.332 70.801 m 202.449 70.801 l 202.449 88.398 l 197.332 88.398 l -197.332 70.801 l h -197.332 70.801 m S Q -0.301961 0.654902 0.301961 rg -199.16 130.495 m 205.215 130.495 l 205.215 112.897 l 199.16 112.897 l -199.16 130.495 l h -199.16 130.495 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -199.16 35.602 m 205.215 35.602 l 205.215 53.199 l 199.16 53.199 l -199.16 35.602 l h -199.16 35.602 m S Q -0.301961 0.654902 0.301961 rg -199.723 165.698 m 205.371 165.698 l 205.371 148.096 l 199.723 148.096 l -199.723 165.698 l h -199.723 165.698 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -199.723 0.398 m 205.371 0.398 l 205.371 18 l 199.723 18 l 199.723 0.398 -l h -199.723 0.398 m S Q -0.301961 0.654902 0.301961 rg -200.113 148.096 m 204.422 148.096 l 204.422 130.495 l 200.113 130.495 l -200.113 148.096 l h -200.113 148.096 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -200.113 18 m 204.422 18 l 204.422 35.602 l 200.113 35.602 l 200.113 18 -l h -200.113 18 m S Q -0.301961 0.654902 0.301961 rg -201.902 112.897 m 207.344 112.897 l 207.344 95.295 l 201.902 95.295 l -201.902 112.897 l h -201.902 112.897 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -201.902 53.199 m 207.344 53.199 l 207.344 70.801 l 201.902 70.801 l -201.902 53.199 l h -201.902 53.199 m S Q -0.301961 0.654902 0.301961 rg -202.469 95.295 m 208.328 95.295 l 208.328 77.698 l 202.469 77.698 l -202.469 95.295 l h -202.469 95.295 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -202.469 70.801 m 208.328 70.801 l 208.328 88.398 l 202.469 88.398 l -202.469 70.801 l h -202.469 70.801 m S Q -0.301961 0.654902 0.301961 rg -204.43 148.096 m 210.023 148.096 l 210.023 130.495 l 204.43 130.495 l -204.43 148.096 l h -204.43 148.096 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -204.43 18 m 210.023 18 l 210.023 35.602 l 204.43 35.602 l 204.43 18 l h -204.43 18 m S Q -0.301961 0.654902 0.301961 rg -205.219 130.495 m 210.332 130.495 l 210.332 112.897 l 205.219 112.897 l -205.219 130.495 l h -205.219 130.495 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -205.219 35.602 m 210.332 35.602 l 210.332 53.199 l 205.219 53.199 l -205.219 35.602 l h 
-205.219 35.602 m S Q -0.301961 0.654902 0.301961 rg -205.383 165.698 m 208.527 165.698 l 208.527 148.096 l 205.383 148.096 l -205.383 165.698 l h -205.383 165.698 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -205.383 0.398 m 208.527 0.398 l 208.527 18 l 205.383 18 l 205.383 0.398 -l h -205.383 0.398 m S Q -0.301961 0.654902 0.301961 rg -207.352 112.897 m 212.77 112.897 l 212.77 95.295 l 207.352 95.295 l -207.352 112.897 l h -207.352 112.897 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -207.352 53.199 m 212.77 53.199 l 212.77 70.801 l 207.352 70.801 l -207.352 53.199 l h -207.352 53.199 m S Q -0.301961 0.654902 0.301961 rg -208.332 95.295 m 214.891 95.295 l 214.891 77.698 l 208.332 77.698 l -208.332 95.295 l h -208.332 95.295 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -208.332 70.801 m 214.891 70.801 l 214.891 88.398 l 208.332 88.398 l -208.332 70.801 l h -208.332 70.801 m S Q -0.301961 0.654902 0.301961 rg -208.535 165.698 m 213.246 165.698 l 213.246 148.096 l 208.535 148.096 l -208.535 165.698 l h -208.535 165.698 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -208.535 0.398 m 213.246 0.398 l 213.246 18 l 208.535 18 l 208.535 0.398 -l h -208.535 0.398 m S Q -0.301961 0.654902 0.301961 rg -210.027 148.096 m 216.301 148.096 l 216.301 130.495 l 210.027 130.495 l -210.027 148.096 l h -210.027 148.096 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -210.027 18 m 216.301 18 l 216.301 35.602 l 210.027 35.602 l 210.027 18 -l h -210.027 18 m S Q -0.301961 0.654902 0.301961 rg -210.34 130.495 m 216.703 130.495 l 216.703 112.897 l 210.34 112.897 l -210.34 130.495 l h -210.34 130.495 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -210.34 35.602 m 216.703 35.602 l 216.703 53.199 l 210.34 53.199 l -210.34 35.602 l h -210.34 35.602 m S Q -0.301961 0.654902 0.301961 rg -212.773 112.897 m 216.277 112.897 l 216.277 95.295 l 212.773 95.295 l -212.773 112.897 l h -212.773 112.897 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -212.773 53.199 m 216.277 53.199 l 216.277 70.801 l 212.773 70.801 l -212.773 53.199 l h -212.773 53.199 m S Q -0.301961 0.654902 0.301961 rg -213.254 165.698 m 218.5 165.698 l 218.5 148.096 l 213.254 148.096 l -213.254 165.698 l h -213.254 165.698 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -213.254 0.398 m 218.5 0.398 l 218.5 18 l 213.254 18 l 213.254 0.398 l h -213.254 0.398 m S Q -0.301961 0.654902 0.301961 rg -214.895 95.295 m 219.586 95.295 l 219.586 77.698 l 214.895 77.698 l -214.895 95.295 l h -214.895 95.295 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -214.895 70.801 m 219.586 70.801 l 219.586 88.398 l 214.895 88.398 l -214.895 70.801 l h -214.895 70.801 m S Q -0.301961 0.654902 0.301961 rg -216.281 112.897 m 220.941 112.897 l 220.941 95.295 l 216.281 95.295 l -216.281 112.897 l h -216.281 112.897 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -216.281 53.199 m 220.941 53.199 l 220.941 70.801 l 216.281 70.801 l -216.281 53.199 l h -216.281 53.199 m S Q -0.301961 0.654902 0.301961 rg -216.312 148.096 m 221.941 148.096 l 221.941 130.495 l 216.312 130.495 l -216.312 148.096 l h -216.312 148.096 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -216.312 18 m 221.941 18 l 221.941 35.602 l 216.312 35.602 l 216.312 18 -l h -216.312 18 m S Q -0.301961 0.654902 0.301961 rg -216.723 130.495 m 222.188 130.495 l 222.188 112.897 l 216.723 112.897 l -216.723 130.495 l h -216.723 130.495 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -216.723 35.602 m 222.188 35.602 l 222.188 53.199 l 216.723 53.199 l -216.723 35.602 l h -216.723 35.602 m S Q -0.301961 0.654902 0.301961 rg -218.508 165.698 m 224.621 165.698 l 224.621 148.096 l 218.508 148.096 l -218.508 165.698 l h -218.508 165.698 m f* -0 g -q 1 0 0 -1 0 
166.0961 cm -218.508 0.398 m 224.621 0.398 l 224.621 18 l 218.508 18 l 218.508 0.398 -l h -218.508 0.398 m S Q -0.301961 0.654902 0.301961 rg -219.59 95.295 m 225.305 95.295 l 225.305 77.698 l 219.59 77.698 l -219.59 95.295 l h -219.59 95.295 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -219.59 70.801 m 225.305 70.801 l 225.305 88.398 l 219.59 88.398 l -219.59 70.801 l h -219.59 70.801 m S Q -0.301961 0.654902 0.301961 rg -220.945 112.897 m 227.617 112.897 l 227.617 95.295 l 220.945 95.295 l -220.945 112.897 l h -220.945 112.897 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -220.945 53.199 m 227.617 53.199 l 227.617 70.801 l 220.945 70.801 l -220.945 53.199 l h -220.945 53.199 m S Q -0.301961 0.654902 0.301961 rg -221.945 148.096 m 226.426 148.096 l 226.426 130.495 l 221.945 130.495 l -221.945 148.096 l h -221.945 148.096 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -221.945 18 m 226.426 18 l 226.426 35.602 l 221.945 35.602 l 221.945 18 -l h -221.945 18 m S Q -0.301961 0.654902 0.301961 rg -222.203 130.495 m 227.824 130.495 l 227.824 112.897 l 222.203 112.897 l -222.203 130.495 l h -222.203 130.495 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -222.203 35.602 m 227.824 35.602 l 227.824 53.199 l 222.203 53.199 l -222.203 35.602 l h -222.203 35.602 m S Q -0.301961 0.654902 0.301961 rg -224.625 165.698 m 230.289 165.698 l 230.289 148.096 l 224.625 148.096 l -224.625 165.698 l h -224.625 165.698 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -224.625 0.398 m 230.289 0.398 l 230.289 18 l 224.625 18 l 224.625 0.398 -l h -224.625 0.398 m S Q -0.301961 0.654902 0.301961 rg -225.312 95.295 m 229.695 95.295 l 229.695 77.698 l 225.312 77.698 l -225.312 95.295 l h -225.312 95.295 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -225.312 70.801 m 229.695 70.801 l 229.695 88.398 l 225.312 88.398 l -225.312 70.801 l h -225.312 70.801 m S Q -0.301961 0.654902 0.301961 rg -226.43 148.096 m 231.98 148.096 l 231.98 130.495 l 226.43 130.495 l -226.43 148.096 l h -226.43 148.096 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -226.43 18 m 231.98 18 l 231.98 35.602 l 226.43 35.602 l 226.43 18 l h -226.43 18 m S Q -0.301961 0.654902 0.301961 rg -227.621 112.897 m 232.746 112.897 l 232.746 95.295 l 227.621 95.295 l -227.621 112.897 l h -227.621 112.897 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -227.621 53.199 m 232.746 53.199 l 232.746 70.801 l 227.621 70.801 l -227.621 53.199 l h -227.621 53.199 m S Q -0.301961 0.654902 0.301961 rg -227.832 130.495 m 233.504 130.495 l 233.504 112.897 l 227.832 112.897 l -227.832 130.495 l h -227.832 130.495 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -227.832 35.602 m 233.504 35.602 l 233.504 53.199 l 227.832 53.199 l -227.832 35.602 l h -227.832 35.602 m S Q -0.301961 0.654902 0.301961 rg -229.699 95.295 m 234.57 95.295 l 234.57 77.698 l 229.699 77.698 l -229.699 95.295 l h -229.699 95.295 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -229.699 70.801 m 234.57 70.801 l 234.57 88.398 l 229.699 88.398 l -229.699 70.801 l h -229.699 70.801 m S Q -0.301961 0.654902 0.301961 rg -230.297 165.698 m 235.898 165.698 l 235.898 148.096 l 230.297 148.096 l -230.297 165.698 l h -230.297 165.698 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -230.297 0.398 m 235.898 0.398 l 235.898 18 l 230.297 18 l 230.297 0.398 -l h -230.297 0.398 m S Q -0.301961 0.654902 0.301961 rg -231.992 148.096 m 237.957 148.096 l 237.957 130.495 l 231.992 130.495 l -231.992 148.096 l h -231.992 148.096 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -231.992 18 m 237.957 18 l 237.957 35.602 l 231.992 35.602 l 231.992 18 -l h -231.992 18 m S Q -0.301961 0.654902 0.301961 rg -232.754 112.897 m 238.359 112.897 l 238.359 95.295 l 232.754 
95.295 l -232.754 112.897 l h -232.754 112.897 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -232.754 53.199 m 238.359 53.199 l 238.359 70.801 l 232.754 70.801 l -232.754 53.199 l h -232.754 53.199 m S Q -0.301961 0.654902 0.301961 rg -233.508 130.495 m 238.656 130.495 l 238.656 112.897 l 233.508 112.897 l -233.508 130.495 l h -233.508 130.495 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -233.508 35.602 m 238.656 35.602 l 238.656 53.199 l 233.508 53.199 l -233.508 35.602 l h -233.508 35.602 m S Q -0.301961 0.654902 0.301961 rg -234.574 95.295 m 240.152 95.295 l 240.152 77.698 l 234.574 77.698 l -234.574 95.295 l h -234.574 95.295 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -234.574 70.801 m 240.152 70.801 l 240.152 88.398 l 234.574 88.398 l -234.574 70.801 l h -234.574 70.801 m S Q -0.301961 0.654902 0.301961 rg -235.902 165.698 m 241.793 165.698 l 241.793 148.096 l 235.902 148.096 l -235.902 165.698 l h -235.902 165.698 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -235.902 0.398 m 241.793 0.398 l 241.793 18 l 235.902 18 l 235.902 0.398 -l h -235.902 0.398 m S Q -0.301961 0.654902 0.301961 rg -237.961 148.096 m 242.301 148.096 l 242.301 130.495 l 237.961 130.495 l -237.961 148.096 l h -237.961 148.096 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -237.961 18 m 242.301 18 l 242.301 35.602 l 237.961 35.602 l 237.961 18 -l h -237.961 18 m S Q -0.301961 0.654902 0.301961 rg -238.363 112.897 m 243.129 112.897 l 243.129 95.295 l 238.363 95.295 l -238.363 112.897 l h -238.363 112.897 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -238.363 53.199 m 243.129 53.199 l 243.129 70.801 l 238.363 70.801 l -238.363 53.199 l h -238.363 53.199 m S Q -0.301961 0.654902 0.301961 rg -238.664 130.495 m 244.484 130.495 l 244.484 112.897 l 238.664 112.897 l -238.664 130.495 l h -238.664 130.495 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -238.664 35.602 m 244.484 35.602 l 244.484 53.199 l 238.664 53.199 l -238.664 35.602 l h -238.664 35.602 m S Q -0.301961 0.654902 0.301961 rg -240.156 95.295 m 246.93 95.295 l 246.93 77.698 l 240.156 77.698 l -240.156 95.295 l h -240.156 95.295 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -240.156 70.801 m 246.93 70.801 l 246.93 88.398 l 240.156 88.398 l -240.156 70.801 l h -240.156 70.801 m S Q -0.301961 0.654902 0.301961 rg -241.797 165.698 m 247.633 165.698 l 247.633 148.096 l 241.797 148.096 l -241.797 165.698 l h -241.797 165.698 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -241.797 0.398 m 247.633 0.398 l 247.633 18 l 241.797 18 l 241.797 0.398 -l h -241.797 0.398 m S Q -0.301961 0.654902 0.301961 rg -242.309 148.096 m 246.59 148.096 l 246.59 130.495 l 242.309 130.495 l -242.309 148.096 l h -242.309 148.096 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -242.309 18 m 246.59 18 l 246.59 35.602 l 242.309 35.602 l 242.309 18 l -h -242.309 18 m S Q -0.301961 0.654902 0.301961 rg -243.133 112.897 m 248.375 112.897 l 248.375 95.295 l 243.133 95.295 l -243.133 112.897 l h -243.133 112.897 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -243.133 53.199 m 248.375 53.199 l 248.375 70.801 l 243.133 70.801 l -243.133 53.199 l h -243.133 53.199 m S Q -0.301961 0.654902 0.301961 rg -244.492 130.495 m 249.531 130.495 l 249.531 112.897 l 244.492 112.897 l -244.492 130.495 l h -244.492 130.495 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -244.492 35.602 m 249.531 35.602 l 249.531 53.199 l 244.492 53.199 l -244.492 35.602 l h -244.492 35.602 m S Q -0.301961 0.654902 0.301961 rg -246.594 148.096 m 251.383 148.096 l 251.383 130.495 l 246.594 130.495 l -246.594 148.096 l h -246.594 148.096 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -246.594 18 m 251.383 18 l 251.383 35.602 l 246.594 35.602 l 246.594 18 -l h 
-246.594 18 m S Q -0.301961 0.654902 0.301961 rg -246.934 95.295 m 252.223 95.295 l 252.223 77.698 l 246.934 77.698 l -246.934 95.295 l h -246.934 95.295 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -246.934 70.801 m 252.223 70.801 l 252.223 88.398 l 246.934 88.398 l -246.934 70.801 l h -246.934 70.801 m S Q -0.301961 0.654902 0.301961 rg -247.637 165.698 m 253.512 165.698 l 253.512 148.096 l 247.637 148.096 l -247.637 165.698 l h -247.637 165.698 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -247.637 0.398 m 253.512 0.398 l 253.512 18 l 247.637 18 l 247.637 0.398 -l h -247.637 0.398 m S Q -0.301961 0.654902 0.301961 rg -248.383 112.897 m 253.191 112.897 l 253.191 95.295 l 248.383 95.295 l -248.383 112.897 l h -248.383 112.897 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -248.383 53.199 m 253.191 53.199 l 253.191 70.801 l 248.383 70.801 l -248.383 53.199 l h -248.383 53.199 m S Q -0.301961 0.654902 0.301961 rg -249.535 130.495 m 254.566 130.495 l 254.566 112.897 l 249.535 112.897 l -249.535 130.495 l h -249.535 130.495 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -249.535 35.602 m 254.566 35.602 l 254.566 53.199 l 249.535 53.199 l -249.535 35.602 l h -249.535 35.602 m S Q -0.301961 0.654902 0.301961 rg -251.387 148.096 m 256.684 148.096 l 256.684 130.495 l 251.387 130.495 l -251.387 148.096 l h -251.387 148.096 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -251.387 18 m 256.684 18 l 256.684 35.602 l 251.387 35.602 l 251.387 18 -l h -251.387 18 m S Q -0.301961 0.654902 0.301961 rg -252.227 95.295 m 257.953 95.295 l 257.953 77.698 l 252.227 77.698 l -252.227 95.295 l h -252.227 95.295 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -252.227 70.801 m 257.953 70.801 l 257.953 88.398 l 252.227 88.398 l -252.227 70.801 l h -252.227 70.801 m S Q -0.301961 0.654902 0.301961 rg -253.199 112.897 m 257.625 112.897 l 257.625 95.295 l 253.199 95.295 l -253.199 112.897 l h -253.199 112.897 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -253.199 53.199 m 257.625 53.199 l 257.625 70.801 l 253.199 70.801 l -253.199 53.199 l h -253.199 53.199 m S Q -0.301961 0.654902 0.301961 rg -253.52 165.698 m 259.605 165.698 l 259.605 148.096 l 253.52 148.096 l -253.52 165.698 l h -253.52 165.698 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -253.52 0.398 m 259.605 0.398 l 259.605 18 l 253.52 18 l 253.52 0.398 l -h -253.52 0.398 m S Q -0.301961 0.654902 0.301961 rg -254.57 130.495 m 260.035 130.495 l 260.035 112.897 l 254.57 112.897 l -254.57 130.495 l h -254.57 130.495 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -254.57 35.602 m 260.035 35.602 l 260.035 53.199 l 254.57 53.199 l -254.57 35.602 l h -254.57 35.602 m S Q -0.301961 0.654902 0.301961 rg -256.688 148.096 m 262.238 148.096 l 262.238 130.495 l 256.688 130.495 l -256.688 148.096 l h -256.688 148.096 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -256.688 18 m 262.238 18 l 262.238 35.602 l 256.688 35.602 l 256.688 18 -l h -256.688 18 m S Q -0.301961 0.654902 0.301961 rg -257.629 112.897 m 263.336 112.897 l 263.336 95.295 l 257.629 95.295 l -257.629 112.897 l h -257.629 112.897 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -257.629 53.199 m 263.336 53.199 l 263.336 70.801 l 257.629 70.801 l -257.629 53.199 l h -257.629 53.199 m S Q -0.301961 0.654902 0.301961 rg -257.957 95.295 m 263.055 95.295 l 263.055 77.698 l 257.957 77.698 l -257.957 95.295 l h -257.957 95.295 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -257.957 70.801 m 263.055 70.801 l 263.055 88.398 l 257.957 88.398 l -257.957 70.801 l h -257.957 70.801 m S Q -0.301961 0.654902 0.301961 rg -259.609 165.698 m 265.039 165.698 l 265.039 148.096 l 259.609 148.096 l -259.609 165.698 l h -259.609 165.698 m f* -0 g -q 1 0 0 -1 
0 166.0961 cm -259.609 0.398 m 265.039 0.398 l 265.039 18 l 259.609 18 l 259.609 0.398 -l h -259.609 0.398 m S Q -0.301961 0.654902 0.301961 rg -260.043 130.495 m 264.324 130.495 l 264.324 112.897 l 260.043 112.897 l -260.043 130.495 l h -260.043 130.495 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -260.043 35.602 m 264.324 35.602 l 264.324 53.199 l 260.043 53.199 l -260.043 35.602 l h -260.043 35.602 m S Q -0.301961 0.654902 0.301961 rg -262.246 148.096 m 269.133 148.096 l 269.133 130.495 l 262.246 130.495 l -262.246 148.096 l h -262.246 148.096 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -262.246 18 m 269.133 18 l 269.133 35.602 l 262.246 35.602 l 262.246 18 -l h -262.246 18 m S Q -0.301961 0.654902 0.301961 rg -263.062 95.295 m 268.488 95.295 l 268.488 77.698 l 263.062 77.698 l -263.062 95.295 l h -263.062 95.295 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -263.062 70.801 m 268.488 70.801 l 268.488 88.398 l 263.062 88.398 l -263.062 70.801 l h -263.062 70.801 m S Q -0.301961 0.654902 0.301961 rg -263.352 112.897 m 267.625 112.897 l 267.625 95.295 l 263.352 95.295 l -263.352 112.897 l h -263.352 112.897 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -263.352 53.199 m 267.625 53.199 l 267.625 70.801 l 263.352 70.801 l -263.352 53.199 l h -263.352 53.199 m S Q -0.301961 0.654902 0.301961 rg -264.332 130.495 m 268.812 130.495 l 268.812 112.897 l 264.332 112.897 l -264.332 130.495 l h -264.332 130.495 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -264.332 35.602 m 268.812 35.602 l 268.812 53.199 l 264.332 53.199 l -264.332 35.602 l h -264.332 35.602 m S Q -0.301961 0.654902 0.301961 rg -265.043 165.698 m 270.113 165.698 l 270.113 148.096 l 265.043 148.096 l -265.043 165.698 l h -265.043 165.698 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -265.043 0.398 m 270.113 0.398 l 270.113 18 l 265.043 18 l 265.043 0.398 -l h -265.043 0.398 m S Q -0.301961 0.654902 0.301961 rg -267.629 112.897 m 272.32 112.897 l 272.32 95.295 l 267.629 95.295 l -267.629 112.897 l h -267.629 112.897 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -267.629 53.199 m 272.32 53.199 l 272.32 70.801 l 267.629 70.801 l -267.629 53.199 l h -267.629 53.199 m S Q -0.301961 0.654902 0.301961 rg -268.496 95.295 m 273.285 95.295 l 273.285 77.698 l 268.496 77.698 l -268.496 95.295 l h -268.496 95.295 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -268.496 70.801 m 273.285 70.801 l 273.285 88.398 l 268.496 88.398 l -268.496 70.801 l h -268.496 70.801 m S Q -0.301961 0.654902 0.301961 rg -268.816 130.495 m 274.035 130.495 l 274.035 112.897 l 268.816 112.897 l -268.816 130.495 l h -268.816 130.495 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -268.816 35.602 m 274.035 35.602 l 274.035 53.199 l 268.816 53.199 l -268.816 35.602 l h -268.816 35.602 m S Q -0.301961 0.654902 0.301961 rg -269.141 148.096 m 274.867 148.096 l 274.867 130.495 l 269.141 130.495 l -269.141 148.096 l h -269.141 148.096 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -269.141 18 m 274.867 18 l 274.867 35.602 l 269.141 35.602 l 269.141 18 -l h -269.141 18 m S Q -0.301961 0.654902 0.301961 rg -270.117 165.698 m 276.32 165.698 l 276.32 148.096 l 270.117 148.096 l -270.117 165.698 l h -270.117 165.698 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -270.117 0.398 m 276.32 0.398 l 276.32 18 l 270.117 18 l 270.117 0.398 l -h -270.117 0.398 m S Q -0.301961 0.654902 0.301961 rg -272.328 112.897 m 277.434 112.897 l 277.434 95.295 l 272.328 95.295 l -272.328 112.897 l h -272.328 112.897 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -272.328 53.199 m 277.434 53.199 l 277.434 70.801 l 272.328 70.801 l -272.328 53.199 l h -272.328 53.199 m S Q -0.301961 0.654902 0.301961 rg -273.289 95.295 m 
278.852 95.295 l 278.852 77.698 l 273.289 77.698 l -273.289 95.295 l h -273.289 95.295 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -273.289 70.801 m 278.852 70.801 l 278.852 88.398 l 273.289 88.398 l -273.289 70.801 l h -273.289 70.801 m S Q -0.301961 0.654902 0.301961 rg -274.039 130.495 m 279.227 130.495 l 279.227 112.897 l 274.039 112.897 l -274.039 130.495 l h -274.039 130.495 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -274.039 35.602 m 279.227 35.602 l 279.227 53.199 l 274.039 53.199 l -274.039 35.602 l h -274.039 35.602 m S Q -0.301961 0.654902 0.301961 rg -274.871 148.096 m 280.34 148.096 l 280.34 130.495 l 274.871 130.495 l -274.871 148.096 l h -274.871 148.096 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -274.871 18 m 280.34 18 l 280.34 35.602 l 274.871 35.602 l 274.871 18 l -h -274.871 18 m S Q -0.301961 0.654902 0.301961 rg -276.324 165.698 m 281.297 165.698 l 281.297 148.096 l 276.324 148.096 l -276.324 165.698 l h -276.324 165.698 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -276.324 0.398 m 281.297 0.398 l 281.297 18 l 276.324 18 l 276.324 0.398 -l h -276.324 0.398 m S Q -0.301961 0.654902 0.301961 rg -277.438 112.897 m 282.809 112.897 l 282.809 95.295 l 277.438 95.295 l -277.438 112.897 l h -277.438 112.897 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -277.438 53.199 m 282.809 53.199 l 282.809 70.801 l 277.438 70.801 l -277.438 53.199 l h -277.438 53.199 m S Q -0.301961 0.654902 0.301961 rg -278.855 95.295 m 283.504 95.295 l 283.504 77.698 l 278.855 77.698 l -278.855 95.295 l h -278.855 95.295 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -278.855 70.801 m 283.504 70.801 l 283.504 88.398 l 278.855 88.398 l -278.855 70.801 l h -278.855 70.801 m S Q -0.301961 0.654902 0.301961 rg -279.234 130.495 m 284.672 130.495 l 284.672 112.897 l 279.234 112.897 l -279.234 130.495 l h -279.234 130.495 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -279.234 35.602 m 284.672 35.602 l 284.672 53.199 l 279.234 53.199 l -279.234 35.602 l h -279.234 35.602 m S Q -0.301961 0.654902 0.301961 rg -280.348 148.096 m 286.465 148.096 l 286.465 130.495 l 280.348 130.495 l -280.348 148.096 l h -280.348 148.096 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -280.348 18 m 286.465 18 l 286.465 35.602 l 280.348 35.602 l 280.348 18 -l h -280.348 18 m S Q -0.301961 0.654902 0.301961 rg -281.305 165.698 m 286.453 165.698 l 286.453 148.096 l 281.305 148.096 l -281.305 165.698 l h -281.305 165.698 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -281.305 0.398 m 286.453 0.398 l 286.453 18 l 281.305 18 l 281.305 0.398 -l h -281.305 0.398 m S Q -0.301961 0.654902 0.301961 rg -282.812 112.897 m 289.047 112.897 l 289.047 95.295 l 282.812 95.295 l -282.812 112.897 l h -282.812 112.897 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -282.812 53.199 m 289.047 53.199 l 289.047 70.801 l 282.812 70.801 l -282.812 53.199 l h -282.812 53.199 m S Q -0.301961 0.654902 0.301961 rg -283.512 95.295 m 287.535 95.295 l 287.535 77.698 l 283.512 77.698 l -283.512 95.295 l h -283.512 95.295 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -283.512 70.801 m 287.535 70.801 l 287.535 88.398 l 283.512 88.398 l -283.512 70.801 l h -283.512 70.801 m S Q -0.301961 0.654902 0.301961 rg -284.676 130.495 m 289.887 130.495 l 289.887 112.897 l 284.676 112.897 l -284.676 130.495 l h -284.676 130.495 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -284.676 35.602 m 289.887 35.602 l 289.887 53.199 l 284.676 53.199 l -284.676 35.602 l h -284.676 35.602 m S Q -0.301961 0.654902 0.301961 rg -286.461 165.698 m 291.602 165.698 l 291.602 148.096 l 286.461 148.096 l -286.461 165.698 l h -286.461 165.698 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -286.461 0.398 m 291.602 0.398 l 
291.602 18 l 286.461 18 l 286.461 0.398 -l h -286.461 0.398 m S Q -0.301961 0.654902 0.301961 rg -286.477 148.096 m 291.074 148.096 l 291.074 130.495 l 286.477 130.495 l -286.477 148.096 l h -286.477 148.096 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -286.477 18 m 291.074 18 l 291.074 35.602 l 286.477 35.602 l 286.477 18 -l h -286.477 18 m S Q -0.301961 0.654902 0.301961 rg -287.543 95.295 m 293.098 95.295 l 293.098 77.698 l 287.543 77.698 l -287.543 95.295 l h -287.543 95.295 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -287.543 70.801 m 293.098 70.801 l 293.098 88.398 l 287.543 88.398 l -287.543 70.801 l h -287.543 70.801 m S Q -0.301961 0.654902 0.301961 rg -289.055 112.897 m 293.73 112.897 l 293.73 95.295 l 289.055 95.295 l -289.055 112.897 l h -289.055 112.897 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -289.055 53.199 m 293.73 53.199 l 293.73 70.801 l 289.055 70.801 l -289.055 53.199 l h -289.055 53.199 m S Q -0.301961 0.654902 0.301961 rg -289.891 130.495 m 295.184 130.495 l 295.184 112.897 l 289.891 112.897 l -289.891 130.495 l h -289.891 130.495 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -289.891 35.602 m 295.184 35.602 l 295.184 53.199 l 289.891 53.199 l -289.891 35.602 l h -289.891 35.602 m S Q -0.301961 0.654902 0.301961 rg -291.082 148.096 m 296.277 148.096 l 296.277 130.495 l 291.082 130.495 l -291.082 148.096 l h -291.082 148.096 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -291.082 18 m 296.277 18 l 296.277 35.602 l 291.082 35.602 l 291.082 18 -l h -291.082 18 m S Q -0.301961 0.654902 0.301961 rg -291.609 165.698 m 297.926 165.698 l 297.926 148.096 l 291.609 148.096 l -291.609 165.698 l h -291.609 165.698 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -291.609 0.398 m 297.926 0.398 l 297.926 18 l 291.609 18 l 291.609 0.398 -l h -291.609 0.398 m S Q -0.301961 0.654902 0.301961 rg -293.105 95.295 m 297.527 95.295 l 297.527 77.698 l 293.105 77.698 l -293.105 95.295 l h -293.105 95.295 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -293.105 70.801 m 297.527 70.801 l 297.527 88.398 l 293.105 88.398 l -293.105 70.801 l h -293.105 70.801 m S Q -0.301961 0.654902 0.301961 rg -293.738 112.897 m 300.902 112.897 l 300.902 95.295 l 293.738 95.295 l -293.738 112.897 l h -293.738 112.897 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -293.738 53.199 m 300.902 53.199 l 300.902 70.801 l 293.738 70.801 l -293.738 53.199 l h -293.738 53.199 m S Q -0.301961 0.654902 0.301961 rg -295.188 130.495 m 300.234 130.495 l 300.234 112.897 l 295.188 112.897 l -295.188 130.495 l h -295.188 130.495 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -295.188 35.602 m 300.234 35.602 l 300.234 53.199 l 295.188 53.199 l -295.188 35.602 l h -295.188 35.602 m S Q -0.301961 0.654902 0.301961 rg -296.285 148.096 m 300.727 148.096 l 300.727 130.495 l 296.285 130.495 l -296.285 148.096 l h -296.285 148.096 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -296.285 18 m 300.727 18 l 300.727 35.602 l 296.285 35.602 l 296.285 18 -l h -296.285 18 m S Q -0.301961 0.654902 0.301961 rg -297.531 95.295 m 301.559 95.295 l 301.559 77.698 l 297.531 77.698 l -297.531 95.295 l h -297.531 95.295 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -297.531 70.801 m 301.559 70.801 l 301.559 88.398 l 297.531 88.398 l -297.531 70.801 l h -297.531 70.801 m S Q -0.301961 0.654902 0.301961 rg -297.93 165.698 m 304.039 165.698 l 304.039 148.096 l 297.93 148.096 l -297.93 165.698 l h -297.93 165.698 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -297.93 0.398 m 304.039 0.398 l 304.039 18 l 297.93 18 l 297.93 0.398 l -h -297.93 0.398 m S Q -0.301961 0.654902 0.301961 rg -300.238 130.495 m 306.344 130.495 l 306.344 112.897 l 300.238 112.897 l -300.238 130.495 l h 
-300.238 130.495 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -300.238 35.602 m 306.344 35.602 l 306.344 53.199 l 300.238 53.199 l -300.238 35.602 l h -300.238 35.602 m S Q -0.301961 0.654902 0.301961 rg -300.734 148.096 m 306.105 148.096 l 306.105 130.495 l 300.734 130.495 l -300.734 148.096 l h -300.734 148.096 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -300.734 18 m 306.105 18 l 306.105 35.602 l 300.734 35.602 l 300.734 18 -l h -300.734 18 m S Q -0.301961 0.654902 0.301961 rg -300.922 112.897 m 305.457 112.897 l 305.457 95.295 l 300.922 95.295 l -300.922 112.897 l h -300.922 112.897 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -300.922 53.199 m 305.457 53.199 l 305.457 70.801 l 300.922 70.801 l -300.922 53.199 l h -300.922 53.199 m S Q -0.301961 0.654902 0.301961 rg -301.566 95.295 m 307.422 95.295 l 307.422 77.698 l 301.566 77.698 l -301.566 95.295 l h -301.566 95.295 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -301.566 70.801 m 307.422 70.801 l 307.422 88.398 l 301.566 88.398 l -301.566 70.801 l h -301.566 70.801 m S Q -0.301961 0.654902 0.301961 rg -304.043 165.698 m 309.945 165.698 l 309.945 148.096 l 304.043 148.096 l -304.043 165.698 l h -304.043 165.698 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -304.043 0.398 m 309.945 0.398 l 309.945 18 l 304.043 18 l 304.043 0.398 -l h -304.043 0.398 m S Q -0.301961 0.654902 0.301961 rg -305.465 112.897 m 310.605 112.897 l 310.605 95.295 l 305.465 95.295 l -305.465 112.897 l h -305.465 112.897 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -305.465 53.199 m 310.605 53.199 l 310.605 70.801 l 305.465 70.801 l -305.465 53.199 l h -305.465 53.199 m S Q -0.301961 0.654902 0.301961 rg -306.113 148.096 m 311.551 148.096 l 311.551 130.495 l 306.113 130.495 l -306.113 148.096 l h -306.113 148.096 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -306.113 18 m 311.551 18 l 311.551 35.602 l 306.113 35.602 l 306.113 18 -l h -306.113 18 m S Q -0.301961 0.654902 0.301961 rg -306.352 130.495 m 311.688 130.495 l 311.688 112.897 l 306.352 112.897 l -306.352 130.495 l h -306.352 130.495 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -306.352 35.602 m 311.688 35.602 l 311.688 53.199 l 306.352 53.199 l -306.352 35.602 l h -306.352 35.602 m S Q -0.301961 0.654902 0.301961 rg -307.426 95.295 m 312.832 95.295 l 312.832 77.698 l 307.426 77.698 l -307.426 95.295 l h -307.426 95.295 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -307.426 70.801 m 312.832 70.801 l 312.832 88.398 l 307.426 88.398 l -307.426 70.801 l h -307.426 70.801 m S Q -0.301961 0.654902 0.301961 rg -309.949 165.698 m 314.418 165.698 l 314.418 148.096 l 309.949 148.096 l -309.949 165.698 l h -309.949 165.698 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -309.949 0.398 m 314.418 0.398 l 314.418 18 l 309.949 18 l 309.949 0.398 -l h -309.949 0.398 m S Q -0.301961 0.654902 0.301961 rg -310.609 112.897 m 315.875 112.897 l 315.875 95.295 l 310.609 95.295 l -310.609 112.897 l h -310.609 112.897 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -310.609 53.199 m 315.875 53.199 l 315.875 70.801 l 310.609 70.801 l -310.609 53.199 l h -310.609 53.199 m S Q -0.301961 0.654902 0.301961 rg -311.555 148.096 m 318.098 148.096 l 318.098 130.495 l 311.555 130.495 l -311.555 148.096 l h -311.555 148.096 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -311.555 18 m 318.098 18 l 318.098 35.602 l 311.555 35.602 l 311.555 18 -l h -311.555 18 m S Q -0.301961 0.654902 0.301961 rg -311.703 130.495 m 315.52 130.495 l 315.52 112.897 l 311.703 112.897 l -311.703 130.495 l h -311.703 130.495 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -311.703 35.602 m 315.52 35.602 l 315.52 53.199 l 311.703 53.199 l -311.703 35.602 l h -311.703 35.602 m S Q -0.301961 
0.654902 0.301961 rg -312.836 95.295 m 318.664 95.295 l 318.664 77.698 l 312.836 77.698 l -312.836 95.295 l h -312.836 95.295 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -312.836 70.801 m 318.664 70.801 l 318.664 88.398 l 312.836 88.398 l -312.836 70.801 l h -312.836 70.801 m S Q -0.301961 0.654902 0.301961 rg -314.422 165.698 m 320.969 165.698 l 320.969 148.096 l 314.422 148.096 l -314.422 165.698 l h -314.422 165.698 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -314.422 0.398 m 320.969 0.398 l 320.969 18 l 314.422 18 l 314.422 0.398 -l h -314.422 0.398 m S Q -0.301961 0.654902 0.301961 rg -315.523 130.495 m 319.949 130.495 l 319.949 112.897 l 315.523 112.897 l -315.523 130.495 l h -315.523 130.495 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -315.523 35.602 m 319.949 35.602 l 319.949 53.199 l 315.523 53.199 l -315.523 35.602 l h -315.523 35.602 m S Q -0.301961 0.654902 0.301961 rg -315.891 112.897 m 321.113 112.897 l 321.113 95.295 l 315.891 95.295 l -315.891 112.897 l h -315.891 112.897 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -315.891 53.199 m 321.113 53.199 l 321.113 70.801 l 315.891 70.801 l -315.891 53.199 l h -315.891 53.199 m S Q -0.301961 0.654902 0.301961 rg -318.102 148.096 m 324.02 148.096 l 324.02 130.495 l 318.102 130.495 l -318.102 148.096 l h -318.102 148.096 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -318.102 18 m 324.02 18 l 324.02 35.602 l 318.102 35.602 l 318.102 18 l -h -318.102 18 m S Q -0.301961 0.654902 0.301961 rg -318.672 95.295 m 322.668 95.295 l 322.668 77.698 l 318.672 77.698 l -318.672 95.295 l h -318.672 95.295 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -318.672 70.801 m 322.668 70.801 l 322.668 88.398 l 318.672 88.398 l -318.672 70.801 l h -318.672 70.801 m S Q -0.301961 0.654902 0.301961 rg -319.957 130.495 m 325.051 130.495 l 325.051 112.897 l 319.957 112.897 l -319.957 130.495 l h -319.957 130.495 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -319.957 35.602 m 325.051 35.602 l 325.051 53.199 l 319.957 53.199 l -319.957 35.602 l h -319.957 35.602 m S Q -0.301961 0.654902 0.301961 rg -320.977 165.698 m 325.43 165.698 l 325.43 148.096 l 320.977 148.096 l -320.977 165.698 l h -320.977 165.698 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -320.977 0.398 m 325.43 0.398 l 325.43 18 l 320.977 18 l 320.977 0.398 l -h -320.977 0.398 m S Q -0.301961 0.654902 0.301961 rg -321.129 112.897 m 326.555 112.897 l 326.555 95.295 l 321.129 95.295 l -321.129 112.897 l h -321.129 112.897 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -321.129 53.199 m 326.555 53.199 l 326.555 70.801 l 321.129 70.801 l -321.129 53.199 l h -321.129 53.199 m S Q -0.301961 0.654902 0.301961 rg -322.672 95.295 m 327.84 95.295 l 327.84 77.698 l 322.672 77.698 l -322.672 95.295 l h -322.672 95.295 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -322.672 70.801 m 327.84 70.801 l 327.84 88.398 l 322.672 88.398 l -322.672 70.801 l h -322.672 70.801 m S Q -0.301961 0.654902 0.301961 rg -324.023 148.096 m 329.047 148.096 l 329.047 130.495 l 324.023 130.495 l -324.023 148.096 l h -324.023 148.096 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -324.023 18 m 329.047 18 l 329.047 35.602 l 324.023 35.602 l 324.023 18 -l h -324.023 18 m S Q -0.301961 0.654902 0.301961 rg -325.059 130.495 m 330.922 130.495 l 330.922 112.897 l 325.059 112.897 l -325.059 130.495 l h -325.059 130.495 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -325.059 35.602 m 330.922 35.602 l 330.922 53.199 l 325.059 53.199 l -325.059 35.602 l h -325.059 35.602 m S Q -0.301961 0.654902 0.301961 rg -325.434 165.698 m 331.176 165.698 l 331.176 148.096 l 325.434 148.096 l -325.434 165.698 l h -325.434 165.698 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -325.434 
[... elided: several hundred removed lines of raw PDF content-stream operators (filled green rectangles with black outlines) from a deleted figure file; no captions, labels, or other recoverable text ...]
70.801 m S Q -0.301961 0.654902 0.301961 rg -609.641 130.495 m 616.34 130.495 l 616.34 112.897 l 609.641 112.897 l -609.641 130.495 l h -609.641 130.495 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -609.641 35.602 m 616.34 35.602 l 616.34 53.199 l 609.641 53.199 l -609.641 35.602 l h -609.641 35.602 m S Q -0.301961 0.654902 0.301961 rg -611.273 148.096 m 616.508 148.096 l 616.508 130.495 l 611.273 130.495 l -611.273 148.096 l h -611.273 148.096 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -611.273 18 m 616.508 18 l 616.508 35.602 l 611.273 35.602 l 611.273 18 -l h -611.273 18 m S Q -0.301961 0.654902 0.301961 rg -611.91 112.897 m 616.719 112.897 l 616.719 95.295 l 611.91 95.295 l -611.91 112.897 l h -611.91 112.897 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -611.91 53.199 m 616.719 53.199 l 616.719 70.801 l 611.91 70.801 l -611.91 53.199 l h -611.91 53.199 m S Q -0.301961 0.654902 0.301961 rg -613.039 165.698 m 618.566 165.698 l 618.566 148.096 l 613.039 148.096 l -613.039 165.698 l h -613.039 165.698 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -613.039 0.398 m 618.566 0.398 l 618.566 18 l 613.039 18 l 613.039 0.398 -l h -613.039 0.398 m S Q -0.301961 0.654902 0.301961 rg -614.242 95.295 m 618.062 95.295 l 618.062 77.698 l 614.242 77.698 l -614.242 95.295 l h -614.242 95.295 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -614.242 70.801 m 618.062 70.801 l 618.062 88.398 l 614.242 88.398 l -614.242 70.801 l h -614.242 70.801 m S Q -0.301961 0.654902 0.301961 rg -616.355 130.495 m 621.363 130.495 l 621.363 112.897 l 616.355 112.897 l -616.355 130.495 l h -616.355 130.495 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -616.355 35.602 m 621.363 35.602 l 621.363 53.199 l 616.355 53.199 l -616.355 35.602 l h -616.355 35.602 m S Q -0.301961 0.654902 0.301961 rg -616.527 148.096 m 622.074 148.096 l 622.074 130.495 l 616.527 130.495 l -616.527 148.096 l h -616.527 148.096 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -616.527 18 m 622.074 18 l 622.074 35.602 l 616.527 35.602 l 616.527 18 -l h -616.527 18 m S Q -0.301961 0.654902 0.301961 rg -616.777 112.897 m 621.738 112.897 l 621.738 95.295 l 616.777 95.295 l -616.777 112.897 l h -616.777 112.897 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -616.777 53.199 m 621.738 53.199 l 621.738 70.801 l 616.777 70.801 l -616.777 53.199 l h -616.777 53.199 m S Q -0.301961 0.654902 0.301961 rg -618.066 95.295 m 624.359 95.295 l 624.359 77.698 l 618.066 77.698 l -618.066 95.295 l h -618.066 95.295 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -618.066 70.801 m 624.359 70.801 l 624.359 88.398 l 618.066 88.398 l -618.066 70.801 l h -618.066 70.801 m S Q -0.301961 0.654902 0.301961 rg -618.582 165.698 m 624.078 165.698 l 624.078 148.096 l 618.582 148.096 l -618.582 165.698 l h -618.582 165.698 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -618.582 0.398 m 624.078 0.398 l 624.078 18 l 618.582 18 l 618.582 0.398 -l h -618.582 0.398 m S Q -0.301961 0.654902 0.301961 rg -621.371 130.495 m 625.562 130.495 l 625.562 112.897 l 621.371 112.897 l -621.371 130.495 l h -621.371 130.495 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -621.371 35.602 m 625.562 35.602 l 625.562 53.199 l 621.371 53.199 l -621.371 35.602 l h -621.371 35.602 m S Q -0.301961 0.654902 0.301961 rg -621.762 112.897 m 626.449 112.897 l 626.449 95.295 l 621.762 95.295 l -621.762 112.897 l h -621.762 112.897 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -621.762 53.199 m 626.449 53.199 l 626.449 70.801 l 621.762 70.801 l -621.762 53.199 l h -621.762 53.199 m S Q -0.301961 0.654902 0.301961 rg -622.078 148.096 m 626.758 148.096 l 626.758 130.495 l 622.078 130.495 l -622.078 148.096 l h -622.078 148.096 m f* -0 g -q 1 0 
0 -1 0 166.0961 cm -622.078 18 m 626.758 18 l 626.758 35.602 l 622.078 35.602 l 622.078 18 -l h -622.078 18 m S Q -0.301961 0.654902 0.301961 rg -624.086 165.698 m 629.086 165.698 l 629.086 148.096 l 624.086 148.096 l -624.086 165.698 l h -624.086 165.698 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -624.086 0.398 m 629.086 0.398 l 629.086 18 l 624.086 18 l 624.086 0.398 -l h -624.086 0.398 m S Q -0.301961 0.654902 0.301961 rg -624.367 95.295 m 629.461 95.295 l 629.461 77.698 l 624.367 77.698 l -624.367 95.295 l h -624.367 95.295 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -624.367 70.801 m 629.461 70.801 l 629.461 88.398 l 624.367 88.398 l -624.367 70.801 l h -624.367 70.801 m S Q -0.301961 0.654902 0.301961 rg -625.574 130.495 m 629.887 130.495 l 629.887 112.897 l 625.574 112.897 l -625.574 130.495 l h -625.574 130.495 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -625.574 35.602 m 629.887 35.602 l 629.887 53.199 l 625.574 53.199 l -625.574 35.602 l h -625.574 35.602 m S Q -0.301961 0.654902 0.301961 rg -626.461 112.897 m 630.906 112.897 l 630.906 95.295 l 626.461 95.295 l -626.461 112.897 l h -626.461 112.897 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -626.461 53.199 m 630.906 53.199 l 630.906 70.801 l 626.461 70.801 l -626.461 53.199 l h -626.461 53.199 m S Q -0.301961 0.654902 0.301961 rg -626.777 148.096 m 632.895 148.096 l 632.895 130.495 l 626.777 130.495 l -626.777 148.096 l h -626.777 148.096 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -626.777 18 m 632.895 18 l 632.895 35.602 l 626.777 35.602 l 626.777 18 -l h -626.777 18 m S Q -0.301961 0.654902 0.301961 rg -629.098 165.698 m 633.445 165.698 l 633.445 148.096 l 629.098 148.096 l -629.098 165.698 l h -629.098 165.698 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -629.098 0.398 m 633.445 0.398 l 633.445 18 l 629.098 18 l 629.098 0.398 -l h -629.098 0.398 m S Q -0.301961 0.654902 0.301961 rg -629.516 95.295 m 634.094 95.295 l 634.094 77.698 l 629.516 77.698 l -629.516 95.295 l h -629.516 95.295 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -629.516 70.801 m 634.094 70.801 l 634.094 88.398 l 629.516 88.398 l -629.516 70.801 l h -629.516 70.801 m S Q -0.301961 0.654902 0.301961 rg -629.93 130.495 m 636.984 130.495 l 636.984 112.897 l 629.93 112.897 l -629.93 130.495 l h -629.93 130.495 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -629.93 35.602 m 636.984 35.602 l 636.984 53.199 l 629.93 53.199 l -629.93 35.602 l h -629.93 35.602 m S Q -0.301961 0.654902 0.301961 rg -630.914 112.897 m 638.766 112.897 l 638.766 95.295 l 630.914 95.295 l -630.914 112.897 l h -630.914 112.897 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -630.914 53.199 m 638.766 53.199 l 638.766 70.801 l 630.914 70.801 l -630.914 53.199 l h -630.914 53.199 m S Q -0.301961 0.654902 0.301961 rg -632.902 148.096 m 638.59 148.096 l 638.59 130.495 l 632.902 130.495 l -632.902 148.096 l h -632.902 148.096 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -632.902 18 m 638.59 18 l 638.59 35.602 l 632.902 35.602 l 632.902 18 l -h -632.902 18 m S Q -0.654902 0.317647 1 rg -633.465 165.698 m 638.27 165.698 l 638.27 148.096 l 633.465 148.096 l -633.465 165.698 l h -633.465 165.698 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -633.465 0.398 m 638.27 0.398 l 638.27 18 l 633.465 18 l 633.465 0.398 l -h -633.465 0.398 m S Q -0.654902 0.317647 1 rg -634.113 95.295 m 636.031 95.295 l 636.031 77.698 l 634.113 77.698 l -634.113 95.295 l h -634.113 95.295 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -634.113 70.801 m 636.031 70.801 l 636.031 88.398 l 634.113 88.398 l -634.113 70.801 l h -634.113 70.801 m S Q -0.654902 0.317647 1 rg -636.039 95.295 m 639.387 95.295 l 639.387 77.698 l 636.039 77.698 l 
-636.039 95.295 l h -636.039 95.295 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -636.039 70.801 m 639.387 70.801 l 639.387 88.398 l 636.039 88.398 l -636.039 70.801 l h -636.039 70.801 m S Q -0.654902 0.317647 1 rg -636.992 130.495 m 639.539 130.495 l 639.539 112.897 l 636.992 112.897 l -636.992 130.495 l h -636.992 130.495 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -636.992 35.602 m 639.539 35.602 l 639.539 53.199 l 636.992 53.199 l -636.992 35.602 l h -636.992 35.602 m S Q -0.654902 0.317647 1 rg -638.277 165.698 m 640.156 165.698 l 640.156 148.096 l 638.277 148.096 l -638.277 165.698 l h -638.277 165.698 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -638.277 0.398 m 640.156 0.398 l 640.156 18 l 638.277 18 l 638.277 0.398 -l h -638.277 0.398 m S Q -0.654902 0.317647 1 rg -638.848 148.096 m 640.398 148.096 l 640.398 130.495 l 638.848 130.495 l -638.848 148.096 l h -638.848 148.096 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -638.848 18 m 640.398 18 l 640.398 35.602 l 638.848 35.602 l 638.848 18 -l h -638.848 18 m S Q -0.301961 0.654902 0.301961 rg -18 68.897 m 26.801 68.897 l 26.801 60.096 l 18 60.096 l 18 68.897 l h -18 68.897 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -18 97.199 m 26.801 97.199 l 26.801 106 l 18 106 l 18 97.199 l h -18 97.199 m S Q -BT -9.6 0 0 9.6 35.6 60.0961 Tm -/f-0-0 1 Tf -[<16>-1<0c>1<17>-1<070b>1<04020a>-1<0b>1<0c09>-1<060a>-1<0208>-1<18>]TJ -ET -0.8 0.113725 0.113725 rg -18 51.295 m 26.801 51.295 l 26.801 42.495 l 18 42.495 l 18 51.295 l h -18 51.295 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -18 114.801 m 26.801 114.801 l 26.801 123.602 l 18 123.602 l 18 114.801 -l h -18 114.801 m S Q -BT -9.6 0 0 9.6 35.6 42.4961 Tm -/f-0-0 1 Tf -[<190a1a>-1<0503>]TJ -ET -0.654902 0.317647 1 rg -18 33.698 m 26.801 33.698 l 26.801 24.897 l 18 24.897 l 18 33.698 l h -18 33.698 m f* -0 g -q 1 0 0 -1 0 166.0961 cm -18 132.398 m 26.801 132.398 l 26.801 141.199 l 18 141.199 l 18 132.398 -l h -18 132.398 m S Q -BT -9.6 0 0 9.6 35.6 24.8961 Tm -/f-0-0 1 Tf -[<1b0b>1<09>-1<18>-1<060a>-1<0208>-1<18>]TJ -ET -Q -showpage -%%Trailer -count op_count sub {pop} repeat -countdictstack dict_count sub {end} repeat -cairo_eps_state restore -%%EOF diff --git a/docs/book/pics/note.png b/docs/book/pics/note.png deleted file mode 100644 index 7c1f3e2fa7..0000000000 Binary files a/docs/book/pics/note.png and /dev/null differ diff --git a/docs/book/pics/tasks_pseudo_after.png b/docs/book/pics/tasks_pseudo_after.png deleted file mode 100644 index b451479fb4..0000000000 Binary files a/docs/book/pics/tasks_pseudo_after.png and /dev/null differ diff --git a/docs/book/pics/tasks_pseudo_before.png b/docs/book/pics/tasks_pseudo_before.png deleted file mode 100644 index 3a8e3db100..0000000000 Binary files a/docs/book/pics/tasks_pseudo_before.png and /dev/null differ diff --git a/docs/book/pics/tip.png b/docs/book/pics/tip.png deleted file mode 100644 index f087c73b7a..0000000000 Binary files a/docs/book/pics/tip.png and /dev/null differ diff --git a/docs/book/pics/waf-64x64.png b/docs/book/pics/waf-64x64.png deleted file mode 100644 index cbe55f639e..0000000000 Binary files a/docs/book/pics/waf-64x64.png and /dev/null differ diff --git a/docs/book/pics/warning.png b/docs/book/pics/warning.png deleted file mode 100644 index d41edb9adb..0000000000 Binary files a/docs/book/pics/warning.png and /dev/null differ diff --git a/docs/book/posting.semd b/docs/book/posting.semd deleted file mode 100644 index 8268e50851..0000000000 Binary files a/docs/book/posting.semd and /dev/null differ diff --git a/docs/book/prodcons.semd b/docs/book/prodcons.semd deleted file 
mode 100644 index 9e663dc53d..0000000000 Binary files a/docs/book/prodcons.semd and /dev/null differ diff --git a/docs/book/scenarios.txt b/docs/book/scenarios.txt deleted file mode 100644 index adb134f01c..0000000000 --- a/docs/book/scenarios.txt +++ /dev/null @@ -1,702 +0,0 @@ -== Advanced scenarios - -This chapter demonstrates a few examples of the waf library for more complicated and less common scenarios. - -=== Project organization - -==== Building the compiler first[[build_compiler_first]] - -The example below demonstrates how to build a compiler which is used for building the remaining targets. The requirements are the following: - -. Create the compiler and all its intermediate tasks -. Re-use the compiler in a second build step -. The compiler will transform '.src' files into '.cpp' files, which will be processed too -. Call the compiler again if it was rebuilt (add the dependency on the compiler) - -The first thing to do is to write the expected user script: - -// scenarios_compiler -[source,python] ---------------- -top = '.' -out = 'build' - -def configure(ctx): - ctx.load('g++') - ctx.load('src2cpp', tooldir='.') - -def build(ctx): - ctx.program( <1> - source = 'comp.cpp', - target = 'comp') - - ctx.add_group() <2> - - ctx.program( - source = 'main.cpp a.src', <3> - target = 'foo') ---------------- - -<1> Build the compiler first, it will result in a binary named _comp_ -<2> Add a new build group to make certain the compiler is complete before processing the next tasks -<3> The file 'a.src' is to be transformed by 'comp' into 'a.cpp' - -The code for the _src → cpp_ conversion will be the following: - -[source,python] ---------------- -from waflib.Task import Task -class src2cpp(Task): <1> - run_str = '${SRC[0].abspath()} ${SRC[1].abspath()} ${TGT}' - color = 'PINK' - -from waflib.TaskGen import extension - -@extension('.src') -def process_src(self, node): <2> - tg = self.bld.get_tgen_by_name('comp') <3> - comp = tg.link_task.outputs[0] - tsk = self.create_task('src2cpp', [comp, node], node.change_ext('.cpp')) <4> - self.source.extend(tsk.outputs) <5> ---------------- - -<1> Declare a new task class for processing the source file by our compiler -<2> Files of extension '.src' are to be processed by this method -<3> Obtain a reference on the task generator producing the compiler -<4> Create the task 'src → cpp', the compiler being as used as the first source file -<5> Add the generated 'cpp' file to be processed too - -The compilation results will be the following: - -[source,shishell] ---------------- -$ waf distclean configure build -v -'distclean' finished successfully (0.006s) -Setting top to : /tmp/scenarios_compiler -Setting out to : /tmp/scenarios_compiler/build -Checking for program g++,c++ : /usr/bin/g++ -Checking for program ar : /usr/bin/ar -'configure' finished successfully (0.118s) -Waf: Entering directory `/tmp/scenarios_compiler/build' -[1/6] cxx: comp.cpp -> build/comp.cpp.1.o -01:06:00 runner ['/usr/bin/g++', '../comp.cpp', '-c', '-o', 'comp.cpp.1.o'] -[2/6] cxxprogram: build/comp.cpp.1.o -> build/comp <1> -01:06:00 runner ['/usr/bin/g++', 'comp.cpp.1.o', '-o', 'build/comp', '-Wl,-Bstatic', '-Wl,-Bdynamic'] -[3/6] cxx: main.cpp -> build/main.cpp.2.o -01:06:00 runner ['/usr/bin/g++', '../main.cpp', '-c', '-o', 'main.cpp.2.o'] -[4/6] src2cpp: build/comp a.src -> build/a.cpp -01:06:00 runner ['build/comp', 'scenarios_compiler/a.src', 'a.cpp'] <2> -[5/6] cxx: build/a.cpp -> build/a.cpp.2.o -01:06:00 runner ['/usr/bin/g++', 'a.cpp', '-c', '-o', 'a.cpp.2.o'] -[6/6] 
cxxprogram: build/main.cpp.2.o build/a.cpp.2.o -> build/foo <3> -01:06:00 runner ['/usr/bin/g++', 'main.cpp.2.o', 'a.cpp.2.o', '-o', 'build/foo', '-Wl,-Bstatic', '-Wl,-Bdynamic'] -Waf: Leaving directory `/tmp/scenarios_compiler/build' -'build' finished successfully (0.171s) ---------------- - -<1> Creation of the 'comp' program -<2> Use the compiler to generate 'a.cpp' -<3> Compile and link 'a.cpp' and 'main.cpp' into the program 'foo' - -NOTE: When `waf --targets=foo' is called, the task generator `comp' will create its tasks too (task generators from previous groups are processed). - -==== Providing arbitrary configuration files - -A file is copied into the build directory before the build starts. The build may use this file for building other targets. - -// scenarios_impfile -[source,python] ---------------- -cfg_file = 'somedir/foo.txt' - -def configure(conf): - - orig = conf.root.find_node('/etc/fstab') - txt = orig.read() <1> - - dest = conf.bldnode.make_node(cfg_file) - dest.parent.mkdir() <2> - dest.write(txt) <3> - - conf.env.append_value('cfg_files', dest.abspath()) <4> - -def build(ctx): - ctx(rule='cp ${SRC} ${TGT}', source=cfg_file, target='bar.txt') ---------------- - -<1> Read the file '/etc/fstab' -<2> Create the destination directory in case it does not already exist -<3> Create a new file in the build directory -<4> Mark the output as a configuration file so it can be used during the build - -The execution output will be the following: - -[source,shishell] ---------------- -$ waf configure build -Setting top to : /tmp/scenarios_impfile -Setting out to : /tmp/scenarios_impfile/build -'configure' finished successfully (0.003s) -Waf: Entering directory `/tmp/scenarios_impfile/build' -[1/1] bar.txt: build/somedir/foo.txt -> build/bar.txt -Waf: Leaving directory `/tmp/scenarios_impfile/build' -'build' finished successfully (0.008s) - -$ tree -. -|-- build -| |-- bar.txt -| |-- c4che -| | |-- build.config.py -| | `-- _cache.py -| |-- config.log -| `-- somedir -| `-- foo.txt -`-- wscript ---------------- - - - -=== Mixing extensions and C/C++ features - -==== Files processed by a single task generator - -Now let's illustrate the @extension decorator on idl file processing. Files with .idl extension are processed to produce .c and .h files (`foo.idl` → `foo.c` + `foo.h`). The .c files must be compiled after being generated. - -First, here is the declaration expected in user scripts: - -// scenarios_idl -[source,python] ---------------- -top = '.' -out = 'build' - -def configure(conf): - conf.load('g++') - -def build(bld): - bld.program( - source = 'foo.idl main.cpp', - target = 'myapp' - ) ---------------- - -The file +foo.idl+ is listed as a source. It will be processed to +foo.cpp+ and compiled and linked with +main.cpp+ - -Here is the code to support this scenario: - -[source,python] ---------------- -from waflib.Task import Task -from waflib.TaskGen import extension - -class idl(Task): - run_str = 'cp ${SRC} ${TGT[0].abspath()} && touch ${TGT[1].abspath()}' <1> - color = 'BLUE' - ext_out = ['.h'] <2> - -@extension('.idl') -def process_idl(self, node): - cpp_node = node.change_ext('.cpp') - hpp_node = node.change_ext('.hpp') - self.create_task('idl', node, [cpp_node, hpp_node]) <3> - self.source.append(cpp_node) <4> ---------------- - -<1> Dummy command for demonstration purposes. 
In practice the rule to use would be like _omniidl -bcxx $\{SRC} -C$\{TGT}_ -<2> Because the idl task produces headers, it must be executed before any other +cpp+ file is compiled -<3> Create the task from the '.idl' extension. -<4> Reinject the file to compile by the C++ compiler - -The execution results will be the following: - -[source,shishell] ---------------- -$ waf distclean configure build -v -'distclean' finished successfully (0.002s) -Setting top to : /tmp/scenarios_idl -Setting out to : /tmp/scenarios_idl/build -Checking for program g++,c++ : /usr/bin/g++ -Checking for program ar : /usr/bin/ar -'configure' finished successfully (0.072s) -Waf: Entering directory `/tmp/scenarios_idl/build' -[1/4] idl: foo.idl -> build/foo.cpp build/foo.hpp -19:47:11 runner 'cp ../foo.idl foo.cpp && touch foo.hpp' -[2/4] cxx: main.cpp -> build/main.cpp.0.o -19:47:11 runner ['/usr/bin/g++', '-I.', '-I..', '../main.cpp', '-c', '-o', 'main.cpp.0.o'] -[3/4] cxx: build/foo.cpp -> build/foo.cpp.0.o -19:47:11 runner ['/usr/bin/g++', '-I.', '-I..', 'foo.cpp', '-c', '-o', 'foo.cpp.0.o'] -[4/4] cxxprogram: build/main.cpp.0.o build/foo.cpp.0.o -> build/myapp -19:47:11 runner ['/usr/bin/g++', 'main.cpp.0.o', 'foo.cpp.0.o', '-o', 'myapp'] -Waf: Leaving directory `/tmp/scenarios_idl/build' -'build' finished successfully (0.149s) ---------------- - -NOTE: The drawback of this declaration is that the source files produced by the idl transformation can be used by only one task generator. - -==== Resources shared by several task generators - -Let's suppose now that the idl outputs will be shared by several task generators. We will first start by writing the expected user script: - -// scenarios_idl2 -[source,python] ---------------- -top = '.' -out = 'out' - -def configure(ctx): - ctx.load('g++') - -def build(ctx): - ctx( <1> - source = 'notify.idl', - name = 'idl_gen') - - ctx.program( <2> - source = ['main.cpp'], - target = 'testprog', - includes = '.', - add_idl = 'idl_gen') <3> ---------------- - -<1> Process an idl file in a first task generator. Name this task generator 'idl_gen' -<2> Somewhere else (maybe in another script), another task generator will use the source generated by the idl processing -<3> Reference the idl processing task generator by the name 'idl_gen'. 
- -The code to support this scenario will be the following: - -[source,python] ---------------- -from waflib.Task import Task -from waflib.TaskGen import feature, before_method, extension - -class idl(Task): - run_str = 'cp ${SRC} ${TGT[0].abspath()} && touch ${TGT[1].abspath()}' - color = 'BLUE' - ext_out = ['.h'] <1> - -@extension('.idl') -def process_idl(self, node): - cpp_node = node.change_ext('.cpp') - hpp_node = node.change_ext('.hpp') - self.create_task('idl', node, [cpp_node, hpp_node]) - self.more_source = [cpp_node] <2> - -@feature('*') -@before_method('process_source') <3> -def process_add_source(self): - for x in self.to_list(getattr(self, 'add_idl', [])): <4> - y = self.bld.get_tgen_by_name(x) - y.post() <5> - if getattr(y, 'more_source', None): - self.source.extend(y.more_source) <6> ---------------- - -<1> The idl processing must be performed before any C++ task is executed -<2> Bind the output file to a new attribute -<3> Add the source from another task generator object -<4> Process _add_idl_, finding the other task generator -<5> Ensure that the other task generator has created its tasks -<6> Update the source list - -The task execution output will be very similar to the output from the first example: - -[source,shishell] ---------------- -$ waf distclean configure build -v -'distclean' finished successfully (0.007s) -Setting top to : /tmp/scenarios_idl2 -Setting out to : /tmp/scenarios_idl2/build -Checking for program g++,c++ : /usr/bin/g++ -Checking for program ar : /usr/bin/ar -'configure' finished successfully (0.080s) -Waf: Entering directory `/tmp/scenarios_idl2/build' -[1/4] idl: foo.idl -> build/foo.cpp build/foo.hpp -20:20:24 runner 'cp ../foo.idl foo.cpp && touch foo.hpp' -[2/4] cxx: main.cpp -> build/main.cpp.1.o -20:20:24 runner ['/usr/bin/g++', '-I.', '-I..', '../main.cpp', '-c', '-o', 'main.cpp.1.o'] -[3/4] cxx: build/foo.cpp -> build/foo.cpp.1.o -20:20:24 runner ['/usr/bin/g++', '-I.', '-I..', 'foo.cpp', '-c', '-o', 'foo.cpp.1.o'] -[4/4] cxxprogram: build/main.cpp.1.o build/foo.cpp.1.o -> build/testprog -20:20:24 runner ['/usr/bin/g++', 'main.cpp.1.o', 'foo.cpp.1.o', '-o', 'testprog'] -Waf: Leaving directory `/tmp/scenarios_idl2/build' -'build' finished successfully (0.130s) ---------------- - -=== Task generator methods - -==== Replacing particular attributes - -In general, task generator attributes are not replaced, so the following is not going to be compile +main.c+: - -[source,python] ---------------- -bld.env.FOO = '/usr/includes' -bld.env.MAIN = 'main.c' -bld( - features = 'c cprogram', - source = '${MAIN}', - target = 'app', - includes = '. ${FOO}') ---------------- - -This design decision is motivated by two main reasons: - -. Processing the attributes has a negative performance impact -. For consistency all attributes would have to be processed - -Nevertheless, it is we will demonstrate how to provide Waf with a method to process some attributes. To add a new task generator method, it is necessary to think about its integration with other methods: is there a particular order? The answer is yes, for example, the source attribute is used to create the compilation tasks. To display what methods are in use, execute Waf with the following logging key: - -[source,shishell] ---------------- -$ waf --zones=task_gen -... 
-19:20:51 task_gen posting task_gen 'app' declared in 'scenarios_expansion' <1> -19:20:51 task_gen -> process_rule (9232720) <2> -19:20:51 task_gen -> process_source (9232720) -19:20:51 task_gen -> apply_link (9232720) -19:20:51 task_gen -> apply_objdeps (9232720) -19:20:51 task_gen -> process_use (9232720) -19:20:51 task_gen -> propagate_uselib_vars (9232720) -19:20:51 task_gen -> apply_incpaths (9232720) -19:20:51 task_gen posted app ---------------- - -<1> Task generator execution -<2> Method name and task generator id in parentheses - -From the method list, we find that *process_rule* and *process_source* are processing the _source_ attribute. The _includes_ attribute is processed by *apply_incpaths*. - -// scenarios_expansion -[source,python] ---------------- -from waflib import Utils, TaskGen -@TaskGen.feature('*') <1> -@TaskGen.before('process_source', 'process_rule', 'apply_incpaths') <2> -def transform_strings(self): - for x in 'includes source'.split(): <3> - val = getattr(self, x, None) - if val: - if isinstance(val, str): - setattr(self, x, Utils.subst_vars(val, self.env)) <4> - elif isinstance(val, list): - for i in range(len(val)): - if isinstance(val[i], str): - val[i] = Utils.subst_vars(val[i], self.env) ---------------- - -<1> Execute this method in all task generators -<2> Methods to take into account -<3> Iterate over all interesting attributes -<4> Substitute the attributes - -==== Inserting special include flags - -A scenario that appears from times to times in C/C++ projects is the need to insert specific flags before others, regardless of how flags are usually processed. We will now consider the following case: execute all C++ compilations with the flag `-I.' in first position (before any other include). - -First, a look at the definition of the C++ compilation rule shows that the variable 'INCPATHS' contains the include flags: - -[source,python] ---------------- -class cxx(Task.Task): - color = 'GREEN' - run_str = '${CXX} ${CXXFLAGS} ${CPPPATH_ST:INCPATHS} ${CXX_SRC_F}${SRC} ${CXX_TGT_F}${TGT}' - vars = ['CXXDEPS'] - ext_in = ['.h'] - scan = c_preproc.scan ---------------- - -Those include flags are set by the method 'apply_incpaths'. The trick is then to modify 'INCPATHS' after that method has been executed: - -// scenarios_incflags -[source,python] ---------------- -top = '.' -out = 'build' - -def configure(conf): - conf.load('g++') - -def build(bld): - bld.program(features='cxx cxxprogram', source='main.cpp', target='test') - -from waflib.TaskGen import after, feature - -@feature('cxx') -@after_method('apply_incpaths') -def insert_blddir(self): - self.env.prepend_value('INCPATHS', '.') ---------------- - -A related case is how to add the top-level directory containing a configuration header: - -[source,python] ---------------- -@feature('cxx') -@after_method('apply_incpaths', 'insert_blddir') -def insert_srcdir(self): - path = self.bld.srcnode.abspath() - self.env.prepend_value('INCPATHS', path) ---------------- - - -=== Custom tasks - -==== Force the compilation of a particular task - -In some applications, it may be interesting to keep track of the date and time of the last build. In C this may be done by using the macros `__DATE__' and `__TIME__', for example, the following +about.c+ file will contain: - -[source,c] ---------------- -void ping() { - printf("Project compiled: %s %s\n", __DATE__, __TIME__); -} ---------------- - -The files are only compiled when they change though, so it is necessary to find a way to force the +about.c+ recompilation. 
To sum up, the compilation should be performed whenever: - -. One of the c files of the project is compiled -. The link flags for any task change -. The link task including the object for our macro is removed - -To illustrate this behaviour, we will now set up a project will use various c files: - -// scenarios_end -[source,python] ---------------- -def options(opt): - opt.load('compiler_c') - -def configure(conf): - conf.load('compiler_c') - -def build(bld): - bld.program( - source = 'main.c about.c', - target = 'app', - includes = '.', - use = 'my_static_lib') - - bld.stlib( - source = 'test_staticlib.c', - target = 'my_static_lib') ---------------- - -The main file will just call the function _ping_ defined +about.c+ to display the date and time: - -[source,c] ---------------- -#include "a.h" - -int main() { - ping(); - return 0; -} ---------------- - -The task method _runnable_status_ must be overridden to take into account the dependencies: - -[source,python] ---------------- -import os -from waflib import Task -def runnable_status(self): - if self.inputs[0].name == 'about.c': <1> - h = 0 <2> - for g in self.generator.bld.groups: - for tg in g: - if isinstance(tg, TaskBase): - continue <3> - - h = hash((self.generator.bld.hash_env_vars(self.generator.env, ['LINKFLAGS']), h)) - for tsk in getattr(tg, 'compiled_tasks', []): # all .c or .cpp compilations - if id(tsk) == id(self): - continue - if not tsk.hasrun: - return Task.ASK_LATER - h = hash((tsk.signature(), h)) <4> - self.env.CCDEPS = h - - try: - os.stat(self.generator.link_task.outputs[0].abspath()) <5> - except: - return Task.RUN_ME - - return Task.Task.runnable_status(self) <6> - -from waflib.Tools.c import c <7> -c.runnable_status = runnable_status ---------------- - -<1> If the task processes +about.c+ -<2> Define a hash value that the task will depend on (CCDEPS) -<3> Iterate over all task generators of the project -<4> Hash the link flags and the signatures of all other compilation tasks -<5> Make sure to execute the task if it was never executed before -<6> Normal behaviour -<7> Modify the 'c' task class - -The execution will produce the following output: - -[source,shishell] ---------------- -$ waf -Waf: Entering directory `/tmp/scenarios_end/build' -[2/5] c: test_staticlib.c -> build/test_staticlib.c.1.o -[3/5] cstlib: build/test_staticlib.c.1.o -> build/libmy_static_lib.a -[4/5] c: about.c -> build/about.c.0.o -[5/5] cprogram: build/main.c.0.o build/about.c.0.o -> build/app -Waf: Leaving directory `/tmp/scenarios_end/build' <1> -'build' finished successfully (0.088s) - -$ ./build/app -Project compiled: Jul 25 2010 14:05:30 - -$ echo " " >> main.c <2> - -$ waf -Waf: Entering directory `/tmp/scenarios_end/build' -[1/5] c: main.c -> build/main.c.0.o -[4/5] c: about.c -> build/about.c.0.o <3> -[5/5] cprogram: build/main.c.0.o build/about.c.0.o -> build/app -Waf: Leaving directory `/tmp/scenarios_end/build' -'build' finished successfully (0.101s) - -$ ./build/app -Project compiled: Jul 25 2010 14:05:49 ---------------- - -<1> All files are compiled on the first build -<2> The file +main.c+ is modified -<3> The build generates +about.c+ again to update the build time string - -==== A compiler producing source files with names unknown in advance - -The requirements for this problem are the following: - -. A compiler *creates source files* (one .src file -> several .c files) -. The source file names to create are *known only when the compiler is executed* -. 
The compiler is slow so it should run *only when absolutely necessary* -. Other tasks will *depend on the generated files* (compile and link the .c files into a program) - -To do this, the information on the source files must be shared between the build executions. - -// scenarios_unknown -[source,python] ---------------- -top = '.' -out = 'build' - -def configure(conf): - conf.load('gcc') - conf.load('mytool', tooldir='.') - -def build(bld): - bld.env.COMP = bld.path.find_resource('evil_comp.py').abspath() <1> - bld.stlib(source='x.c foo.src', target='astaticlib') <2> ---------------- - -<1> Compiler path -<2> An example, having a _.src_ file - -The contents of _mytool_ will be the following: - -[source,python] ---------------- -import os -from waflib import Task, Utils, Context -from waflib.Utils import subprocess -from waflib.TaskGen import extension - -@extension('.src') -def process_shpip(self, node): <1> - self.create_task('src2c', node) - -class src2c(Task.Task): - color = 'PINK' - quiet = True <2> - ext_out = ['.h'] <3> - - def run(self): - cmd = '%s %s' % (self.env.COMP, self.inputs[0].abspath()) - n = self.inputs[0].parent.get_bld() - n.mkdir() - cwd = n.abspath() - out = self.generator.bld.cmd_and_log(cmd, cwd=cwd, quiet=Context.STDOUT) <4> - - out = Utils.to_list(out) - self.outputs = [self.generator.path.find_or_declare(x) for x in out] - self.generator.bld.raw_deps[self.uid()] = [self.signature()] + self.outputs <5> - self.add_c_tasks(self.outputs) <6> - - def add_c_tasks(self, lst): - self.more_tasks = [] - for node in lst: - if node.name.endswith('.h'): - continue - tsk = self.generator.create_compiled_task('c', node) - self.more_tasks.append(tsk) <7> - - tsk.env.append_value('INCPATHS', [node.parent.abspath()]) - - if getattr(self.generator, 'link_task', None): <8> - self.generator.link_task.set_run_after(tsk) - self.generator.link_task.inputs.append(tsk.outputs[0]) - self.generator.link_task.inputs.sort(key=lambda x: x.abspath()) - - def runnable_status(self): - ret = super(src2c, self).runnable_status() - if ret == Task.SKIP_ME: - - lst = self.generator.bld.raw_deps[self.uid()] - if lst[0] != self.signature(): - return Task.RUN_ME - - nodes = lst[1:] - for x in nodes: - try: - os.stat(x.abspath()) - except: - return Task.RUN_ME - - nodes = lst[1:] - self.set_outputs(nodes) - self.add_c_tasks(nodes) <9> - - return ret ---------------- - -<1> The processing will be delegated to the task -<2> Disable the warnings raised when a task has no outputs -<3> Make certain the processing will be executed before any task using _.h_ files -<4> When the task is executed, collect the process stdout which contains the generated file names -<5> Store the output file nodes in a persistent cache -<6> Create the tasks to compile the outputs -<7> The c tasks will be processed after the current task is done. This does not mean that the c tasks will always be executed. 
-<8> If the task generator of the _src_ file has a link task, set the build order -<9> When this task can be skipped, force the dynamic c task creation - -The output will be the following: - -[source,shishell] ---------------- -$ waf distclean configure build build -'distclean' finished successfully (0.006s) -Setting top to : /tmp/scenarios_unknown -Setting out to : /tmp/scenarios_unknown/build -Checking for program gcc,cc : /usr/bin/gcc -Checking for program ar : /usr/bin/ar -'configure' finished successfully (0.115s) -Waf: Entering directory `/tmp/scenarios_unknown/build' -[1/3] src2c: foo.src -[2/5] c: build/shpip/a12.c -> build/shpip/a12.c.0.o -[3/5] c: build/shpop/a13.c -> build/shpop/a13.c.0.o -[4/5] c: x.c -> build/x.c.0.o -[5/5] cstlib: build/x.c.0.o build/shpip/a12.c.0.o build/shpop/a13.c.0.o -> build/libastaticlib.a -Waf: Leaving directory `/tmp/scenarios_unknown/build' -'build' finished successfully (0.188s) -Waf: Entering directory `/tmp/scenarios_unknown/build' -Waf: Leaving directory `/tmp/scenarios_unknown/build' -'build' finished successfully (0.013s) ---------------- - diff --git a/docs/book/shishell.lang b/docs/book/shishell.lang deleted file mode 100644 index b9d24ce9a5..0000000000 --- a/docs/book/shishell.lang +++ /dev/null @@ -1,3 +0,0 @@ - -optionalargument = '^(\$|#)(.*?)$' - diff --git a/docs/book/source.semd b/docs/book/source.semd deleted file mode 100644 index c1a6c43f50..0000000000 Binary files a/docs/book/source.semd and /dev/null differ diff --git a/docs/book/symbols.lang b/docs/book/symbols.lang deleted file mode 100644 index 354fb04732..0000000000 --- a/docs/book/symbols.lang +++ /dev/null @@ -1,4 +0,0 @@ -co_symbol = "<1>","<2>","<3>","<4>","<5>","<6>","<7>","<8>", - "<9>","<10>","<11>","<12>","<13>","<14>","<15>" -symbol = "~","!","%","^","*","(",")","-","+","=","[", - "]","\\",":",";",",",".","/","?","&","<",">","\|" diff --git a/docs/book/task_generators.txt b/docs/book/task_generators.txt deleted file mode 100644 index 2f6372c840..0000000000 --- a/docs/book/task_generators.txt +++ /dev/null @@ -1,339 +0,0 @@ -=== General purpose task generators - -So far, various task generators uses have been demonstrated. This chapter provides a detailed description of task generator structure and usage. - -==== Task generator definition - -The chapter on make-like rules illustrated how the attribute 'rule' is processed. Then the chapter on name and extension-based file processing illustrated how the attribute 'source' is processed (in the absence of the rule attribute). To process 'any attribute', the following properties should hold: - -. Attributes should be processed only when the task generator is set to generate the tasks (lazy processing) -. There is no list of authorized attributes (task generators may be extended by user scripts) -. Attribute processing should be controlable on a task generator instance basis (special rules for particular task generators) -. The extensions should be split into independent files (low coupling between the Waf tools) - -Implementing such a system is a difficult problem which lead to the creation of very different designs: - -. _A hierarchy of task generator subclasses_ It was abandoned due to the high coupling between the Waf tools: the C tools required knowledge from the D tool for building hybrid applications -. _Method decoration (creating linked lists of method calls)_ Replacing or disabling a method safely was no longer possible (addition-only), so this system disappeared quickly -. 
_Flat method and execution constraint declaration_ The concept is close to aspect-oriented programming and might scare programmers. - -So far, the third design proved to be the most flexible and was kept. Here is how to define a task generator method: - -[source,python] ---------------- -top = '.' -out = 'build' - -def configure(conf): - pass - -def build(bld): - v = bld(myattr='Hello, world!') - v.myattr = 'Hello, world!' <1> - v.myMethod() <2> - -from waflib import TaskGen - -@TaskGen.taskgen_method <3> -def myMethod(tgen): <4> - print(getattr(self, 'myattr', None)) <5> ---------------- - -<1> Attributes may be set by arguments or by accessing the object. It is set two times in this example. -<2> Call the task generator method explicitly -<3> Use a python decorator -<4> Task generator methods have a unique argument representing the current instance -<5> Process the attribute 'myattr' when present (the case in the example) - -The output from the build will be the following: - -[source,shishell] ---------------- -$ waf distclean configure build -'distclean' finished successfully (0.001s) -'configure' finished successfully (0.001s) -Waf: Entering directory `/tmp/simpleproject/build' -hello world -Waf: Leaving directory `/tmp/simpleproject/build' -'build' finished successfully (0.003s) ---------------- - -NOTE: The method could be bound by using 'setattr' directly, like for binding any new method on a python class. - -==== Executing the method during the build - -So far, the task generator methods defined are only executed through explicit calls. Another decorator is necessary to have a task generator executed during the build phase automatically. Here is the updated example: - -[source,python] ---------------- -top = '.' -out = 'build' - -def configure(conf): - pass - -def build(bld): - bld(myattr='Hello, world!') - -from waflib import TaskGen - -@TaskGen.taskgen_method <1> -@TaskGen.feature('*') <2> -def methodName(self): - print(getattr(self, 'myattr', None)) ---------------- - -<1> Bind a method to the task generator class (redundant when other methods such as 'TaskGen.feature' are used) -<2> Bind the method to the symbol 'myfeature' - -The execution results will be the following: - -[source,shishell] ---------------- -$ waf distclean configure build --zones=task_gen <1> -'distclean' finished successfully (0.004s) -'configure' finished successfully (0.001s) -Waf: Entering directory `/tmp/simpleproject/build' -23:03:44 task_gen posting objects (normal) -23:03:44 task_gen posting >task_gen '' of type task_gen defined in dir:///tmp/simpleproject> 139657958706768 <2> -23:03:44 task_gen -> exec_rule (139657958706768) <3> -23:03:44 task_gen -> process_source (139657958706768) <4> -23:03:44 task_gen -> methodName (139657958706768) <5> -Hello, world! -23:03:44 task_gen posted <6> -Waf: Leaving directory `/tmp/simpleproject/build' -23:03:44 task_gen posting objects (normal) -'build' finished successfully (0.004s) ---------------- - -<1> The debugging zone 'task_gen' is used to display the task generator methods being executed -<2> Display which task generator is being executed -<3> The method 'exec_rule' is used to process the 'rule'. It is always executed. -<4> The method 'process_source' is used to process the 'source' attribute. It is always executed exept if the method 'exec_rule' processes a 'rule' attribute -<5> Our task generator method is executed, and prints 'Hello, world!' 
-<6> The task generator methods have been executed, the task generator is marked as done (posted) - -==== Task generator features - -So far, the task generator methods we added were declared to be executed by all task generator instances. Limiting the execution to specific task generators requires the use of the 'feature' decorator: - -[source,python] ---------------- -top = '.' -out = 'build' - -def configure(conf): - pass - -def build(bld): - bld(features='ping') - bld(features='ping pong') - -from waflib import TaskGen - -@TaskGen.feature('ping') -def ping(self): - print('ping') - -@TaskGen.feature('pong') -def pong(self): - print('pong') ---------------- - -The execution output will be the following: - -[source,shishell] ---------------- -$ waf distclean configure build --zones=task_gen -'distclean' finished successfully (0.003s) -'configure' finished successfully (0.001s) -Waf: Entering directory `/tmp/simpleproject/build' -16:22:07 task_gen posting objects (normal) -16:22:07 task_gen posting 140631018237584 -16:22:07 task_gen -> exec_rule (140631018237584) -16:22:07 task_gen -> process_source (140631018237584) -16:22:07 task_gen -> ping (140631018237584) -ping -16:22:07 task_gen posted -16:22:07 task_gen posting 140631018237776 -16:22:07 task_gen -> exec_rule (140631018237776) -16:22:07 task_gen -> process_source (140631018237776) -16:22:07 task_gen -> pong (140631018237776) -pong -16:22:07 task_gen -> ping (140631018237776) -ping -16:22:07 task_gen posted -Waf: Leaving directory `/tmp/simpleproject/build' -16:22:07 task_gen posting objects (normal) -'build' finished successfully (0.005s) ---------------- - -WARNING: Although the task generator instances are processed in order, the task generator method execution requires a specific declaration for the order of execution. Here, the method 'pong' is executed before the method 'ping' - -==== Task generator method execution order - -To control the execution order, two new decorators need to be added. We will now show a new example with two custom task generator methods 'method1' and 'method2', executed in that order: - -[source,python] ---------------- -top = '.' -out = 'build' - -def configure(conf): - pass - -def build(bld): - bld(myattr='Hello, world!') - -from waflib import TaskGen - -@TaskGen.feature('*') -@TaskGen.before('process_source', 'exec_rule') -def method1(self): - print('method 1 %r' % getattr(self, 'myattr', None)) - -@TaskGen.feature('*') -@TaskGen.before('process_source') -@TaskGen.after('method1') -def method2(self): - print('method 2 %r' % getattr(self, 'myattr', None)) ---------------- - -The execution output will be the following: - -[source,shishell] ---------------- -$ waf distclean configure build --zones=task_gen -'distclean' finished successfully (0.003s) -'configure' finished successfully (0.001s) -Waf: Entering directory `/tmp/simpleproject/build' -15:54:02 task_gen posting objects (normal) -15:54:02 task_gen posting 139808568487632 -15:54:02 task_gen -> method1 (139808568487632) -method 1 'Hello, world!' -15:54:02 task_gen -> exec_rule (139808568487632) -15:54:02 task_gen -> method2 (139808568487632) -method 2 'Hello, world!' 
-15:54:02 task_gen -> process_source (139808568487632) -15:54:02 task_gen posted -Waf: Leaving directory `/tmp/simpleproject/build' -15:54:02 task_gen posting objects (normal) -'build' finished successfully (0.005s) ---------------- - -==== Adding or removing a method for execution - -The order constraints on the methods (after/before), are used to sort the list of methods in the attribute 'meths'. The sorting is performed once, and the list is consumed as methods are executed. Though no new feature may be added once the first method is executed, new methods may be added dynamically in self.meths. Here is how to create an infinite loop by adding the same method at the end: - -[source,python] ---------------- -from waflib.TaskGen import feature - -@feature('*') -def infinite_loop(self): - self.meths.append('infinite_loop') ---------------- - -Likewise, methods may be removed from the list of methods to execute: - -[source,python] ---------------- -from waflib.TaskGen import feature - -@feature('*') -@before_method('process_source') -def remove_process_source(self): - self.meths.remove('process_source') ---------------- - -The task generator method workflow is represented in the following illustration: - -image::posting{PIC}["Task generator workflow"{backend@docbook:,width=520:},align="center"] - -==== Expressing abstract dependencies between task generators - -We will now illustrate how task generator methods can be used to express abstract dependencies between task generator objects. Here is a new project file located under '/tmp/targets/': - -[source,python] ---------------- -top = '.' -out = 'build' - -def configure(conf): - pass - -def build(bld): - bld(rule='echo A', always=True, name='A') - bld(rule='echo B', always=True, name='B') ---------------- - -By executing 'waf --targets=B', only the task generator 'B' will create its tasks, and the output will be the following: - -[source,shishell] ---------------- -$ waf distclean configure build --targets=B -'distclean' finished successfully (0.000s) -'configure' finished successfully (0.042s) -Waf: Entering directory `/tmp/targets/build' -[1/1] B: -B -Waf: Leaving directory `/tmp/targets/build' -'build' finished successfully (0.032s) ---------------- - -Here is a way to ensure that the task generator 'A' has created its tasks when 'B' does: - -[source,python] ---------------- -top = '.' 
-out = 'build' - -def configure(conf): - pass - -def build(bld): - bld(rule='echo A', always=True, name='A') - bld(rule='echo B', always=True, name='B', depends_on='A') - -from waflib.TaskGen import feature, before_method -@feature('*') <1> -@before_method('process_rule') -def post_the_other(self): - deps = getattr(self, 'depends_on', []) <2> - for name in self.to_list(deps): - other = self.bld.get_tgen_by_name(name) <3> - print('other task generator tasks (before) %s' % other.tasks) - other.post() <4> - print('other task generator tasks (after) %s' % other.tasks) ---------------- - -<1> This method will be executed for all task generators, before the attribute `rule` is processed -<2> Try to process the attribute `depends_on`, if present -<3> Obtain the task generator by name, and for the same variant -<4> Force the other task generator to create its tasks - -The output will be: - -[source,shishell] ---------------- -$ waf distclean configure build --targets=B -'distclean' finished successfully (0.001s) -'configure' finished successfully (0.001s) -Waf: Entering directory `/tmp/targets/build' -other task generator tasks (before) [] <1> -other task generator tasks (after) [ <2> - {task: A -> }] -[1/2] B: -B -[2/2] A: <3> -A -Waf: Leaving directory `/tmp/targets/build' -'build' finished successfully (0.014s) ---------------- - -<1> The other task generator has not created any task yet -<2> A task generator creates all its tasks by calling its method `post()` -<3> Although `--targets=B` was requested, the task from target 'A' was created and executed too - -In practice, the dependencies will often re-use the task objects created by the other task generator: node, configuration set, etc. This is used by the uselib system (see the next chapter on c/c++ builds). - diff --git a/docs/book/task_run.semd b/docs/book/task_run.semd deleted file mode 100644 index c94580cbed..0000000000 Binary files a/docs/book/task_run.semd and /dev/null differ diff --git a/docs/book/task_signature.semd b/docs/book/task_signature.semd deleted file mode 100644 index 3ae13c091b..0000000000 Binary files a/docs/book/task_signature.semd and /dev/null differ diff --git a/docs/book/tasks.txt b/docs/book/tasks.txt deleted file mode 100644 index a50be86a87..0000000000 --- a/docs/book/tasks.txt +++ /dev/null @@ -1,583 +0,0 @@ - -== Task processing - -This chapter provides a description of the task classes which are used during the build phase. - -=== Task execution - -==== Main actors - -The build context is only used to create the tasks and to return lists of tasks that may be executed in parallel. The scheduling is delegated to a task producer which lets task consumers to execute the tasks. The task producer keeps a record of the build state such as the amount of tasks processed or the errors. - -image::tasks_actors{PIC}["Actors processing the tasks"{backend@docbook:,width=250:},align="center"] - -// To reduce the build time, it is interesting to take advantage of the hardware (multiple cpu cores) or of the environment (distributed builds). -The amount of consumers is determined from the number of processors, or may be set manually by using the '-j' option: - -[source,shishell] ------------------- -$ waf -j3 ------------------- - -==== Build groups - -The task producer iterates over lists of tasks returned by the build context. Although the tasks from a list may be executed in parallel by the consumer threads, all the tasks from one list must be consumed before processing another list of tasks. 
The build ends when there are no more tasks to process. - -These lists of tasks are called _build groups_ and may be accessed from the build scripts. Let's demonstrate this behaviour on an example: - -// tasks_groups -[source,python] ---------------- -def build(ctx): - for i in range(8): - ctx(rule='cp ${SRC} ${TGT}', source='wscript', target='wscript_a_%d' % i, - color='YELLOW', name='tasks a') - ctx(rule='cp ${SRC} ${TGT}', source='wscript_a_%d' % i, target='wscript_b_%d' % i, - color='GREEN', name='tasks b') - for i in range(8) - ctx(rule='cp ${SRC} ${TGT}', source='wscript', target='wscript_c_%d' % i, - color='BLUE', name='tasks c') - ctx(rule='cp ${SRC} ${TGT}', source='wscript_c_%d' % i, target='wscript_d_%d' % i, - color='PINK', name='tasks d') ---------------- - -Each green task must be executed after one yellow task and each pink task must be executed after one blue task. Because there is only one group by default, the parallel execution will be similar to the following: - -image::tasks_nogroup{PIC}["One build group"{backend@docbook:,width=440:},align="center"] - -We will now modify the example to add one more build group. - -[source,python] ---------------- -def build(ctx): - for i in range(8): - ctx(rule='cp ${SRC} ${TGT}', source='wscript', target='wscript_a_%d' % i, - color='YELLOW', name='tasks a') - ctx(rule='cp ${SRC} ${TGT}', source='wscript_a_%d' % i, target='wscript_b_%d' % i, - color='GREEN', name='tasks b') - ctx.add_group() - for i in range(8): - ctx(rule='cp ${SRC} ${TGT}', source='wscript', target='wscript_c_%d' % i, - color='BLUE', name='tasks c') - ctx(rule='cp ${SRC} ${TGT}', source='wscript_c_%d' % i, target='wscript_d_%d' % i, - color='PINK', name='tasks d') ---------------- - -Now a separator will appear between the group of yellow and green tasks and the group of blue and violet taks: - -image::tasks_twogroups{PIC}["Two build groups"{backend@docbook:,width=440:},align="center"] - -The tasks and tasks generator are added implicitely to the current group. By giving a name to the groups, it is easy to control what goes where: - -// tasks_groups2 -[source,python] ---------------- -def build(ctx): - - ctx.add_group('group1') - ctx.add_group('group2') - - for i in range(8): - ctx.set_group('group1') - ctx(rule='cp ${SRC} ${TGT}', source='wscript', target='wscript_a_%d' % i, - color='YELLOW', name='tasks a') - ctx(rule='cp ${SRC} ${TGT}', source='wscript_a_%d' % i, target='wscript_b_%d' % i, - color='GREEN', name='tasks b') - - ctx.set_group('group2') - ctx(rule='cp ${SRC} ${TGT}', source='wscript', target='wscript_c_%d' % i, - color='BLUE', name='tasks c') - ctx(rule='cp ${SRC} ${TGT}', source='wscript_c_%d' % i, target='wscript_d_%d' % i, - color='PINK', name='tasks d') ---------------- - -In the previous examples, all task generators from all build groups are processed before the build -actually starts. This default is provided to ensure that the task count is as accurate as possible. -Here is how to tune the build groups: - -[source,python] ---------------- -def build(ctx): - from waflib.Build import POST_LAZY, POST_AT_ONCE - ctx.post_mode = POST_AT_ONCE <1> - #ctx.post_mode = POST_LAZY <2> ---------------- - -<1> All task generators create their tasks before the build starts (default behaviour) -<2> Groups are processed sequentially: all tasks from previous groups are executed before the task generators from the next group are processed - -Build groups can be used for <> to process. 
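Since the build groups are exposed as plain Python lists on the build context, a project script can also inspect them directly. The sketch below is only an illustration, not part of any demo project: the file names 'a.txt' and 'b.txt' and the post-build function are assumptions; it relies on the 'add_group' and 'bld.groups' interfaces shown above and on the 'add_post_fun' hook available on the build context.

[source,python]
---------------
def build(bld):
	bld(rule='touch ${TGT}', target='a.txt', name='A')    # added to the current (default) group
	bld.add_group('later')                                 # create a new group and make it current
	bld(rule='cp ${SRC} ${TGT}', source='a.txt', target='b.txt', name='B')

	def show(bld):
		# bld.groups is a list of lists of task generators
		print('groups: %d, sizes: %r' % (len(bld.groups), [len(g) for g in bld.groups]))
	bld.add_post_fun(show)                                 # called after a successful build
---------------

Running such a build would report two groups of one task generator each, confirming that 'B' was added to the group created by 'add_group' rather than to the default one.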
- -==== The Producer-consumer system - -In most Python interpreters, a global interpreter lock prevents parallelization by more than one cpu core at a time. Therefore, it makes sense to restrict the task scheduling to a single task producer, and to let the threads perform only the task execution. - -The communication between producer and consumers is based on two queues _ready_ and _out_. The producer adds the tasks to _ready_ and reads them back from _out_. The consumers obtain the tasks from _ready_ and give them back to the producer into _out_ after executing 'task.run'. - -The producer uses an internal list named _outstanding_ to iterate over the tasks and to decide which ones to put in the queue _ready_. The tasks that cannot be processed yet are temporarily placed in the list _frozen_ to avoid looping endlessly over the tasks waiting for others. - -The following illustrates the relationship between the task producer and the consumers during the build: - -image::prodcons{PIC}["Parallel execution"{backend@docbook:,width=900:},align="center"] - -==== Task states and status - -A state is assigned to each task (_task.hasrun = state_) to keep track of the execution. The possible values are the following: - -[options="header", cols="1,1,6"] -|================= -|State | Numeric value | Description -|NOT_RUN | 0 | The task has not been processed yet -|MISSING | 1 | The task outputs are missing -|CRASHED | 2 | The task method 'run' returned a non-0 value -|EXCEPTION| 3 | An exception occurred in the Task method 'run' -|SKIPPED | 8 | The task was skipped (it was up-to-date) -|SUCCESS | 9 | The execution was successful -|================= - -To decide whether to execute a task or not, the producer uses the value returned by the task method 'runnable_status'. The possible return values are the following: - -[options="header", cols="1,6"] -|================= -|Code | Description -| ASK_LATER | The task may depend on other tasks which have not finished running (not ready) -| SKIP_ME | The task does not have to be executed, it is up-to-date -| RUN_ME | The task is ready to be executed -|================= - -The following diagram represents the interaction between the main task methods and the states and status: - -image::task_run{PIC}["Task states"{backend@docbook:,width=610:},align="center"] - -=== Build order constraints - -==== The method set_run_after - -The method _set_run_after_ is used to declare ordering constraints between tasks: - -[source,python] ---------------- -task1.set_run_after(task2) ---------------- - -The tasks to wait for are stored in the attribute _run_after_. They are used by the method _runnable_status_ to yield the status 'ASK_LATER' when a task has not run yet. This is merely for the build order and does not force a rebuild if one of the previous tasks is executed. - -==== Computed constraints - -===== Attribute after/before - -The attributes _before_ and _after_ are used to declare ordering constraints between tasks: - -[source,python] ---------------- -from waflib.Task import TaskBase -class task_test_a(TaskBase): - before = ['task_test_b'] -class task_test_b(TaskBase): - after = ['task_test_a'] ---------------- - -===== ext_in/ext_out - -Another way to force the order is by declaring lists of abstract symbols on the class attributes.
This way the classes are not named explicitly, for example: - -[source,python] ---------------- -from waflib.Task import TaskBase -class task_test_a(TaskBase): - ext_in = ['.h'] -class task_test_b(TaskBase): - ext_out = ['.h'] ---------------- - -The 'extensions' ext_in and ext_out do not mean that the tasks have to produce files with such extensions, but are mere symbols for use as precedence constraints. - -===== Order extraction - -Before feeding the tasks to the producer-consumer system, a constraint extraction is performed on the tasks having input and output files. The attributes _run_after_ are initialized with the tasks to wait for. - -The two functions called on lists of tasks are: - -. _waflib.Task.set_precedence_constraints_: extract the build order from the task class attributes ext_in/ext_out/before/after -. _waflib.Task.set_file_constraints_: extract the constraints from the tasks having input and output files - -==== Weak order constraints - -Tasks that are known to take a lot of time may be launched first to improve the build times. The general problem of finding an optimal order for launching tasks in parallel and with constraints is called http://en.wikipedia.org/wiki/Job-shop_problem[Job Shop]. In practice this problem can often be reduced to a critical path problem (approximation). - -The following pictures illustrate the difference in scheduling a build made of independent tasks, in which one slow task is clearly identified: - -[source,python] ---------------- -def build(ctx): - for x in range(5): - ctx(rule='sleep 1', color='GREEN', name='short task') - ctx(rule='sleep 5', color='RED', name='long task') ---------------- - -image::tasks_nosort{PIC}["No particular order"{backend@docbook:,width=440:},align="center"] - -A function is used to reorder the tasks from a group before they are passed to the producer. We will replace it to move the long task into first position: - -// tasks_weak -[source,python] ---------------- -from waflib import Task -old = Task.set_file_constraints -def meth(lst): - lst.sort(cmp=lambda x, y: cmp(x.__class__.__name__, y.__class__.__name__)) <1> - old(lst) <2> -Task.set_file_constraints = meth <3> ---------------- - -<1> Set the long task in first position -<2> Execute the original code -<3> Replace the method - -Here is a representation of the effect: - -image::tasks_sort{PIC}["Slowest task first"{backend@docbook:,width=440:},align="center"] - -=== Dependencies - -==== Task signatures - -The direct instances of 'waflib.Task.TaskBase' are very limited and cannot be used to track file changes. The subclass 'waflib.Task.Task' provides the necessary features for the most common builds in which source files are used to produce target files. - -The dependency tracking is based on hashes of the dependencies, called *task signatures*. The signature is computed from various dependency sources, such as input files and configuration set values. - -The following diagram describes how 'waflib.Task.Task' instances are processed: - -image::task_signature{PIC}["Signatures"{backend@docbook:,height=580:},align="center"] - -The following data is used in the signature computation: - -. Explicit dependencies: _input nodes_ and dependencies set explicitly by using _bld.depends_on_ -. Implicit dependencies: dependencies searched by a scanner method (the method _scan_) -. 
Values: configuration set values such as compilation flags - -==== Explicit dependencies - -===== Input and output nodes - -The task objects do not directly depend on other tasks. Other tasks may exist or not, and may have been executed or not. Rather, the input and output nodes themselves hold signature values, which come from different sources: - -. Nodes for build files usually inherit the signature of the task that generated the file -. Nodes from elsewhere have a signature computed automatically from the file contents (hash) - -===== Global dependencies on other nodes - -The tasks may be informed that some files depend on other files transitively, without listing them in the inputs. This is achieved by the method _add_manual_dependency_ from the build context: - -// tasks_manual_deps -[source,python] ---------------- -def configure(ctx): - pass - -def build(ctx): - ctx(rule='cp ${SRC} ${TGT}', source='wscript', target='somecopy') - ctx.add_manual_dependency( - ctx.path.find_node('wscript'), - ctx.path.find_node('testfile')) ---------------- - -The file _somecopy_ will be rebuilt whenever _wscript_ or _testfile_ change, even by one character: - -[source,shishell] ---------------- -$ waf build -Waf: Entering directory `/tmp/tasks_manual_deps/build' -[1/1] somecopy: wscript -> build/somecopy -Waf: Leaving directory `/tmp/tasks_manual_deps/build' -'build' finished successfully (0.034s) - -$ waf -Waf: Entering directory `/tmp/tasks_manual_deps/build' -Waf: Leaving directory `/tmp/tasks_manual_deps/build' -'build' finished successfully (0.006s) - -$ echo " " >> testfile - -$ waf -Waf: Entering directory `/tmp/tasks_manual_deps/build' -[1/1] somecopy: wscript -> build/somecopy -Waf: Leaving directory `/tmp/tasks_manual_deps/build' -'build' finished successfully (0.022s) ---------------- - -==== Implicit dependencies (scanner methods) - -Some tasks can be created dynamically after the build has started, so the dependencies cannot be known in advance. Task subclasses can provide a method named _scan_ to obtain additional nodes implicitly. In the following example, the _copy_ task provides a scanner method to depend on the wscript file found next to the input file.
- -// tasks_scan -[source,python] ---------------- -import time -from waflib.Task import Task -class copy(Task): - - def run(self): - return self.exec_command('cp %s %s' % (self.inputs[0].abspath(), self.outputs[0].abspath())) - - def scan(self): <1> - print('→ calling the scanner method') - node = self.inputs[0].parent.find_resource('wscript') - return ([node], time.time()) <2> - - def runnable_status(self): - ret = super(copy, self).runnable_status() <3> - bld = self.generator.bld <4> - print('nodes: %r' % bld.node_deps[self.uid()]) <5> - print('custom data: %r' % bld.raw_deps[self.uid()]) <6> - return ret - -def configure(ctx): - pass - -def build(ctx): - tsk = copy(env=ctx.env) <7> - tsk.set_inputs(ctx.path.find_resource('a.in')) - tsk.set_outputs(ctx.path.find_or_declare('b.out')) - ctx.add_to_group(tsk) ---------------- - -<1> A scanner method -<2> The return value is a tuple containing a list of nodes to depend on and serializable data for custom uses -<3> Override the method runnable_status to add some logging -<4> Obtain a reference to the build context associated with this task -<5> The nodes returned by the scanner method are stored in the map *bld.node_deps* -<6> The custom data returned by the scanner method is stored in the map *bld.raw_deps* -<7> Create a task manually (encapsulation by task generators will be described in the next chapters) - -[source,shishell] ---------------- -$ waf -→ calling the scanner method <1> -nodes: [/tmp/tasks_scan/wscript] -custom data: 55.51 -[1/1] copy: a.in -> build/b.out -'build' finished successfully (0.021s) - -$ waf <2> -nodes: [/tmp/tasks_scan/wscript] -custom data: 1280561555.512006 -'build' finished successfully (0.005s) - -$ echo " " >> wscript <3> - -$ waf -→ calling the scanner method -nodes: [/tmp/tasks_scan/wscript] -custom data: 64.31 -[1/1] copy: a.in -> build/b.out -'build' finished successfully (0.022s) ---------------- - -<1> The scanner method is always called on a clean build -<2> The scanner method is not called when nothing has changed, although the data returned is retrieved -<3> When a dependency changes, the scanner method is executed once again (the custom data has changed) - -WARNING: If the build order is incorrect, the method _scan_ may fail to find dependent nodes (missing nodes) or the signature calculation may throw an exception (missing signature for dependent nodes). - -==== Values - -The habitual use of command-line parameters such as compilation flags leads to the creation of _dependencies on values_, and more specifically on configuration set values. The Task class attribute 'vars' is used to control which values enter into the signature calculation. In the following example, the task created has no input and no output nodes, and only depends on the values.
- -// tasks_values -[source,python] ---------------- -from waflib.Task import Task -class foo(Task): <1> - vars = ['FLAGS'] <2> - def run(self): - print('the flags are %r' % self.env.FLAGS) <3> - -def options(ctx): - ctx.add_option('--flags', default='-f', dest='flags', type='string') - -def build(ctx): - ctx.env.FLAGS = ctx.options.flags <4> - tsk = foo(env=ctx.env) - ctx.add_to_group(tsk) - -def configure(ctx): - pass ---------------- - -<1> Create a task class named _foo_ -<2> The task instances will be executed whenever 'self.env.FLAGS' changes -<3> Print the value for debugging purposes -<4> Read the value from the command-line - -The execution will produce the following output: - -[source,shishell] ---------------- -$ waf --flags abcdef -[1/1] foo: -the flags are 'abcdef' <1> -'build' finished successfully (0.006s) - -$ waf --flags abcdef <2> -'build' finished successfully (0.004s) - -$ waf --flags abc -[1/1] foo: <3> -the flags are 'abc' -'build' finished successfully (0.006s) ---------------- - -<1> The task is executed on the first run -<2> The dependencies have not changed, so the task is not executed -<3> The flags have changed so the task is executed - -=== Task tuning - -==== Class access - -When a task provides an attribute named _run_str_ as in the following example: - -// tasks_values2 -[source,python] ---------------- -def configure(ctx): - ctx.env.COPY = '/bin/cp' - ctx.env.COPYFLAGS = ['-f'] - -def build(ctx): - from waflib.Task import Task - class copy(Task): - run_str = '${COPY} ${COPYFLAGS} ${SRC} ${TGT}' - print(copy.vars) - - tsk = copy(env=ctx.env) - tsk.set_inputs(ctx.path.find_resource('wscript')) - tsk.set_outputs(ctx.path.find_or_declare('b.out')) - ctx.add_to_group(tsk) ---------------- - -It is assumed that 'run_str' represents a command-line, and that the variables in _$\{}_ such as 'COPYFLAGS' represent variables to add to the dependencies. A metaclass processes 'run_str' to obtain the method 'run' (called to execute the task) and the variables in the attribute 'vars' (merged with existing variables). The function created is displayed in the following output: - -[source,shishell] ---------------- -$ waf --zones=action -13:36:49 action def f(tsk): - env = tsk.env - gen = tsk.generator - bld = gen.bld - wd = getattr(tsk, 'cwd', None) - def to_list(xx): - if isinstance(xx, str): return [xx] - return xx - lst = [] - lst.extend(to_list(env['COPY'])) - lst.extend(to_list(env['COPYFLAGS'])) - lst.extend([a.path_from(bld.bldnode) for a in tsk.inputs]) - lst.extend([a.path_from(bld.bldnode) for a in tsk.outputs]) - lst = [x for x in lst if x] - return tsk.exec_command(lst, cwd=wd, env=env.env or None) -[1/1] copy: wscript -> build/b.out -['COPY', 'COPYFLAGS'] -'build' finished successfully (0.007s) ---------------- - -All subclasses of 'waflib.Task.TaskBase' are stored on the module attribute 'waflib.Task.classes'. Therefore, the 'copy' task can be accessed by using: - -[source,python] ---------------- -from waflib import Task -cls = Task.classes['copy'] ---------------- - -==== Scriptlet expressions - -Although 'run_str' is aimed at configuration set variables, a few special cases are provided for convenience: - -. If the value starts with *env*, *gen*, *bld* or *tsk*, a method call will be made -. If the value starts with SRC[n] or TGT[n], a method call to the input/output node _n_ will be made -. SRC represents the list of task inputs seen from the root of the build directory -. 
TGT represents the list of task outputs seen from the root of the build directory - -Here are a few examples: - -[source,python] ---------------- -${SRC[0].parent.abspath()} <1> -${bld.root.abspath()} <2> -${tsk.uid()} <3> -${CPPPATH_ST:INCPATHS} <4> ---------------- - -<1> Absolute path of the parent folder of the task's first source file -<2> File system root -<3> Print the task unique identifier -<4> Perform a map replacement equivalent to _[env.CPPPATH_ST % x for x in env.INCPATHS]_ - -==== Direct class modifications - -===== Always execute - -The function 'waflib.Task.always_run' is used to force a task to be executed whenever a build is performed. It sets a method 'runnable_status' that always returns _RUN_ME_. - -// task_always -[source,python] ---------------- -def configure(ctx): - pass - -def build(ctx): - from waflib import Task - class copy(Task.Task): - run_str = 'cp ${SRC} ${TGT}' - copy = Task.always_run(copy) - - tsk = copy(env=ctx.env) - tsk.set_inputs(ctx.path.find_resource('wscript')) - tsk.set_outputs(ctx.path.find_or_declare('b.out')) - ctx.add_to_group(tsk) ---------------- - -For convenience, rule-based task generators can declare the *always* attribute to achieve the same results: - -[source,python] ---------------- -def build(ctx): - ctx( - rule = 'echo hello', - always = True - ) ---------------- - -===== File hashes and dependencies - -Nodes created by tasks during the build inherit the signature of the task that created them. -Tasks consuming such nodes as inputs will be executed whenever the first tasks are executed. -This is usually a desirable behaviour, as the tasks will propagate the dependencies in a transitive manner. - -In a few contexts though, there can be an excess of downstream rebuilds even if the output file contents have not changed. -This will also cause build files in the source directory to be rebuilt whenever a new build is initiated (files in the source directory are hashed). -The function 'waflib.Task.update_outputs' is used to enable file hashes in task classes; it is used in the same way as 'waflib.Task.always_run'. - -For convenience, rule-based task generators can provide the *update_outputs* attribute to simplify the declaration: - -[source,python] ---------------- -def build(ctx): - ctx( - rule = 'touch ${TGT}', - source = 'wscript', - target = ctx.path.make_node('wscript2'), - update_outputs = True - ) - ctx( - rule = 'cp ${SRC} ${TGT}', - source = ctx.path.make_node('wscript2'), - target = 'wscript3' - ) ---------------- - -In this example, the file *wscript2* is created in the source directory. -The *update_outputs* keyword is therefore necessary to prevent unnecessary rebuilds. -Additionally, *wscript3* is only rebuilt when the contents of *wscript2* change.
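Since 'waflib.Task.update_outputs' is used in the same way as 'waflib.Task.always_run', it can also be applied directly to a manually defined task class. The following minimal sketch mirrors the always-execute example above (the file names are only illustrative):

[source,python]
---------------
def configure(ctx):
    pass

def build(ctx):
    from waflib import Task
    class copy(Task.Task):
        run_str = 'cp ${SRC} ${TGT}'
    # downstream tasks now depend on the hash of the output file
    # instead of inheriting the signature of this copy task
    copy = Task.update_outputs(copy)

    tsk = copy(env=ctx.env)
    tsk.set_inputs(ctx.path.find_resource('wscript'))
    tsk.set_outputs(ctx.path.find_or_declare('b.out'))
    ctx.add_to_group(tsk)
---------------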
- diff --git a/docs/book/tasks_actors.semd b/docs/book/tasks_actors.semd deleted file mode 100644 index 3c0437db15..0000000000 Binary files a/docs/book/tasks_actors.semd and /dev/null differ diff --git a/docs/book/tasks_nogroup.eps b/docs/book/tasks_nogroup.eps deleted file mode 100644 index c034854e19..0000000000 --- a/docs/book/tasks_nogroup.eps +++ /dev/null @@ -1,700 +0,0 @@ -%!PS-Adobe-3.0 EPSF-3.0 -%%Creator: cairo 1.8.10 (http://cairographics.org) -%%CreationDate: Thu Jul 29 16:47:07 2010 -%%Pages: 1 -%%BoundingBox: 0 0 637 164 -%%DocumentData: Clean7Bit -%%LanguageLevel: 2 -%%EndComments -%%BeginProlog -/cairo_eps_state save def -/dict_count countdictstack def -/op_count count 1 sub def -userdict begin -/q { gsave } bind def -/Q { grestore } bind def -/cm { 6 array astore concat } bind def -/w { setlinewidth } bind def -/J { setlinecap } bind def -/j { setlinejoin } bind def -/M { setmiterlimit } bind def -/d { setdash } bind def -/m { moveto } bind def -/l { lineto } bind def -/c { curveto } bind def -/h { closepath } bind def -/re { exch dup neg 3 1 roll 5 3 roll moveto 0 rlineto - 0 exch rlineto 0 rlineto closepath } bind def -/S { stroke } bind def -/f { fill } bind def -/f* { eofill } bind def -/B { fill stroke } bind def -/B* { eofill stroke } bind def -/n { newpath } bind def -/W { clip } bind def -/W* { eoclip } bind def -/BT { } bind def -/ET { } bind def -/pdfmark where { pop globaldict /?pdfmark /exec load put } - { globaldict begin /?pdfmark /pop load def /pdfmark - /cleartomark load def end } ifelse -/BDC { mark 3 1 roll /BDC pdfmark } bind def -/EMC { mark /EMC pdfmark } bind def -/cairo_store_point { /cairo_point_y exch def /cairo_point_x exch def } def -/Tj { show currentpoint cairo_store_point } bind def -/TJ { - { - dup - type /stringtype eq - { show } { -0.001 mul 0 cairo_font_matrix dtransform rmoveto } ifelse - } forall - currentpoint cairo_store_point -} bind def -/cairo_selectfont { cairo_font_matrix aload pop pop pop 0 0 6 array astore - cairo_font exch selectfont cairo_point_x cairo_point_y moveto } bind def -/Tf { pop /cairo_font exch def /cairo_font_matrix where - { pop cairo_selectfont } if } bind def -/Td { matrix translate cairo_font_matrix matrix concatmatrix dup - /cairo_font_matrix exch def dup 4 get exch 5 get cairo_store_point - /cairo_font where { pop cairo_selectfont } if } bind def -/Tm { 2 copy 8 2 roll 6 array astore /cairo_font_matrix exch def - cairo_store_point /cairo_font where { pop cairo_selectfont } if } bind def -/g { setgray } bind def -/rg { setrgbcolor } bind def -/d1 { setcachedevice } bind def -%%EndProlog -11 dict begin -/FontType 42 def -/FontName /f-0-0 def -/PaintType 0 def -/FontMatrix [ 1 0 0 1 0 0 ] def -/FontBBox [ 0 0 0 0 ] def -/Encoding 256 array def -0 1 255 { Encoding exch /.notdef put } for -Encoding 1 /uni0050 put -Encoding 2 /uni0061 put -Encoding 3 /uni0072 put -Encoding 4 /uni006C put -Encoding 5 /uni0065 put -Encoding 6 /uni0020 put -Encoding 7 /uni0062 put -Encoding 8 /uni0075 put -Encoding 9 /uni0069 put -Encoding 10 /uni0064 put -Encoding 11 /uni0070 put -Encoding 12 /uni0073 put -Encoding 13 /uni006E put -Encoding 14 /uni0074 put -Encoding 15 /uni006F put -Encoding 16 /uni0066 put -Encoding 17 /uni0022 put -Encoding 18 /uni0077 put -Encoding 19 /uni002D put -Encoding 20 /uni006A put -Encoding 21 /uni0034 put -Encoding 22 /uni0028 put -Encoding 23 /uni0067 put -Encoding 24 /uni0079 put -Encoding 25 /uni0029 put -Encoding 26 /uni006B put -Encoding 27 /uni0063 put -/CharStrings 28 dict dup begin -/.notdef 0 
def -/uni0050 1 def -/uni0061 2 def -/uni0072 3 def -/uni006C 4 def -/uni0065 5 def -/uni0020 6 def -/uni0062 7 def -/uni0075 8 def -/uni0069 9 def -/uni0064 10 def -/uni0070 11 def -/uni0073 12 def -/uni006E 13 def -/uni0074 14 def -/uni006F 15 def -/uni0066 16 def -/uni0022 17 def -/uni0077 18 def -/uni002D 19 def -/uni006A 20 def -/uni0034 21 def -/uni0028 22 def -/uni0067 23 def -/uni0079 24 def -/uni0029 25 def -/uni006B 26 def -/uni0063 27 def -end readonly def -/sfnts [ -<00010000000a008000030020636d617000f2f18200001328000000766376742000691d390000 -13a0000001fe6670676d7134766a000015a0000000ab676c79662855b3c7000000ac0000127c -68656164f1f329920000164c00000036686865610cb8066d0000168400000024686d747873a1 -0dd5000016a8000000706c6f63610000f22400001718000000746d617870048906710000178c -00000020707265703b07f100000017ac0000056800020066fe96046605a400030007001a400c -04fb0006fb0108057f0204002fc4d4ec310010d4ecd4ec301311211125211121660400fc7303 -1bfce5fe96070ef8f2720629000200c90000048d05d500080013003a40180195100095098112 -100a0802040005190d3f11001c09041410fcec32fcec11173931002ff4ecd4ec30400b0f151f -153f155f15af1505015d011133323635342623252132041514042b0111230193fe8d9a9a8dfe -3801c8fb0101fefffbfeca052ffdcf92878692a6e3dbdde2fda80002007bffe3042d047b000a -002500bc4027191f0b17090e00a91706b90e1120861fba1cb923b8118c170c001703180d0908 -0b1f030814452610fcecccd4ec323211393931002fc4e4f4fcf4ec10c6ee10ee113911391239 -30406e301d301e301f3020302130223f27401d401e401f402040214022501d501e501f502050 -21502250277027851d871e871f8720872185229027a027f0271e301e301f30203021401e401f -40204021501e501f50205021601e601f60206021701e701f70207021801e801f80208021185d -015d0122061514163332363d01371123350e01232226353436332135342623220607353e0133 -321602bedfac816f99b9b8b83fbc88accbfdfb0102a79760b65465be5af3f00233667b6273d9 -b4294cfd81aa6661c1a2bdc0127f8b2e2eaa2727fc00000100ba0000034a047b001100304014 -060b0700110b03870eb809bc070a06080008461210fcc4ec3231002fe4f4ecc4d4cc11123930 -b450139f1302015d012e012322061511231133153e0133321617034a1f492c9ca7b9b93aba85 -132e1c03b41211cbbefdb20460ae666305050000000100c100000179061400030022b7009702 -010800460410fcec31002fec30400d10054005500560057005f00506015d13331123c1b8b806 -14f9ec0000020071ffe3047f047b0014001b00704024001501098608880515a90105b90c01bb -18b912b80c8c1c1b1502081508004b02120f451c10fcecf4ecc4111239310010e4f4ece410ee -10ee10f4ee1112393040293f1d701da01dd01df01d053f003f013f023f153f1b052c072f082f -092c0a6f006f016f026f156f1b095d71015d0115211e0133323637150e012320001110003332 -00072e0123220607047ffcb20ccdb76ac76263d06bfef4fec70129fce20107b802a5889ab90e -025e5abec73434ae2a2c0138010a01130143feddc497b4ae9e00000200baffe304a40614000b -001c0038401903b90c0f09b918158c0fb81b971900121247180c06081a461d10fcec3232f4ec -31002fece4f4c4ec10c6ee30b6601e801ea01e03015d013426232206151416333236013e0133 -3200111002232226271523113303e5a79292a7a79292a7fd8e3ab17bcc00ffffcc7bb13ab9b9 -022fcbe7e7cbcbe7e702526461febcfef8fef8febc6164a80614000200aeffe30458047b0013 -0014003b401c030900030e0106870e118c0a01bc14b80c0d0908140b4e020800461510fcecf4 -39ec3231002fe4e432f4c4ec1112173930b46f15c01502015d13113311141633323635113311 -23350e0123222601aeb87c7c95adb8b843b175c1c801cf01ba02a6fd619f9fbea4027bfba0ac -6663f003a800000200c100000179061400030007002b400e06be04b100bc0205010804004608 -10fc3cec3231002fe4fcec30400b1009400950096009700905015d1333112311331523c1b8b8 -b8b80460fba00614e90000020071ffe3045a06140010001c003840191ab9000e14b905088c0e -b801970317040008024711120b451d10fcecf4ec323231002fece4f4c4ec10c4ee30b6601e80 
-1ea01e03015d0111331123350e0123220211100033321601141633323635342623220603a2b8 -b83ab17ccbff00ffcb7cb1fdc7a79292a8a89292a703b6025ef9eca864610144010801080144 -61fe15cbe7e7cbcbe7e7000200bafe5604a4047b0010001c003e401b1ab9000e14b90508b80e -8c01bd03bc1d11120b471704000802461d10fcec3232f4ec310010e4e4e4f4c4ec10c4ee3040 -09601e801ea01ee01e04015d2511231133153e01333200111002232226013426232206151416 -3332360173b9b93ab17bcc00ffffcc7bb10238a79292a7a79292a7a8fdae060aaa6461febcfe -f8fef8febc6101ebcbe7e7cbcbe7e70000000001006fffe303c7047b002700e7403c0d0c020e -0b531f1e080902070a531f1f1e420a0b1e1f041500860189041486158918b91104b925b8118c -281e0a0b1f1b0700521b080e07081422452810fcc4ecd4ece4111239393939310010e4f4ec10 -fef5ee10f5ee121739304b535807100eed111739070eed1117395922b2002701015d406d1c0a -1c0b1c0c2e092c0a2c0b2c0c3b093b0a3b0b3b0c0b200020012402280a280b2a132f142f152a -16281e281f292029212427860a860b860c860d12000000010202060a060b030c030d030e030f -03100319031a031b031c041d09272f293f295f297f2980299029a029f029185d005d7101152e -012322061514161f011e0115140623222627351e013332363534262f012e0135343633321603 -8b4ea85a898962943fc4a5f7d85ac36c66c661828c65ab40ab98e0ce66b4043fae2828545440 -49210e2a99899cb62323be353559514b50250f2495829eac1e000000000100ba00000464047b -001300364019030900030e0106870e11b80cbc0a010208004e0d09080b461410fcec32f4ec31 -002f3ce4f4c4ec1112173930b46015cf1502015d0111231134262322061511231133153e0133 -32160464b87c7c95acb9b942b375c1c602a4fd5c029e9f9ebea4fd870460ae6564ef00010037 -000002f2059e0013003840190e05080f03a9001101bc08870a0b08090204000810120e461410 -fc3cc4fc3cc432393931002fecf43cc4ec3211393930b2af1501015d01112115211114163b01 -152322263511233533110177017bfe854b73bdbdd5a28787059efec28ffda0894e9a9fd20260 -8f013e00000000020071ffe30475047b000b0017004a401306b91200b90cb8128c1809120f51 -031215451810fcecf4ec310010e4f4ec10ee3040233f197b007b067f077f087f097f0a7f0b7b -0c7f0d7f0e7f0f7f107f117b12a019f01911015d012206151416333236353426273200111000 -232200111000027394acab9593acac93f00112feeef0f1feef011103dfe7c9c9e7e8c8c7e99c -fec8feecfeedfec7013901130114013800000001002f000002f8061400130059401c0510010c -08a906018700970e06bc0a02130700070905080d0f0b4c1410fc4bb00a5458b9000b00403859 -4bb00e5458b9000bffc038593cc4fc3cc4c412393931002fe432fcec10ee321239393001b640 -155015a015035d01152322061d012115211123112335333534363302f8b0634d012ffed1b9b0 -b0aebd0614995068638ffc2f03d18f4ebbab000200c503aa02e905d5000300070042400f0501 -8404008108040506000502040810fc4bb012544bb013545b58b90002ffc03859fcdcec310010 -f43cec323001400f30094009500960097009a009bf09075d0111231121112311016faa0224aa -05d5fdd5022bfdd5022b000000010056000006350460000c01eb404905550605090a0904550a -0903550a0b0a025501020b0b0a061107080705110405080807021103020c000c011100000c42 -0a050203060300bf0b080c0b0a09080605040302010b07000d10d44bb00a544bb011545b4bb0 -12545b4bb013545b4bb00b545b58b9000000403859014bb00c544bb00d545b4bb010545b58b9 -0000ffc03859cc173931002f3cec32321739304b5358071005ed071008ed071008ed071005ed -071008ed071005ed0705ed071008ed59220140ff050216021605220a350a49024905460a400a -5b025b05550a500a6e026e05660a79027f0279057f05870299029805940abc02bc05ce02c703 -cf051d0502090306040b050a080b09040b050c1502190316041a051b081b09140b150c250025 -0123022703210425052206220725082709240a210b230c390336043608390c300e4602480346 -04400442054006400740084409440a440b400e400e5600560156025004510552065207500853 -09540a550b6300640165026a0365046a056a066a076e09610b670c6f0e7500750179027d0378 -047d057a067f067a077f07780879097f097b0a760b7d0c870288058f0e97009701940293039c 
-049b05980698079908402f960c9f0ea600a601a402a403ab04ab05a906a907ab08a40caf0eb5 -02b103bd04bb05b809bf0ec402c303cc04ca05795d005d13331b01331b013301230b012356b8 -e6e5d9e6e5b8fedbd9f1f2d90460fc96036afc96036afba00396fc6a0001006401df027f0283 -00030011b6009c020401000410dccc310010d4ec301321152164021bfde50283a4000002ffdb -fe5601790614000b000f0044401c0b0207000ebe0c078705bd00bc0cb110081005064f0d0108 -0c00461010fc3cec32e4391239310010ece4f4ec10ee1112393930400b101140115011601170 -1105015d13331114062b01353332363511331523c1b8a3b54631694cb8b80460fb8cd6c09c61 -990628e9000000020064000004a405d50002000d0081401d010d030d0003030d4200030b07a0 -0501038109010c0a001c0608040c0e10dc4bb00b544bb00d545b58b9000cffc03859d43cc4ec -32113931002fe4d43cec321239304b5358071004c9071005c9592201402a0b002a0048005900 -690077008a000716012b0026012b0336014e014f0c4f0d5601660175017a0385010d5d005d09 -012103331133152311231121350306fe0201fe35fed5d5c9fd5e0525fce303cdfc33a8fea001 -60c30000000100b0fef2027b0612000d0037400f069800970e0d070003120600130a0e10dc4b -b0135458b9000affc038594bb00f5458b9000a00403859e432ec113939310010fcec30010602 -1514121723260235341237027b86828385a0969594970612e6fe3ee7e7fe3be5eb01c6e0df01 -c4ec00020071fe56045a047b000b0028004a4023190c1d0912861316b90f03b92623b827bc09 -b90fbd1a1d261900080c4706121220452910fcc4ecf4ec323231002fc4e4ece4f4c4ec10fed5 -ee1112393930b6602a802aa02a03015d01342623220615141633323617100221222627351e01 -3332363d010e0123220211101233321617353303a2a59594a5a59495a5b8fefefa61ac51519e -52b5b439b27ccefcfcce7cb239b8023dc8dcdcc8c7dcdcebfee2fee91d1eb32c2abdbf5b6362 -013a01030104013a6263aa000001003dfe56047f0460000f018b40430708020911000f0a110b -0a00000f0e110f000f0d110c0d00000f0d110e0d0a0b0a0c110b0b0a420d0b0910000b058703 -bd0e0bbc100e0d0c0a09060300080f040f0b1010d44bb00a544bb008545b58b9000b00403859 -4bb0145458b9000bffc03859c4c4111739310010e432f4ec113911391239304b5358071005ed -071008ed071008ed071005ed071008ed0705ed173259220140f0060005080609030d160a170d -100d230d350d490a4f0a4e0d5a095a0a6a0a870d800d930d120a000a09060b050c0b0e0b0f17 -01150210041005170a140b140c1a0e1a0f2700240124022004200529082809250a240b240c27 -0d2a0e2a0f201137003501350230043005380a360b360c380d390e390f301141004001400240 -03400440054006400740084209450a470d490e490f4011540051015102550350045005560655 -0756085709570a550b550c590e590f501166016602680a690e690f60117b08780e780f89008a -09850b850c890d890e890f9909950b950c9a0e9a0fa40ba40cab0eab0fb011cf11df11ff1165 -5d005d050e012b01353332363f01013309013302934e947c936c4c543321fe3bc3015e015ec3 -68c87a9a488654044efc94036c000000000100a4fef2026f0612000d001f400f079800970e07 -01000b12041308000e10dc3cf4ec113939310010fcec301333161215140207233612353402a4 -a096959596a08583830612ecfe3cdfe0fe3aebe501c5e7e701c20000000100ba0000049c0614 -000a00bc40290811050605071106060503110405040211050504420805020303bc0097090605 -01040608010800460b10fcec32d4c4113931002f3cece41739304b5358071004ed071005ed07 -1005ed071004ed5922b2100c01015d405f04020a081602270229052b08560266026708730277 -05820289058e08930296059708a3021209050906020b030a072803270428052b062b07400c68 -03600c8903850489058d068f079a039707aa03a705b607c507d607f703f003f704f0041a5d71 -005d1333110133090123011123bab90225ebfdae026bf0fdc7b90614fc6901e3fdf4fdac0223 -fddd00010071ffe303e7047b0019003f401b00860188040e860d880ab91104b917b8118c1a07 -120d004814451a10fce432ec310010e4f4ec10fef4ee10f5ee30400b0f1b101b801b901ba01b -05015d01152e0123220615141633323637150e0123220011100021321603e74e9d50b3c6c6b3 -509d4e4da55dfdfed6012d010655a20435ac2b2be3cdcde32b2baa2424013e010e0112013a23 
-000000000002000300000000001400010000000000340004002000000004000400010000f01b -ffff0000f000ffff10000001000000000006004200000000001c000000010002000300040005 -0006000700080009000a000b000c000d000e000f001000110012001300140015001600170018 -0019001a001b0000013500b800cb00cb00c100aa009c01a600b800660000007100cb00a002b2 -0085007500b800c301cb0189022d00cb00a600f000d300aa008700cb03aa0400014a003300cb -000000d9050200f4015400b4009c01390114013907060400044e04b4045204b804e704cd0037 -047304cd04600473013303a2055605a60556053903c5021200c9001f00b801df007300ba03e9 -033303bc0444040e00df03cd03aa00e503aa0404000000cb008f00a4007b00b80014016f007f -027b0252008f00c705cd009a009a006f00cb00cd019e01d300f000ba018300d5009803040248 -009e01d500c100cb00f600830354027f00000333026600d300c700a400cd008f009a00730400 -05d5010a00fe022b00a400b4009c00000062009c0000001d032d05d505d505d505f0007f007b -005400a406b80614072301d300b800cb00a601c301ec069300a000d3035c037103db01850423 -04a80448008f0139011401390360008f05d5019a0614072306660179046004600460047b009c -00000277046001aa00e904600762007b00c5007f027b000000b4025205cd006600bc00660077 -061000cd013b01850389008f007b0000001d00cd074a042f009c009c0000077d006f0000006f -0335006a006f007b00ae00b2002d0396008f027b00f600830354063705f6008f009c04e10266 -008f018d02f600cd03440029006604ee00730000140000960000b707060504030201002c2010 -b002254964b040515820c859212d2cb002254964b040515820c859212d2c20100720b00050b0 -0d7920b8ffff5058041b0559b0051cb0032508b0042523e120b00050b00d7920b8ffff505804 -1b0559b0051cb0032508e12d2c4b505820b0fd454459212d2cb002254560442d2c4b5358b002 -25b0022545445921212d2c45442d2cb00225b0022549b00525b005254960b0206368208a108a -233a8a10653a2d000001000000024cccffd9d8525f0f3cf5001f080000000000c6bc48a00000 -0000c6bc48a0f7d6fd330d72095500000008000000010000000000010000076dfe1d00000de2 -f7d6fa510d7200010000000000000000000000000000001c04cd006604d300c904e7007b034a -00ba023900c104ec0071028b0000051400ba051200ae023900c105140071051400ba042b006f -051200ba0323003704e5007102d1002f03ae00c5068b005602e300640239ffdb05170064031f -00b00514007104bc003d031f00a404a200ba046600710000000000000044000000c4000001f0 -000002600000029c0000037000000370000004080000048c000004dc00000574000006140000 -0774000007ec000008680000090c000009a400000a1000000c3400000c6000000cdc00000d98 -00000e0800000ed00000109c000010f4000011e40000127c00010000001c0354002b0068000c -000200100099000800000415021600080004b8028040fffbfe03fa1403f92503f83203f79603 -f60e03f5fe03f4fe03f32503f20e03f19603f02503ef8a4105effe03ee9603ed9603ecfa03eb -fa03eafe03e93a03e84203e7fe03e63203e5e45305e59603e48a4105e45303e3e22f05e3fa03 -e22f03e1fe03e0fe03df3203de1403dd9603dcfe03db1203da7d03d9bb03d8fe03d68a4105d6 -7d03d5d44705d57d03d44703d3d21b05d3fe03d21b03d1fe03d0fe03cffe03cefe03cd9603cc -cb1e05ccfe03cb1e03ca3203c9fe03c6851105c61c03c51603c4fe03c3fe03c2fe03c1fe03c0 -fe03bffe03befe03bdfe03bcfe03bbfe03ba1103b9862505b9fe03b8b7bb05b8fe03b7b65d05 -b7bb03b78004b6b52505b65d40ff03b64004b52503b4fe03b39603b2fe03b1fe03b0fe03affe -03ae6403ad0e03acab2505ac6403abaa1205ab2503aa1203a98a4105a9fa03a8fe03a7fe03a6 -fe03a51203a4fe03a3a20e05a33203a20e03a16403a08a4105a096039ffe039e9d0c059efe03 -9d0c039c9b19059c64039b9a10059b19039a1003990a0398fe0397960d0597fe03960d03958a -410595960394930e05942803930e0392fa039190bb0591fe03908f5d0590bb039080048f8e25 -058f5d038f40048e25038dfe038c8b2e058cfe038b2e038a8625058a410389880b0589140388 -0b03878625058764038685110586250385110384fe038382110583fe0382110381fe0380fe03 -7ffe0340ff7e7d7d057efe037d7d037c64037b5415057b25037afe0379fe03780e03770c0376 
-0a0375fe0374fa0373fa0372fa0371fa0370fe036ffe036efe036c21036bfe036a1142056a53 -0369fe03687d036711420566fe0365fe0364fe0363fe0362fe03613a0360fa035e0c035dfe03 -5bfe035afe0359580a0559fa03580a035716190557320356fe03555415055542035415035301 -1005531803521403514a130551fe03500b034ffe034e4d10054efe034d10034cfe034b4a1305 -4bfe034a4910054a1303491d0d05491003480d0347fe0346960345960344fe0343022d0543fa -0342bb03414b0340fe033ffe033e3d12053e14033d3c0f053d12033c3b0d053c40ff0f033b0d -033afe0339fe033837140538fa033736100537140336350b05361003350b03341e03330d0332 -310b0532fe03310b03302f0b05300d032f0b032e2d09052e10032d09032c32032b2a25052b64 -032a2912052a25032912032827250528410327250326250b05260f03250b0324fe0323fe0322 -0f03210110052112032064031ffa031e1d0d051e64031d0d031c1142051cfe031bfa031a4203 -1911420519fe031864031716190517fe031601100516190315fe0314fe0313fe031211420512 -fe0311022d05114203107d030f64030efe030d0c16050dfe030c0110050c16030bfe030a1003 -09fe0308022d0508fe030714030664030401100504fe03401503022d0503fe0302011005022d -0301100300fe0301b80164858d012b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b -2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b -2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b -2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b -2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b002b2b2b2b2b2b2b2b2b2b -2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b -2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b -2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b -2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b1d00> -] def -FontName currentdict end definefont pop -%%Page: 1 1 -%%BeginPageSetup -%%PageBoundingBox: 0 0 637 164 -%%EndPageSetup -q -0 g -BT -12 0 0 12 126.246094 2.496091 Tm -/f-0-0 1 Tf -[<01>44<0203>-1<02>-1<04>1<040504>1<06>-1<0708>-1<09>1<040a0603>20<050b -03>21<050c>-1<05>]TJ -10.523438 0 Td -[<0d>-1<0e02>-1<0e090f>1<0d>-1<06>-1<10>1<0f03>-1<0611>-1<12>-1<021006 -13>-1<14>1<15>-1<11>-1<06>]TJ -10.185547 0 Td -[<160f0d>-1<05061703>20<0f>1<08>-1<0b06>-1<0718060a05100208>]TJ -10.189453 0 Td -<040e19>Tj -ET -0.996078 0.996078 0.266667 rg -0.398 163.298 m 37.223 163.298 l 37.223 145.696 l 0.398 145.696 l 0.398 -163.298 l h -0.398 163.298 m f* -0 g -0.8 w -0 J -0 j -[] 0.0 d -4 M q 1 0 0 -1 0 163.696091 cm -0.398 0.398 m 37.223 0.398 l 37.223 18 l 0.398 18 l 0.398 0.398 l h -0.398 0.398 m S Q -0.996078 0.996078 0.266667 rg -22.645 145.696 m 80.012 145.696 l 80.012 128.095 l 22.645 128.095 l -22.645 145.696 l h -22.645 145.696 m f* -0 g -q 1 0 0 -1 0 163.696091 cm -22.645 18 m 80.012 18 l 80.012 35.602 l 22.645 35.602 l 22.645 18 l h -22.645 18 m S Q -0.4 0.529412 0.733333 rg -23.207 128.095 m 79.758 128.095 l 79.758 110.497 l 23.207 110.497 l -23.207 128.095 l h -23.207 128.095 m f* -0 g -q 1 0 0 -1 0 163.696091 cm -23.207 35.602 m 79.758 35.602 l 79.758 53.199 l 23.207 53.199 l 23.207 -35.602 l h -23.207 35.602 m S Q -0.996078 0.996078 0.266667 rg -41.914 163.298 m 100.129 163.298 l 100.129 145.696 l 41.914 145.696 l -41.914 163.298 l h -41.914 163.298 m f* -0 g -q 1 0 0 -1 0 163.696091 cm -41.914 0.398 m 100.129 0.398 l 100.129 18 l 41.914 18 l 41.914 0.398 l -h -41.914 0.398 m S Q -0.996078 0.996078 0.266667 rg -41.793 110.497 m 129.777 110.497 l 129.777 92.895 l 41.793 92.895 l -41.793 110.497 l h -41.793 110.497 m f* -0 g -q 1 0 0 -1 0 163.696091 cm -41.793 53.199 m 129.777 53.199 l 129.777 70.801 l 41.793 
70.801 l -41.793 53.199 l h -41.793 53.199 m S Q -0.996078 0.996078 0.266667 rg -84.816 128.095 m 141.984 128.095 l 141.984 110.497 l 84.816 110.497 l -84.816 128.095 l h -84.816 128.095 m f* -0 g -q 1 0 0 -1 0 163.696091 cm -84.816 35.602 m 141.984 35.602 l 141.984 53.199 l 84.816 53.199 l -84.816 35.602 l h -84.816 35.602 m S Q -0.4 0.529412 0.733333 rg -85.754 145.696 m 171.168 145.696 l 171.168 128.095 l 85.754 128.095 l -85.754 145.696 l h -85.754 145.696 m f* -0 g -q 1 0 0 -1 0 163.696091 cm -85.754 18 m 171.168 18 l 171.168 35.602 l 85.754 35.602 l 85.754 18 l h -85.754 18 m S Q -0.4 0.529412 0.733333 rg -112.055 163.298 m 172.578 163.298 l 172.578 145.696 l 112.055 145.696 l -112.055 163.298 l h -112.055 163.298 m f* -0 g -q 1 0 0 -1 0 163.696091 cm -112.055 0.398 m 172.578 0.398 l 172.578 18 l 112.055 18 l 112.055 0.398 -l h -112.055 0.398 m S Q -0.996078 0.996078 0.266667 rg -131.078 110.497 m 271.371 110.497 l 271.371 92.895 l 131.078 92.895 l -131.078 110.497 l h -131.078 110.497 m f* -0 g -q 1 0 0 -1 0 163.696091 cm -131.078 53.199 m 271.371 53.199 l 271.371 70.801 l 131.078 70.801 l -131.078 53.199 l h -131.078 53.199 m S Q -0.996078 0.996078 0.266667 rg -149.387 128.095 m 272.578 128.095 l 272.578 110.497 l 149.387 110.497 l -149.387 128.095 l h -149.387 128.095 m f* -0 g -q 1 0 0 -1 0 163.696091 cm -149.387 35.602 m 272.578 35.602 l 272.578 53.199 l 149.387 53.199 l -149.387 35.602 l h -149.387 35.602 m S Q -0.4 0.529412 0.733333 rg -172.984 145.696 m 256.016 145.696 l 256.016 128.095 l 172.984 128.095 l -172.984 145.696 l h -172.984 145.696 m f* -0 g -q 1 0 0 -1 0 163.696091 cm -172.984 18 m 256.016 18 l 256.016 35.602 l 172.984 35.602 l 172.984 18 -l h -172.984 18 m S Q -0.4 0.529412 0.733333 rg -176.34 163.298 m 230.262 163.298 l 230.262 145.696 l 176.34 145.696 l -176.34 163.298 l h -176.34 163.298 m f* -0 g -q 1 0 0 -1 0 163.696091 cm -176.34 0.398 m 230.262 0.398 l 230.262 18 l 176.34 18 l 176.34 0.398 l -h -176.34 0.398 m S Q -0.4 0.529412 0.733333 rg -230.66 163.298 m 291.715 163.298 l 291.715 145.696 l 230.66 145.696 l -230.66 163.298 l h -230.66 163.298 m f* -0 g -q 1 0 0 -1 0 163.696091 cm -230.66 0.398 m 291.715 0.398 l 291.715 18 l 230.66 18 l 230.66 0.398 l -h -230.66 0.398 m S Q -0.4 0.529412 0.733333 rg -261.297 145.696 m 330.078 145.696 l 330.078 128.095 l 261.297 128.095 l -261.297 145.696 l h -261.297 145.696 m f* -0 g -q 1 0 0 -1 0 163.696091 cm -261.297 18 m 330.078 18 l 330.078 35.602 l 261.297 35.602 l 261.297 18 -l h -261.297 18 m S Q -0.4 0.529412 0.733333 rg -273.594 110.497 m 328.016 110.497 l 328.016 92.895 l 273.594 92.895 l -273.594 110.497 l h -273.594 110.497 m f* -0 g -q 1 0 0 -1 0 163.696091 cm -273.594 53.199 m 328.016 53.199 l 328.016 70.801 l 273.594 70.801 l -273.594 53.199 l h -273.594 53.199 m S Q -0.996078 0.996078 0.266667 rg -289.801 128.095 m 383.754 128.095 l 383.754 110.497 l 289.801 110.497 l -289.801 128.095 l h -289.801 128.095 m f* -0 g -q 1 0 0 -1 0 163.696091 cm -289.801 35.602 m 383.754 35.602 l 383.754 53.199 l 289.801 53.199 l -289.801 35.602 l h -289.801 35.602 m S Q -0.301961 0.654902 0.301961 rg -291.879 163.298 m 359.137 163.298 l 359.137 145.696 l 291.879 145.696 l -291.879 163.298 l h -291.879 163.298 m f* -0 g -q 1 0 0 -1 0 163.696091 cm -291.879 0.398 m 359.137 0.398 l 359.137 18 l 291.879 18 l 291.879 0.398 -l h -291.879 0.398 m S Q -0.301961 0.654902 0.301961 rg -328.812 110.497 m 410.258 110.497 l 410.258 92.895 l 328.812 92.895 l -328.812 110.497 l h -328.812 110.497 m f* -0 g -q 1 0 0 -1 0 163.696091 cm 
-328.812 53.199 m 410.258 53.199 l 410.258 70.801 l 328.812 70.801 l -328.812 53.199 l h -328.812 53.199 m S Q -0.654902 0.317647 1 rg -330.262 145.696 m 383.422 145.696 l 383.422 128.095 l 330.262 128.095 l -330.262 145.696 l h -330.262 145.696 m f* -0 g -q 1 0 0 -1 0 163.696091 cm -330.262 18 m 383.422 18 l 383.422 35.602 l 330.262 35.602 l 330.262 18 -l h -330.262 18 m S Q -0.301961 0.654902 0.301961 rg -360.406 163.298 m 441.703 163.298 l 441.703 145.696 l 360.406 145.696 l -360.406 163.298 l h -360.406 163.298 m f* -0 g -q 1 0 0 -1 0 163.696091 cm -360.406 0.398 m 441.703 0.398 l 441.703 18 l 360.406 18 l 360.406 0.398 -l h -360.406 0.398 m S Q -0.301961 0.654902 0.301961 rg -385.086 145.696 m 478.281 145.696 l 478.281 128.095 l 385.086 128.095 l -385.086 145.696 l h -385.086 145.696 m f* -0 g -q 1 0 0 -1 0 163.696091 cm -385.086 18 m 478.281 18 l 478.281 35.602 l 385.086 35.602 l 385.086 18 -l h -385.086 18 m S Q -0.301961 0.654902 0.301961 rg -389.156 128.095 m 453.434 128.095 l 453.434 110.497 l 389.156 110.497 l -389.156 128.095 l h -389.156 128.095 m f* -0 g -q 1 0 0 -1 0 163.696091 cm -389.156 35.602 m 453.434 35.602 l 453.434 53.199 l 389.156 53.199 l -389.156 35.602 l h -389.156 35.602 m S Q -0.654902 0.317647 1 rg -412.273 110.497 m 555.293 110.497 l 555.293 92.895 l 412.273 92.895 l -412.273 110.497 l h -412.273 110.497 m f* -0 g -q 1 0 0 -1 0 163.696091 cm -412.273 53.199 m 555.293 53.199 l 555.293 70.801 l 412.273 70.801 l -412.273 53.199 l h -412.273 53.199 m S Q -0.654902 0.317647 1 rg -442.27 163.298 m 481.043 163.298 l 481.043 145.696 l 442.27 145.696 l -442.27 163.298 l h -442.27 163.298 m f* -0 g -q 1 0 0 -1 0 163.696091 cm -442.27 0.398 m 481.043 0.398 l 481.043 18 l 442.27 18 l 442.27 0.398 l -h -442.27 0.398 m S Q -0.654902 0.317647 1 rg -454.785 128.095 m 529.438 128.095 l 529.438 110.497 l 454.785 110.497 l -454.785 128.095 l h -454.785 128.095 m f* -0 g -q 1 0 0 -1 0 163.696091 cm -454.785 35.602 m 529.438 35.602 l 529.438 53.199 l 454.785 53.199 l -454.785 35.602 l h -454.785 35.602 m S Q -0.654902 0.317647 1 rg -479.332 145.696 m 543.594 145.696 l 543.594 128.095 l 479.332 128.095 l -479.332 145.696 l h -479.332 145.696 m f* -0 g -q 1 0 0 -1 0 163.696091 cm -479.332 18 m 543.594 18 l 543.594 35.602 l 479.332 35.602 l 479.332 18 -l h -479.332 18 m S Q -0.301961 0.654902 0.301961 rg -481.242 163.298 m 556.941 163.298 l 556.941 145.696 l 481.242 145.696 l -481.242 163.298 l h -481.242 163.298 m f* -0 g -q 1 0 0 -1 0 163.696091 cm -481.242 0.398 m 556.941 0.398 l 556.941 18 l 481.242 18 l 481.242 0.398 -l h -481.242 0.398 m S Q -0.301961 0.654902 0.301961 rg -529.836 128.095 m 592.621 128.095 l 592.621 110.497 l 529.836 110.497 l -529.836 128.095 l h -529.836 128.095 m f* -0 g -q 1 0 0 -1 0 163.696091 cm -529.836 35.602 m 592.621 35.602 l 592.621 53.199 l 529.836 53.199 l -529.836 35.602 l h -529.836 35.602 m S Q -0.654902 0.317647 1 rg -544.656 145.696 m 622.719 145.696 l 622.719 128.095 l 544.656 128.095 l -544.656 145.696 l h -544.656 145.696 m f* -0 g -q 1 0 0 -1 0 163.696091 cm -544.656 18 m 622.719 18 l 622.719 35.602 l 544.656 35.602 l 544.656 18 -l h -544.656 18 m S Q -0.654902 0.317647 1 rg -557.746 110.497 m 615.496 110.497 l 615.496 92.895 l 557.746 92.895 l -557.746 110.497 l h -557.746 110.497 m f* -0 g -q 1 0 0 -1 0 163.696091 cm -557.746 53.199 m 615.496 53.199 l 615.496 70.801 l 557.746 70.801 l -557.746 53.199 l h -557.746 53.199 m S Q -0.654902 0.317647 1 rg -566.812 163.298 m 614.324 163.298 l 614.324 145.696 l 566.812 145.696 l -566.812 
163.298 l h -566.812 163.298 m f* -0 g -q 1 0 0 -1 0 163.696091 cm -566.812 0.398 m 614.324 0.398 l 614.324 18 l 566.812 18 l 566.812 0.398 -l h -566.812 0.398 m S Q -0.301961 0.654902 0.301961 rg -593.141 128.11 m 636.805 128.11 l 636.805 110.481 l 593.141 110.481 l -593.141 128.11 l h -593.141 128.11 m f* -0 g -0.769764 w -q 1 0 0 -1 0 163.696091 cm -593.141 35.586 m 636.805 35.586 l 636.805 53.215 l 593.141 53.215 l -593.141 35.586 l h -593.141 35.586 m S Q -0.996078 0.996078 0.266667 rg -17.199 82.497 m 26 82.497 l 26 73.696 l 17.199 73.696 l 17.199 82.497 l -h -17.199 82.497 m f* -0 g -0.8 w -q 1 0 0 -1 0 163.696091 cm -17.199 81.199 m 26 81.199 l 26 90 l 17.199 90 l 17.199 81.199 l h -17.199 81.199 m S Q -BT -9.6 0 0 9.6 34.8 74.496091 Tm -/f-0-0 1 Tf -[<0e02>-1<0c1a>-1<0c>-1<0602>]TJ -ET -0.301961 0.654902 0.301961 rg -17.199 64.895 m 26 64.895 l 26 56.095 l 17.199 56.095 l 17.199 64.895 l -h -17.199 64.895 m f* -0 g -q 1 0 0 -1 0 163.696091 cm -17.199 98.801 m 26 98.801 l 26 107.602 l 17.199 107.602 l 17.199 98.801 -l h -17.199 98.801 m S Q -BT -9.6 0 0 9.6 34.8 56.896091 Tm -/f-0-0 1 Tf -[<0e02>-1<0c1a>-1<0c>-1<0607>]TJ -ET -0.4 0.529412 0.733333 rg -17.199 47.298 m 26 47.298 l 26 38.497 l 17.199 38.497 l 17.199 47.298 l -h -17.199 47.298 m f* -0 g -q 1 0 0 -1 0 163.696091 cm -17.199 116.398 m 26 116.398 l 26 125.199 l 17.199 125.199 l 17.199 -116.398 l h -17.199 116.398 m S Q -BT -9.6 0 0 9.6 34.8 39.296091 Tm -/f-0-0 1 Tf -[<0e02>-1<0c1a>-1<0c>-1<061b>]TJ -ET -0.654902 0.317647 1 rg -17.199 29.696 m 26 29.696 l 26 20.895 l 17.199 20.895 l 17.199 29.696 l -h -17.199 29.696 m f* -0 g -q 1 0 0 -1 0 163.696091 cm -17.199 134 m 26 134 l 26 142.801 l 17.199 142.801 l 17.199 134 l h -17.199 134 m S Q -BT -9.6 0 0 9.6 34.8 21.696091 Tm -/f-0-0 1 Tf -[<0e02>-1<0c1a>-1<0c>-1<060a>]TJ -ET -Q -showpage -%%Trailer -count op_count sub {pop} repeat -countdictstack dict_count sub {end} repeat -cairo_eps_state restore -%%EOF diff --git a/docs/book/tasks_nosort.eps b/docs/book/tasks_nosort.eps deleted file mode 100644 index ba9793f651..0000000000 --- a/docs/book/tasks_nosort.eps +++ /dev/null @@ -1,385 +0,0 @@ -%!PS-Adobe-3.0 EPSF-3.0 -%%Creator: cairo 1.10.2 (http://cairographics.org) -%%CreationDate: Fri Mar 30 23:56:39 2012 -%%Pages: 1 -%%BoundingBox: 0 -1 644 101 -%%DocumentData: Clean7Bit -%%LanguageLevel: 2 -%%EndComments -%%BeginProlog -/cairo_eps_state save def -/dict_count countdictstack def -/op_count count 1 sub def -userdict begin -/q { gsave } bind def -/Q { grestore } bind def -/cm { 6 array astore concat } bind def -/w { setlinewidth } bind def -/J { setlinecap } bind def -/j { setlinejoin } bind def -/M { setmiterlimit } bind def -/d { setdash } bind def -/m { moveto } bind def -/l { lineto } bind def -/c { curveto } bind def -/h { closepath } bind def -/re { exch dup neg 3 1 roll 5 3 roll moveto 0 rlineto - 0 exch rlineto 0 rlineto closepath } bind def -/S { stroke } bind def -/f { fill } bind def -/f* { eofill } bind def -/n { newpath } bind def -/W { clip } bind def -/W* { eoclip } bind def -/BT { } bind def -/ET { } bind def -/pdfmark where { pop globaldict /?pdfmark /exec load put } - { globaldict begin /?pdfmark /pop load def /pdfmark - /cleartomark load def end } ifelse -/BDC { mark 3 1 roll /BDC pdfmark } bind def -/EMC { mark /EMC pdfmark } bind def -/cairo_store_point { /cairo_point_y exch def /cairo_point_x exch def } def -/Tj { show currentpoint cairo_store_point } bind def -/TJ { - { - dup - type /stringtype eq - { show } { -0.001 mul 0 cairo_font_matrix dtransform 
rmoveto } ifelse - } forall - currentpoint cairo_store_point -} bind def -/cairo_selectfont { cairo_font_matrix aload pop pop pop 0 0 6 array astore - cairo_font exch selectfont cairo_point_x cairo_point_y moveto } bind def -/Tf { pop /cairo_font exch def /cairo_font_matrix where - { pop cairo_selectfont } if } bind def -/Td { matrix translate cairo_font_matrix matrix concatmatrix dup - /cairo_font_matrix exch def dup 4 get exch 5 get cairo_store_point - /cairo_font where { pop cairo_selectfont } if } bind def -/Tm { 2 copy 8 2 roll 6 array astore /cairo_font_matrix exch def - cairo_store_point /cairo_font where { pop cairo_selectfont } if } bind def -/g { setgray } bind def -/rg { setrgbcolor } bind def -/d1 { setcachedevice } bind def -%%EndProlog -11 dict begin -/FontType 42 def -/FontName /DejaVuSans def -/PaintType 0 def -/FontMatrix [ 1 0 0 1 0 0 ] def -/FontBBox [ 0 0 0 0 ] def -/Encoding 256 array def -0 1 255 { Encoding exch /.notdef put } for -Encoding 1 /uni004E put -Encoding 2 /uni006F put -Encoding 3 /uni0020 put -Encoding 4 /uni0070 put -Encoding 5 /uni0061 put -Encoding 6 /uni0072 put -Encoding 7 /uni0074 put -Encoding 8 /uni0069 put -Encoding 9 /uni0063 put -Encoding 10 /uni0075 put -Encoding 11 /uni006C put -Encoding 12 /uni0064 put -Encoding 13 /uni0065 put -Encoding 14 /uni0066 put -Encoding 15 /uni0022 put -Encoding 16 /uni0077 put -Encoding 17 /uni002D put -Encoding 18 /uni006A put -Encoding 19 /uni0032 put -Encoding 20 /uni006E put -Encoding 21 /uni0067 put -Encoding 22 /uni0073 put -Encoding 23 /uni006B put -Encoding 24 /uni0068 put -/CharStrings 25 dict dup begin -/.notdef 0 def -/uni004E 1 def -/uni006F 2 def -/uni0020 3 def -/uni0070 4 def -/uni0061 5 def -/uni0072 6 def -/uni0074 7 def -/uni0069 8 def -/uni0063 9 def -/uni0075 10 def -/uni006C 11 def -/uni0064 12 def -/uni0065 13 def -/uni0066 14 def -/uni0022 15 def -/uni0077 16 def -/uni002D 17 def -/uni006A 18 def -/uni0032 19 def -/uni006E 20 def -/uni0067 21 def -/uni0073 22 def -/uni006B 23 def -/uni0068 24 def -end readonly def -/sfnts [ -<00010000000a008000030020636d617000bbf15f000010dc000000706376742000691d390000 -114c000001fe6670676d7134766a0000134c000000ab676c796642204647000000ac00001030 -68656164f79ac5e7000013f800000036686865610cb8066a0000143000000024686d747869ce -0c7600001454000000646c6f63610000bad4000014b8000000686d6178700486067100001520 -00000020707265703b07f100000015400000056800020066fe96046605a400030007001a400c -04fb0006fb0108057f0204002fc4d4ec310010d4ecd4ec301311211125211121660400fc7303 -1bfce5fe96070ef8f2720629000100c90000053305d500090079401e07110102010211060706 -4207020300af0805060107021c0436071c00040a10fcecfcec11393931002f3cec323939304b -5358071004ed071004ed5922b21f0b01015d4030360238074802470769026607800207060109 -0615011a06460149065701580665016906790685018a0695019a069f0b105d005d1321011133 -1121011123c901100296c4fef0fd6ac405d5fb1f04e1fa2b04e1fb1f00020071ffe30475047b -000b0017004a401306b91200b90cb8128c1809120f51031215451810fcecf4ec310010e4f4ec -10ee3040233f197b007b067f077f087f097f0a7f0b7b0c7f0d7f0e7f0f7f107f117b12a019f0 -1911015d012206151416333236353426273200111000232200111000027394acab9593acac93 -f00112feeef0f1feef011103dfe7c9c9e7e8c8c7e99cfec8feecfeedfec70139011301140138 -0000000200bafe5604a4047b0010001c003e401b1ab9000e14b90508b80e8c01bd03bc1d1112 -0b471704000802461d10fcec3232f4ec310010e4e4e4f4c4ec10c4ee304009601e801ea01ee0 -1e04015d2511231133153e013332001110022322260134262322061514163332360173b9b93a -b17bcc00ffffcc7bb10238a79292a7a79292a7a8fdae060aaa6461febcfef8fef8febc6101eb 
-[remainder of this EPS figure omitted: embedded DejaVuSans font program, cairo drawing commands for the green/red task boxes, and a glyph-encoded caption and two-entry legend; BoundingBox 0 -1 644 101]
-%%EOF
diff --git a/docs/book/tasks_overview.dia b/docs/book/tasks_overview.dia
deleted file mode 100644
index d38390f612..0000000000
--- a/docs/book/tasks_overview.dia
+++ /dev/null
@@ -1,690 +0,0 @@
-[Dia diagram omitted (A4 page); node labels in file order: "Execute the build functions from user scripts", "Have the task generators generate the tasks", "Set the build order on the tasks", "Execute the tasks", "Save the BuildContext data to the cache", "Start", "End", "Read the BuildContext data from the cache"]
diff --git a/docs/book/tasks_sort.eps b/docs/book/tasks_sort.eps
deleted file mode 100644
index d7b95f961f..0000000000
--- a/docs/book/tasks_sort.eps
+++ /dev/null
@@ -1,369 +0,0 @@
-[EPS figure omitted: cairo 1.10.2 output dated Fri Mar 30 23:56:19 2012, BoundingBox 0 -1 644 101; embedded DejaVuSans font program, red/green task bars, and a glyph-encoded caption and two-entry legend]
diff --git a/docs/book/tasks_twogroups.eps b/docs/book/tasks_twogroups.eps
deleted file mode 100644
index 95085bfb87..0000000000
--- a/docs/book/tasks_twogroups.eps
+++ /dev/null
@@ -1,684 +0,0 @@
-[EPS figure omitted: cairo 1.8.10 output dated Thu Jul 29 16:55:39 2010, BoundingBox 0 0 637 164; embedded DejaVuSans font program, task bars in four colors, and a glyph-encoded caption and four-entry legend]
diff --git a/docs/book/waf-activity.dia b/docs/book/waf-activity.dia
deleted file mode 100644
index d30cbb19b8..0000000000
--- a/docs/book/waf-activity.dia
+++ /dev/null
@@ -1,1300 +0,0 @@
-[Dia activity diagram omitted (A4 page); node labels: "Call the function 'options'", "Load user scripts", "Parse the command-line options", "Failure?", "Unpack the waf library", "The waf library is present?", "Execute the command", "More commands to process?", "Command is 'configure', or the project is configured", "Error: configure the project", plus yes/no branch labels]
diff --git a/docs/book/waf.css b/docs/book/waf.css deleted file mode 100644 index 0e365379ce..0000000000 --- a/docs/book/waf.css +++ /dev/null @@ -1,28 +0,0 @@ -div.tableblock > table { - border: 1px solid gray; -} - -div#header h1 { - background: url('waf-64x64.png') no-repeat left center; - padding-left: 80px; - line-height: 80px; - height: 80px; -} - -div.title, caption.title { - text-align: center; - margin-bottom: 0.2em; -} - -div.tableblock > table th { - background-color: #F4F4F4; -} - -h1, h2, h3, h4, h5, h6, span#author, div.title, caption.title, div.admonitionblock .icon, div#toctitle, div.sidebar-title, div.image-title { - color: #333; -} - -body, div.sectionbody, div#toctitle { - font-family: 'Lucida Grande', Verdana, Arial, sans-serif; -} - diff --git a/docs/book/waf.txt b/docs/book/waf.txt deleted file mode 100644 index 1549b0a728..0000000000 --- a/docs/book/waf.txt +++ /dev/null @@ -1,54 +0,0 @@ -The Waf Book -============ -:author: Thomas Nagy -:quotes.++: -:numbered!: - -{set:PIC:{backend@docbook:.eps:.png}} - -[preface] -== Introduction - -Copyright (C) 2010-2011 Thomas Nagy - -Copies of this book may be redistributed, verbatim, and for non-commercial -purposes. The license for this book is - http://creativecommons.org/licenses/by-nc-nd/3.0/[by-nc-nd license]. - -=== A word on build systems - -As software is becoming increasingly complex, the process of creating software is becoming more complex too. Today's software uses various languages, requires various compilers, and the input data is spread across many files. - -Software is now used to express the process of building software; it can take the form of simple scripts (shell scripts, makefiles), generators (CMake, Qmake), or complete applications (SCons, Maven, Waf). The term `build system' is used to designate the tools used to build applications. - -=== The Waf framework - -Build systems make assumptions about the software they are trying to build, and are typically limited when it comes to processing other languages or different projects. For example, Ant is better suited than Make for managing Java projects, but is more limited than Make for managing simple C projects. Programming tools are evolving constantly, making the creation of a complete build system for end-users impossible. - -The Waf framework is somewhat different from traditional build systems in the sense that it does not provide support for a specific language. Rather, the focus is to support the major use cases encountered when working on a software project. As such, it is essentially a library of components that are suitable for use in a build system, with an emphasis on extensibility. Although the default distribution contains various plugins for several programming languages and different tools (c, d, ocaml, java, etc), it is by no means a frozen product. Creating new extensions is both a standard and a recommended practice. - -=== Objectives of this book - -The objective of this book is to expose the use of the Waf build system through the use of Waf in practice, the description of the Waf extension system, and an overview of the Waf internals. We hope that this book will serve as a reference for both new and advanced users. Although this book does not deal with build systems in general, a secondary objective is to illustrate quite a few new techniques and patterns through numerous examples.
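[editorial note] To ground the description above, here is a minimal sketch of a top-level wscript for a hypothetical one-file C project; the file name main.c and the target name app are illustrative only (they are not taken from the book sources), and the script would typically be driven with ./waf configure build:

#! /usr/bin/env python
# encoding: utf-8
# Minimal illustrative wscript (hypothetical project, not part of the deleted book sources)

top = '.'      # directory that contains this wscript
out = 'build'  # build directory created by 'waf configure'

def options(opt):
	# register the command-line options provided by the C compiler detection tool
	opt.load('compiler_c')

def configure(conf):
	# detect a C compiler and store the result in the configuration cache
	conf.load('compiler_c')

def build(bld):
	# declare a task generator that compiles main.c and links the 'app' program
	bld.program(source='main.c', target='app')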
- -The chapters are ordered by difficulty, starting from the basic use of Waf and Python, and diving gradually into the most difficult topics. It is therefore recommended to read the chapters in order. It is also possible to start by looking at the https://github.com/waf-project/waf/tree/master/demos[examples] from the Waf distribution before starting the reading. - -:numbered: - -include::download.txt[] -include::execution.txt[] -include::configuration.txt[] -include::build.txt[] -include::nodes.txt[] -include::advbuild.txt[] -include::tasks.txt[] -include::make_like_rules.txt[] -include::chains.txt[] -include::task_generators.txt[] -include::cprog.txt[] -include::scenarios.txt[] -include::development.txt[] -include::architecture.txt[] -include::conclusion.txt[] -include::glossary.txt[] - diff --git a/docs/book/waflib.semd b/docs/book/waflib.semd deleted file mode 100644 index b671590dc6..0000000000 Binary files a/docs/book/waflib.semd and /dev/null differ diff --git a/docs/book/wscript b/docs/book/wscript deleted file mode 100644 index efe012b190..0000000000 --- a/docs/book/wscript +++ /dev/null @@ -1,224 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2006-2010 (ita) - -""" -call 'waf --targets=waf.pdf' or use 'waf list' to see the targets available -""" - -VERSION='0.0.1' -APPNAME='wafdocs' - -import os, re, shutil -from waflib import TaskGen - -top = '.' -out = 'build' - -re_xi = re.compile('''^(include|image)::([^.]*.(txt|\\{PIC\\}))\[''', re.M) -def ascii_doc_scan(self): - p = self.inputs[0].parent - node_lst = [self.inputs[0]] - seen = [] - depnodes = [] - while node_lst: - nd = node_lst.pop(0) - if nd in seen: continue - seen.append(nd) - - code = nd.read() - for m in re_xi.finditer(code): - name = m.group(2) - if m.group(3) == '{PIC}': - - ext = '.eps' - if self.generator.rule.rfind('A2X') > 0: - ext = '.png' - - k = p.find_resource(name.replace('{PIC}', ext)) - if k: - depnodes.append(k) - else: - k = p.find_resource(name) - if k: - depnodes.append(k) - node_lst.append(k) - return [depnodes, ()] - -import re -def scansize(self): - - base, _, ext = self.inputs[0].name.partition('.') - - name = 'image::%s\\{PIC\\}\\[.*,(width|height)=(\\d+)' % base - re_src = re.compile(name) - lst = self.inputs[0].parent.get_src().ant_glob('*.txt') - for x in lst: - m = re_src.search(x.read()) - if m: - val = str(int(1.6 * int(m.group(2)))) - if m.group(1) == 'width': - w = val - h = "800" - else: - w = "800" - h = val - - if ext == 'eps': - code = '-geometry %sx%s' % (w, h) - elif ext == 'dia': - if m.group(1) == 'width': - h = '' - else: - w = '' - code = '--size %sx%s' % (w, h) - elif ext == 'semd': - if m.group(1) == 'width': - code = '--width=%s' % w - else: - code = '--height=%s' % h - else: - code = '-Gsize="%s,%s"' % (w, h) - break - else: - return ([], '') - - return ([], code) - -def options(opt): - opt.add_option('--exe', action='store_true', default=False, help='Execute the program after it is compiled') - -def configure(conf): - conf.find_program('a2x', var='A2X') - conf.find_program('asciidoc', var='ADOC') - conf.find_program('dia', var='DIA') - conf.find_program('convert', var='CONVERT') - conf.find_program('source-highlight', var='SOURCE_HIGHLIGHT') - conf.find_program('dot') - conf.find_program('semantik-d') - -def build(bld): - - #bld(features='subst', is_copy=True, source='asciidoc-dblatex.sty asciidoc-dblatex.xsl', target='asciidoc-dblatex.sty asciidoc-dblatex.xsl') - - for x in bld.path.ant_glob('*.eps'): - bld(features='subst', source=x.name, 
target=x.name, is_copy=True) - bld(rule='${CONVERT} ${bld.raw_deps[tsk.uid()]} -density 600 ${SRC} ${TGT}', source=x, target=x.change_ext('.png'), scan=scansize) - - for x in bld.path.ant_glob('*.dot'): - tg = bld(rule='${DOT} -Teps -o${TGT} ${SRC}', source=x, target=x.change_ext('.eps')) - tg = bld(rule='${DOT} -Tpng -o${TGT} ${SRC}', source=x, target=x.change_ext('.png'), scan=scansize) - #tg = bld(rule='${CONVERT} ${bld.raw_deps[tsk.uid()]} ${SRC} ${TGT}', source=x.change_ext('.eps'), target=x.change_ext('.png'), scan=scansize) - - for x in bld.path.ant_glob('*.dia'): - tg = bld(rule='${DIA} -t eps ${SRC} -e ${TGT}', source=x, target=x.change_ext('.eps')) - tg = bld(rule='${DIA} -t png ${SRC} -e ${TGT}', source=x, target=x.change_ext('.png'), scan=scansize) - #bld(rule='${CONVERT} ${bld.raw_deps[tsk.uid()]} ${SRC} ${TGT}', source=tg.target, target=tg.target.change_ext('.png'), scan=scansize) - - for x in bld.path.ant_glob('*.semd'): - bld(rule='${SEMANTIK_D} ${SRC[0].abspath()} -o ${TGT[0].abspath()}', source=x, target=x.change_ext('.svg')) - bld(rule='${SEMANTIK_D} ${SRC[0].abspath()} -o ${TGT[0].abspath()}', source=x, target=x.change_ext('.pdf')) - bld(rule='${SEMANTIK_D} ${SIZEPARAMS} ${SRC[0].abspath()} -o ${TGT[0].abspath()}', source=x, target=x.change_ext('.png'), features="sizer") - - for x in bld.path.ant_glob('pics/*.png'): - bld(features='subst', source=x, target=x.name, is_copy=True) - - for x in bld.path.ant_glob('callouts/*.png'): - bld(features='subst', source=x.name, target=x.name, is_copy=True, path=x.parent) - - #bld(rule='mkdir -p ${SRC[0].parent.get_bld().abspath()} && cp ${SRC} ${SRC[0].parent.get_bld().abspath()}', - # source=bld.path.ant_glob('callouts/*.png')) - - for x in 'shishell.lang symbols.lang default.style lang.map waf.css'.split(): - bld(features='subst', source=x, target=x, is_copy=True) - - bld.add_group() # separator, the documents may require any of the pictures from above - - bld(rule='${ADOC} -a icons=true -a stylesheet=${SRC[1].abspath()} -a iconsdir=. -a toc -d book -o ${TGT} ${SRC[0].abspath()}', - source='waf.txt waf.css', target='index.html', scan=ascii_doc_scan) - - bld(rule='${A2X} -L -a toc --icons-dir=. 
--icons -D ${gen.path.get_bld().abspath()} \ - -d book -f pdf --dblatex-opts "-s ${SRC[1].abspath()} -p ${SRC[2].abspath()}" ${SRC[0].bldpath()}', - shell=True, - source='waf.txt asciidoc-dblatex.sty asciidoc-dblatex.xsl', target='waf.pdf', scan=ascii_doc_scan) - - #bld(rule='ln -sf single.html index.html', shell=True) - - if bld.options.exe: - def exe(ctx): - bld.exec_command('firefox build/index.html') - bld.add_post_fun(exe) - -@TaskGen.feature('sizer') -@TaskGen.after_method('process_rule') -def process_sizes(self): - - try: - tbl = self.bld.size_table - except AttributeError: - tbl = self.bld.size_table = make_size_table(self) - - base, _, ext = self.tasks[0].outputs[0].name.partition('.') - if ext == 'png' and base in tbl: - self.env.SIZEPARAMS = "--%s=%s" % tbl[base] - -@TaskGen.taskgen_method -def make_size_table(self): - name = 'image::(.*?)\\{PIC\\}\\[.*,(width|height)=(\\d+)' - re_src = re.compile(name) - - tbl = {} - - lst = self.path.ant_glob('*.txt') - for x in lst: - for m in re.finditer(re_src, x.read()): - tbl[m.group(1)] = (m.group(2), m.group(3)) - - return tbl - - - if 0: - val = str(int(1.6 * int(m.group(2)))) - if m.group(1) == 'width': - w = val - h = "800" - else: - w = "800" - h = val - - if ext == 'eps': - code = '-geometry %sx%s' % (w, h) - elif ext == 'dia': - if m.group(1) == 'width': - h = '' - else: - w = '' - code = '--size %sx%s' % (w, h) - elif ext == 'semd': - if m.group(1) == 'width': - code = '--width=%s' % w - else: - code = '--height=%s' % h - else: - code = '-Gsize="%s,%s"' % (w, h) - break - else: - return ([], '') - - return ([], code) - - -""" -For vim highlighting: -cp vim/syntax/asciidoc.vim /usr/share/vim/site/syntax/ -cp vim/ftdetect/asciidoc_filetype.vim /usr/share/vim/site/ftdetect/ - -When adding an eps from a svg file, convert it with inscape first -convert (imagemagick) does not process svg files too well - -colors: - yellow fffea6 - green aef9a5 - blue d2d5ff -""" - diff --git a/docs/slides/presentation/gfx/wscript b/docs/slides/presentation/gfx/wscript index 958ba0c93f..94c8549756 100644 --- a/docs/slides/presentation/gfx/wscript +++ b/docs/slides/presentation/gfx/wscript @@ -14,14 +14,14 @@ def build(bld): for x in bld.path.ant_glob('*.svg'): bld( rule='${CONVERT} -density 600 ${SRC} ${TGT}', - source=x, + source=[x], target=x.change_ext('.png'), ) for x in bld.path.ant_glob('*.dia'): bld( rule='${DIA} -t png ${SRC} -e ${TGT}', - source=x, + source=[x], target=x.change_ext('.png'), ) diff --git a/docs/sphinx/Build.rst b/docs/sphinx/Build.rst index 10de154173..ec00c6e380 100644 --- a/docs/sphinx/Build.rst +++ b/docs/sphinx/Build.rst @@ -2,4 +2,5 @@ Build ----- .. automodule:: waflib.Build + :members: diff --git a/docs/sphinx/ConfigSet.rst b/docs/sphinx/ConfigSet.rst index 36a2f278f4..3e5ec9f440 100644 --- a/docs/sphinx/ConfigSet.rst +++ b/docs/sphinx/ConfigSet.rst @@ -2,4 +2,5 @@ ConfigSet --------- .. automodule:: waflib.ConfigSet + :members: diff --git a/docs/sphinx/Configure.rst b/docs/sphinx/Configure.rst index 46b423ba0c..3e63516132 100644 --- a/docs/sphinx/Configure.rst +++ b/docs/sphinx/Configure.rst @@ -2,4 +2,5 @@ Configure --------- .. automodule:: waflib.Configure + :members: diff --git a/docs/sphinx/Context.rst b/docs/sphinx/Context.rst index b1f9074749..fb1556bb44 100644 --- a/docs/sphinx/Context.rst +++ b/docs/sphinx/Context.rst @@ -2,4 +2,5 @@ Context --------- .. 
automodule:: waflib.Context + :members: diff --git a/docs/sphinx/Errors.rst b/docs/sphinx/Errors.rst index 03d49dfdb4..da10d06a44 100644 --- a/docs/sphinx/Errors.rst +++ b/docs/sphinx/Errors.rst @@ -2,4 +2,5 @@ Errors ------ .. automodule:: waflib.Errors + :members: diff --git a/docs/sphinx/Logs.rst b/docs/sphinx/Logs.rst index 37ce2b8b8a..d71427d9a7 100644 --- a/docs/sphinx/Logs.rst +++ b/docs/sphinx/Logs.rst @@ -2,4 +2,5 @@ Logs ---- .. automodule:: waflib.Logs + :members: diff --git a/docs/sphinx/Node.rst b/docs/sphinx/Node.rst index 6fd9bc85a9..b40a709b29 100644 --- a/docs/sphinx/Node.rst +++ b/docs/sphinx/Node.rst @@ -2,4 +2,5 @@ Node ---- .. automodule:: waflib.Node + :members: diff --git a/docs/sphinx/Options.rst b/docs/sphinx/Options.rst index b7a2e917e8..142dd976ad 100644 --- a/docs/sphinx/Options.rst +++ b/docs/sphinx/Options.rst @@ -2,4 +2,5 @@ Options ------- .. automodule:: waflib.Options + :members: diff --git a/docs/sphinx/Runner.rst b/docs/sphinx/Runner.rst index fb25542945..aaccdabf8e 100644 --- a/docs/sphinx/Runner.rst +++ b/docs/sphinx/Runner.rst @@ -2,4 +2,5 @@ Runner ------ .. automodule:: waflib.Runner + :members: diff --git a/docs/sphinx/Scripting.rst b/docs/sphinx/Scripting.rst index d0f134cf55..395954cdc0 100644 --- a/docs/sphinx/Scripting.rst +++ b/docs/sphinx/Scripting.rst @@ -2,4 +2,5 @@ Scripting --------- .. automodule:: waflib.Scripting + :members: diff --git a/docs/sphinx/Task.rst b/docs/sphinx/Task.rst index 222a9a3931..a622f8180e 100644 --- a/docs/sphinx/Task.rst +++ b/docs/sphinx/Task.rst @@ -2,4 +2,5 @@ Task ---- .. automodule:: waflib.Task + :members: diff --git a/docs/sphinx/TaskGen.rst b/docs/sphinx/TaskGen.rst index 55e43406c8..05f1d831b5 100644 --- a/docs/sphinx/TaskGen.rst +++ b/docs/sphinx/TaskGen.rst @@ -2,4 +2,5 @@ TaskGen ------- .. automodule:: waflib.TaskGen + :members: diff --git a/docs/sphinx/Utils.rst b/docs/sphinx/Utils.rst index d3ec33d4fd..69f94c63f2 100644 --- a/docs/sphinx/Utils.rst +++ b/docs/sphinx/Utils.rst @@ -2,4 +2,5 @@ Utils ----- .. automodule:: waflib.Utils + :members: diff --git a/docs/sphinx/_images/waf-64x64.png b/docs/sphinx/_images/waf-64x64.png index cbe55f639e..2ce7296ffe 100644 Binary files a/docs/sphinx/_images/waf-64x64.png and b/docs/sphinx/_images/waf-64x64.png differ diff --git a/docs/sphinx/_templates/indexcontent.html b/docs/sphinx/_templates/indexcontent.html index b1a1028138..0fcb017837 100644 --- a/docs/sphinx/_templates/indexcontent.html +++ b/docs/sphinx/_templates/indexcontent.html @@ -1,7 +1,11 @@ -{% extends "defindex.html" %} -{% block tables %} +{% extends "layout.html" %} +{%- block htmltitle -%} +{{ shorttitle }} +{%- endblock -%} +{% block body %} +

{{ docstitle|e }}

+

{% trans %}Parts of the documentation:{% endtrans %}

-

Parts of the documentation:

diff --git a/docs/sphinx/about.rst b/docs/sphinx/about.rst index 4ebbbbbc9c..b58e86dd68 100644 --- a/docs/sphinx/about.rst +++ b/docs/sphinx/about.rst @@ -3,13 +3,5 @@ About this documentation ------------------------ -These documents are generated from `reStructuredText`_ sources by `Sphinx`_, a -document processor specifically written for the Python documentation. - -.. _reStructuredText: http://docutils.sf.net/rst.html -.. _Sphinx: http://sphinx.pocoo.org/ - -The development of the documentation takes place on the mailing-list -http://groups.google.com/group/waf-users. We are always looking for volunteers wanting -to help with the docs, so feel free to send a mail there! +This is the automatically generated documentation for the Waf project. diff --git a/docs/sphinx/conf.py b/docs/sphinx/conf.py index 8c47195d3c..a80192705f 100644 --- a/docs/sphinx/conf.py +++ b/docs/sphinx/conf.py @@ -11,7 +11,7 @@ # All configuration values have a default; values that are commented out # serve to show the default. -import sys, os +import sys, os, re # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the @@ -19,6 +19,16 @@ sys.path.insert(0, os.path.abspath(os.path.join('..', ".."))) sys.path.append(os.path.abspath('.')) +graphviz_output_format = 'svg' + +html_theme_options = { + "body_min_width": "none", + "body_max_width": "none", +} + + +inheritance_graph_attrs = dict(rankdir="LR", size='""', fontsize=14, ratio='compress') + # monkey patch a few waf classes for documentation purposes! #----------------------------------------------------------- @@ -96,8 +106,7 @@ def deco(func): exclude_taskgen.append(func.__name__) setattr(task_gen, func.__name__, func) for fun_name in k: - if not func.__name__ in task_gen.prec[fun_name]: - task_gen.prec[fun_name].append(func.__name__) + task_gen.prec[func.__name__].add(fun_name) fix_fun_doc(func) append_doc(func, 'before', k) return func @@ -110,8 +119,7 @@ def deco(func): exclude_taskgen.append(func.__name__) setattr(task_gen, func.__name__, func) for fun_name in k: - if not fun_name in task_gen.prec[func.__name__]: - task_gen.prec[func.__name__].append(fun_name) + task_gen.prec[fun_name].add(func.__name__) fix_fun_doc(func) append_doc(func, 'after', k) return func @@ -134,7 +142,7 @@ def deco(func): Task.__dict__['post_run'].__doc__ = "Update the cache files (executed by threads). Override in subclasses." -from waflib import Configure, Build +from waflib import Configure, Build, Errors confmeths = [] def conf(f): def fun(*k, **kw): @@ -170,8 +178,9 @@ def configure(ctx): ctx.myhelper() """ - - +from waflib.Tools import asm +del asm.__dict__['link_task'] +del asm.__dict__['stlink_task'] # Import all tools and build tool->feature map tool_to_features = {} @@ -199,7 +208,7 @@ def configure(ctx): for func_name in funcs: thefunc = getattr(TaskGen.task_gen, func_name, None) if getattr(thefunc, "__name__", None) is None: continue - for feat in TaskGen.feats.keys(): + for feat in TaskGen.feats: funcs = list(TaskGen.feats[feat]) if func_name in funcs: if x not in tool_to_features: @@ -207,7 +216,7 @@ def configure(ctx): tool_to_features[x].append(feat) txt = "" - txt += "%s\n%s\n\n.. automodule:: waflib.Tools.%s\n\n" % (x, "="*len(x), x) + txt += "%s\n%s\n\n.. 
automodule:: waflib.Tools.%s\n :members:\n\n" % (x, "="*len(x), x) if x in tool_to_features: txt += "Features defined in this module:" for feat in sorted(list(set(tool_to_features[x]))): @@ -229,36 +238,43 @@ def configure(ctx): links = [] allmeths = set(TaskGen.feats[z]) - for x in meths: - for y in TaskGen.task_gen.prec.get(x, []): - links.append((x, y)) - allmeths.add(x) - allmeths.add(y) + for x, lst in TaskGen.task_gen.prec.items(): + if x in meths: + for y in lst: + links.append((x, y)) + allmeths.add(y) + else: + for y in lst: + if y in meths: + links.append((x, y)) + allmeths.add(x) color = ',fillcolor="#fffea6",style=filled' ms = [] for x in allmeths: try: m = TaskGen.task_gen.__dict__[x] - except: + except KeyError: raise ValueError("undefined method %r" % x) k = "%s.html#%s.%s" % (m.__module__.split('.')[-1], m.__module__, m.__name__) if str(m.__module__).find('.Tools') > 0: k = 'tools/' + k + k = '../' + k - ms.append('\t"%s" [style="setlinewidth(0.5)",URL="%s",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10%s];' % (x, k, x in TaskGen.feats[z] and color or '')) + ms.append('\t\t"%s" [style="setlinewidth(0.5)",URL="%s",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10%s];' % (x, k, x in TaskGen.feats[z] and color or '')) for x, y in links: - ms.append('\t"%s" -> "%s" [arrowsize=0.5,style="setlinewidth(0.5)"];' % (x, y)) + ms.append('\t\t"%s" -> "%s" [arrowsize=0.5,style="setlinewidth(0.5)"];' % (x, y)) - rs = '\tdigraph feature_%s {\n\tsize="8.0, 12.0";\n\t%s\n\t}\n' % (z == '*' and 'all' or z, '\n'.join(ms)) + #rs = '\tdigraph feature_%s {\n\t\tsize="8.0, 12.0";\n%s\n\t}\n' % (z == '*' and 'all' or z, '\n'.join(ms)) + rs = '\tdigraph feature_%s {\n\t\t\n%s\n\t}\n' % (z == '*' and 'all' or z, '\n'.join(ms)) title = "Feature %s" % (z == '*' and '\\*' or z) title += "\n" + len(title) * '=' accu.append("%s\n\n.. graphviz::\n\n%s\n\n" % (title, rs)) -f = open('tmpmap', 'w') +f = open('featuremap.rst', 'w') f.write(""".. _featuremap: Feature reference @@ -303,7 +319,7 @@ def configure(ctx): accu.append('.. _%s: %s#waflib.%s.%s\n' % (x, d, modname, x)) accu.append('* %s_\n' % x) -f = open('tmpconf', 'w') +f = open('confmap.rst', 'w') f.write(""".. _confmap: Configuration methods @@ -325,7 +341,7 @@ def configure(ctx): # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. -extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo', 'sphinx.ext.pngmath', 'sphinx.ext.inheritance_diagram', 'sphinx.ext.graphviz', 'sphinx.ext.viewcode'] +extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo', 'sphinx.ext.imgmath', 'sphinx.ext.inheritance_diagram', 'sphinx.ext.graphviz', 'sphinx.ext.viewcode'] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] @@ -341,16 +357,21 @@ def configure(ctx): # General information about the project. project = u'Waf' -copyright = u'2005-2015, Thomas Nagy' +copyright = u'2005-2023 waf.io' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. -version = '1.8.8' +#version = '1.8.10' # The full version, including alpha/beta/rc tags. 
-release = '1.8.8' +#release = version +# +with open('../../waflib/Context.py', 'r') as f: + txt = f.read() + m = re.compile('WAFVERSION=[\'"]([^\'"]+)', re.M).search(txt) + version = release = m.group(1) # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. @@ -391,7 +412,14 @@ def configure(ctx): # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. -html_theme = 'default' +try: + from sphinx import version_info +except ImportError: + version_info = None +if version_info and (1, 3) <= version_info: + html_theme = 'classic' +else: + html_theme = 'default' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the @@ -469,17 +497,15 @@ def configure(ctx): # -- Options for LaTeX output -------------------------------------------------- -# The paper size ('letter' or 'a4'). -#latex_paper_size = 'letter' - -# The font size ('10pt', '11pt' or '12pt'). -#latex_font_size = '10pt' +latex_elements = { + 'papersize':'a4paper', +} # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ ('index', 'waf.tex', u'waf Documentation', - u'Thomas Nagy', 'manual'), + u'waf.io', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of @@ -512,7 +538,7 @@ def configure(ctx): # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'waf', u'waf Documentation', - [u'Thomas Nagy'], 1) + [u'waf.io'], 1) ] #autodoc_default_flags = ['members', 'no-undoc-members', 'show-inheritance'] @@ -523,13 +549,11 @@ def maybe_skip_member(app, what, name, obj, skip, options): # from http://sphinx.pocoo.org/ext/autodoc.html#event-autodoc-skip-member # param name: the fully qualified name of the object <- it is not, the name does not contain the module path - if name == 'Nod3': + if name in ('__doc__', '__module__', 'Nod3', '__weakref__'): return True global exclude_taskgen if what == 'class' and name in exclude_taskgen: return True - if name == '__weakref__': - return True if obj.__doc__: return False diff --git a/docs/sphinx/confmap.rst b/docs/sphinx/confmap.rst index 23eec1c468..5832d1a8e0 100644 --- a/docs/sphinx/confmap.rst +++ b/docs/sphinx/confmap.rst @@ -13,6 +13,10 @@ Configuration methods * add_os_flags_ +.. _add_qt5_rpath: tools/qt5.html#waflib.Tools.qt5.add_qt5_rpath + +* add_qt5_rpath_ + .. _autodetect: tools/msvc.html#waflib.Tools.msvc.autodetect * autodetect_ @@ -45,6 +49,14 @@ Configuration methods * check_dlibrary_ +.. _check_endianness: tools/c_tests.html#waflib.Tools.c_tests.check_endianness + +* check_endianness_ + +.. _check_fc: tools/fc_config.html#waflib.Tools.fc_config.check_fc + +* check_fc_ + .. _check_fortran: tools/fc_config.html#waflib.Tools.fc_config.check_fortran * check_fortran_ @@ -65,6 +77,14 @@ Configuration methods * check_fortran_verbose_flag_ +.. _check_gcc_o_space: tools/c_config.html#waflib.Tools.c_config.check_gcc_o_space + +* check_gcc_o_space_ + +.. _check_gfortran_o_space: tools/fc_config.html#waflib.Tools.fc_config.check_gfortran_o_space + +* check_gfortran_o_space_ + .. _check_inline: tools/c_tests.html#waflib.Tools.c_tests.check_inline * check_inline_ @@ -121,6 +141,10 @@ Configuration methods * check_ruby_ext_devel_ +.. _check_ruby_module: tools/ruby.html#waflib.Tools.ruby.check_ruby_module + +* check_ruby_module_ + .. 
_check_ruby_version: tools/ruby.html#waflib.Tools.ruby.check_ruby_version * check_ruby_version_ @@ -153,6 +177,10 @@ Configuration methods * common_flags_ldc_ +.. _common_flags_ldc2: tools/ldc2.html#waflib.Tools.ldc2.common_flags_ldc2 + +* common_flags_ldc2_ + .. _cxx_add_flags: tools/c_config.html#waflib.Tools.c_config.cxx_add_flags * cxx_add_flags_ @@ -173,10 +201,26 @@ Configuration methods * define_cond_ +.. _detect_ifort: tools/ifort.html#waflib.Tools.ifort.detect_ifort + +* detect_ifort_ + +.. _detect_msvc: tools/msvc.html#waflib.Tools.msvc.detect_msvc + +* detect_msvc_ + +.. _detect_openmp: tools/fc_config.html#waflib.Tools.fc_config.detect_openmp + +* detect_openmp_ + .. _exec_cfg: tools/c_config.html#waflib.Tools.c_config.exec_cfg * exec_cfg_ +.. _fc_add_flags: tools/fc_config.html#waflib.Tools.fc_config.fc_add_flags + +* fc_add_flags_ + .. _fc_flags: tools/fc_config.html#waflib.Tools.fc_config.fc_flags * fc_flags_ @@ -185,6 +229,14 @@ Configuration methods * find_ar_ +.. _find_clang: tools/clang.html#waflib.Tools.clang.find_clang + +* find_clang_ + +.. _find_clangxx: tools/clangxx.html#waflib.Tools.clangxx.find_clangxx + +* find_clangxx_ + .. _find_dmd: tools/dmd.html#waflib.Tools.dmd.find_dmd * find_dmd_ @@ -209,6 +261,22 @@ Configuration methods * find_gfortran_ +.. _find_glib_compile_resources: tools/glib2.html#waflib.Tools.glib2.find_glib_compile_resources + +* find_glib_compile_resources_ + +.. _find_glib_compile_schemas: tools/glib2.html#waflib.Tools.glib2.find_glib_compile_schemas + +* find_glib_compile_schemas_ + +.. _find_glib_genmarshal: tools/glib2.html#waflib.Tools.glib2.find_glib_genmarshal + +* find_glib_genmarshal_ + +.. _find_glib_mkenums: tools/glib2.html#waflib.Tools.glib2.find_glib_mkenums + +* find_glib_mkenums_ + .. _find_gxx: tools/gxx.html#waflib.Tools.gxx.find_gxx * find_gxx_ @@ -225,10 +293,30 @@ Configuration methods * find_ifort_ +.. _find_ifort_win32: tools/ifort.html#waflib.Tools.ifort.find_ifort_win32 + +* find_ifort_win32_ + +.. _find_intltool_merge: tools/intltool.html#waflib.Tools.intltool.find_intltool_merge + +* find_intltool_merge_ + +.. _find_irixcc: tools/irixcc.html#waflib.Tools.irixcc.find_irixcc + +* find_irixcc_ + +.. _find_ldc2: tools/ldc2.html#waflib.Tools.ldc2.find_ldc2 + +* find_ldc2_ + .. _find_lt_names_msvc: tools/msvc.html#waflib.Tools.msvc.find_lt_names_msvc * find_lt_names_msvc_ +.. _find_msgfmt: tools/intltool.html#waflib.Tools.intltool.find_msgfmt + +* find_msgfmt_ + .. _find_msvc: tools/msvc.html#waflib.Tools.msvc.find_msvc * find_msvc_ @@ -241,10 +329,22 @@ Configuration methods * find_program_ +.. _find_qt5_binaries: tools/qt5.html#waflib.Tools.qt5.find_qt5_binaries + +* find_qt5_binaries_ + +.. _find_qt5_libraries: tools/qt5.html#waflib.Tools.qt5.find_qt5_libraries + +* find_qt5_libraries_ + .. _find_scc: tools/suncc.html#waflib.Tools.suncc.find_scc * find_scc_ +.. _find_single_qt5_lib: tools/qt5.html#waflib.Tools.qt5.find_single_qt5_lib + +* find_single_qt5_lib_ + .. _find_sxx: tools/suncxx.html#waflib.Tools.suncxx.find_sxx * find_sxx_ @@ -297,10 +397,34 @@ Configuration methods * gather_icl_versions_ +.. _gather_ifort_versions: tools/ifort.html#waflib.Tools.ifort.gather_ifort_versions + +* gather_ifort_versions_ + +.. _gather_intel_composer_versions: tools/msvc.html#waflib.Tools.msvc.gather_intel_composer_versions + +* gather_intel_composer_versions_ + +.. _gather_msvc_targets: tools/msvc.html#waflib.Tools.msvc.gather_msvc_targets + +* gather_msvc_targets_ + .. 
_gather_msvc_versions: tools/msvc.html#waflib.Tools.msvc.gather_msvc_versions * gather_msvc_versions_ +.. _gather_vswhere_versions: tools/msvc.html#waflib.Tools.msvc.gather_vswhere_versions + +* gather_vswhere_versions_ + +.. _gather_wince_targets: tools/msvc.html#waflib.Tools.msvc.gather_wince_targets + +* gather_wince_targets_ + +.. _gather_winphone_targets: tools/msvc.html#waflib.Tools.msvc.gather_winphone_targets + +* gather_winphone_targets_ + .. _gather_wsdk_versions: tools/msvc.html#waflib.Tools.msvc.gather_wsdk_versions * gather_wsdk_versions_ @@ -325,6 +449,18 @@ Configuration methods * gcc_modifier_hpux_ +.. _gcc_modifier_openbsd: tools/gcc.html#waflib.Tools.gcc.gcc_modifier_openbsd + +* gcc_modifier_openbsd_ + +.. _gcc_modifier_osf1V: tools/gxx.html#waflib.Tools.gxx.gcc_modifier_osf1V + +* gcc_modifier_osf1V_ + +.. _gcc_modifier_osf1V: tools/gxx.html#waflib.Tools.gxx.gcc_modifier_osf1V + +* gcc_modifier_osf1V_ + .. _gcc_modifier_platform: tools/gcc.html#waflib.Tools.gcc.gcc_modifier_platform * gcc_modifier_platform_ @@ -345,6 +481,10 @@ Configuration methods * get_define_ +.. _get_define_comment: tools/c_config.html#waflib.Tools.c_config.get_define_comment + +* get_define_comment_ + .. _get_g95_version: tools/g95.html#waflib.Tools.g95.get_g95_version * get_g95_version_ @@ -357,6 +497,14 @@ Configuration methods * get_ifort_version_ +.. _get_ifort_version_win32: tools/ifort.html#waflib.Tools.ifort.get_ifort_version_win32 + +* get_ifort_version_win32_ + +.. _get_ifort_versions: tools/ifort.html#waflib.Tools.ifort.get_ifort_versions + +* get_ifort_versions_ + .. _get_msvc_version: tools/msvc.html#waflib.Tools.msvc.get_msvc_version * get_msvc_version_ @@ -369,6 +517,14 @@ Configuration methods * get_python_variables_ +.. _get_suncc_version: tools/c_config.html#waflib.Tools.c_config.get_suncc_version + +* get_suncc_version_ + +.. _get_xlc_version: tools/c_config.html#waflib.Tools.c_config.get_xlc_version + +* get_xlc_version_ + .. _gfortran_flags: tools/gfortran.html#waflib.Tools.gfortran.gfortran_flags * gfortran_flags_ @@ -409,6 +565,10 @@ Configuration methods * gxx_modifier_hpux_ +.. _gxx_modifier_openbsd: tools/gxx.html#waflib.Tools.gxx.gxx_modifier_openbsd + +* gxx_modifier_openbsd_ + .. _gxx_modifier_platform: tools/gxx.html#waflib.Tools.gxx.gxx_modifier_platform * gxx_modifier_platform_ @@ -421,14 +581,22 @@ Configuration methods * have_define_ -.. _ifort_modifier_cygwin: tools/ifort.html#waflib.Tools.ifort.ifort_modifier_cygwin +.. _ifort_modifier_darwin: tools/ifort.html#waflib.Tools.ifort.ifort_modifier_darwin -* ifort_modifier_cygwin_ +* ifort_modifier_darwin_ .. _ifort_modifier_platform: tools/ifort.html#waflib.Tools.ifort.ifort_modifier_platform * ifort_modifier_platform_ +.. _ifort_modifier_win32: tools/ifort.html#waflib.Tools.ifort.ifort_modifier_win32 + +* ifort_modifier_win32_ + +.. _irixcc_common_flags: tools/irixcc.html#waflib.Tools.irixcc.irixcc_common_flags + +* irixcc_common_flags_ + .. _is_defined: tools/c_config.html#waflib.Tools.c_config.is_defined * is_defined_ @@ -445,6 +613,10 @@ Configuration methods * link_add_flags_ +.. _modfile: tools/fc.html#waflib.Tools.fc.modfile + +* modfile_ + .. _msvc_common_flags: tools/msvc.html#waflib.Tools.msvc.msvc_common_flags * msvc_common_flags_ @@ -469,18 +641,22 @@ Configuration methods * post_check_ -.. _print_all_msvc_detected: tools/msvc.html#waflib.Tools.msvc.print_all_msvc_detected - -* print_all_msvc_detected_ - .. _program: tools/c_aliases.html#waflib.Tools.c_aliases.program * program_ +.. 
_python_cross_compile: tools/python.html#waflib.Tools.python.python_cross_compile + +* python_cross_compile_ + .. _read_csshlib: tools/cs.html#waflib.Tools.cs.read_csshlib * read_csshlib_ +.. _read_object: tools/ccroot.html#waflib.Tools.ccroot.read_object + +* read_object_ + .. _read_shlib: tools/ccroot.html#waflib.Tools.ccroot.read_shlib * read_shlib_ @@ -489,18 +665,42 @@ Configuration methods * read_stlib_ -.. _ret_msg: tools/c_config.html#waflib.Tools.c_config.ret_msg - -* ret_msg_ - .. _scc_common_flags: tools/suncc.html#waflib.Tools.suncc.scc_common_flags * scc_common_flags_ +.. _set_define_comment: tools/c_config.html#waflib.Tools.c_config.set_define_comment + +* set_define_comment_ + +.. _set_qt5_defines: tools/qt5.html#waflib.Tools.qt5.set_qt5_defines + +* set_qt5_defines_ + +.. _set_qt5_libs_dir: tools/qt5.html#waflib.Tools.qt5.set_qt5_libs_dir + +* set_qt5_libs_dir_ + +.. _set_qt5_libs_to_check: tools/qt5.html#waflib.Tools.qt5.set_qt5_libs_to_check + +* set_qt5_libs_to_check_ + +.. _setup_ifort: tools/ifort.html#waflib.Tools.ifort.setup_ifort + +* setup_ifort_ + +.. _setup_msvc: tools/msvc.html#waflib.Tools.msvc.setup_msvc + +* setup_msvc_ + .. _shlib: tools/c_aliases.html#waflib.Tools.c_aliases.shlib * shlib_ +.. _simplify_qt5_libs: tools/qt5.html#waflib.Tools.qt5.simplify_qt5_libs + +* simplify_qt5_libs_ + .. _stlib: tools/c_aliases.html#waflib.Tools.c_aliases.stlib * stlib_ @@ -509,6 +709,14 @@ Configuration methods * sxx_common_flags_ +.. _test_pyembed: tools/python.html#waflib.Tools.python.test_pyembed + +* test_pyembed_ + +.. _test_pyext: tools/python.html#waflib.Tools.python.test_pyext + +* test_pyext_ + .. _undefine: tools/c_config.html#waflib.Tools.c_config.undefine * undefine_ @@ -521,6 +729,10 @@ Configuration methods * validate_cfg_ +.. _visual_studio_add_flags: tools/msvc.html#waflib.Tools.msvc.visual_studio_add_flags + +* visual_studio_add_flags_ + .. _write_config_header: tools/c_config.html#waflib.Tools.c_config.write_config_header * write_config_header_ diff --git a/docs/sphinx/featuremap.rst b/docs/sphinx/featuremap.rst index e70679db86..925ee23288 100644 --- a/docs/sphinx/featuremap.rst +++ b/docs/sphinx/featuremap.rst @@ -10,31 +10,52 @@ Feature \* .. 
graphviz:: digraph feature_all { - size="8.0, 12.0"; - "apply_intltool_in_f" [style="setlinewidth(0.5)",URL="tools/intltool.html#waflib.Tools.intltool.apply_intltool_in_f",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "process_source" [style="setlinewidth(0.5)",URL="TaskGen.html#waflib.TaskGen.process_source",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "process_rule" [style="setlinewidth(0.5)",URL="TaskGen.html#waflib.TaskGen.process_rule",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "jar_files" [style="setlinewidth(0.5)",URL="tools/javaw.html#waflib.Tools.javaw.jar_files",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "process_marshal" [style="setlinewidth(0.5)",URL="tools/glib2.html#waflib.Tools.glib2.process_marshal",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "link_lib_test_fun" [style="setlinewidth(0.5)",URL="tools/c_tests.html#waflib.Tools.c_tests.link_lib_test_fun",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "process_enums" [style="setlinewidth(0.5)",URL="tools/glib2.html#waflib.Tools.glib2.process_enums",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "apply_java" [style="setlinewidth(0.5)",URL="tools/javaw.html#waflib.Tools.javaw.apply_java",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "link_main_routines_tg_method" [style="setlinewidth(0.5)",URL="tools/fc_config.html#waflib.Tools.fc_config.link_main_routines_tg_method",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "process_subst" [style="setlinewidth(0.5)",URL="TaskGen.html#waflib.TaskGen.process_subst",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "apply_cs" [style="setlinewidth(0.5)",URL="tools/cs.html#waflib.Tools.cs.apply_cs",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "apply_tex" [style="setlinewidth(0.5)",URL="tools/tex.html#waflib.Tools.tex.apply_tex",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "process_rule" -> "process_subst" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "process_source" -> "process_rule" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "process_source" -> "process_subst" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "process_source" -> "link_lib_test_fun" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "process_source" -> "link_main_routines_tg_method" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "process_source" -> "apply_tex" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "process_source" -> "apply_java" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "process_source" -> "jar_files" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "process_source" -> "process_marshal" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "process_source" -> "process_enums" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "process_source" -> "apply_cs" 
[arrowsize=0.5,style="setlinewidth(0.5)"]; - "process_source" -> "apply_intltool_in_f" [arrowsize=0.5,style="setlinewidth(0.5)"]; + size="8.0, 12.0"; + "apply_incpaths" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_incpaths",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "grep_for_endianness_fun" [style="setlinewidth(0.5)",URL="../tools/c_tests.html#waflib.Tools.c_tests.grep_for_endianness_fun",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "link_lib_test_fun" [style="setlinewidth(0.5)",URL="../tools/c_tests.html#waflib.Tools.c_tests.link_lib_test_fun",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "process_install_task" [style="setlinewidth(0.5)",URL="../Build.html#waflib.Build.process_install_task",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "process_use" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.process_use",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "create_javadoc" [style="setlinewidth(0.5)",URL="../tools/javaw.html#waflib.Tools.javaw.create_javadoc",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "process_subst" [style="setlinewidth(0.5)",URL="../TaskGen.html#waflib.TaskGen.process_subst",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_link" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_link",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "process_marshal" [style="setlinewidth(0.5)",URL="../tools/glib2.html#waflib.Tools.glib2.process_marshal",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "init_rubyext" [style="setlinewidth(0.5)",URL="../tools/ruby.html#waflib.Tools.ruby.init_rubyext",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_java" [style="setlinewidth(0.5)",URL="../tools/javaw.html#waflib.Tools.javaw.apply_java",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "jar_files" [style="setlinewidth(0.5)",URL="../tools/javaw.html#waflib.Tools.javaw.jar_files",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_tex" [style="setlinewidth(0.5)",URL="../tools/tex.html#waflib.Tools.tex.apply_tex",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_intltool_in_f" [style="setlinewidth(0.5)",URL="../tools/intltool.html#waflib.Tools.intltool.apply_intltool_in_f",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "process_source" 
[style="setlinewidth(0.5)",URL="../TaskGen.html#waflib.TaskGen.process_source",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "feature_py" [style="setlinewidth(0.5)",URL="../tools/python.html#waflib.Tools.python.feature_py",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "process_mocs" [style="setlinewidth(0.5)",URL="../tools/qt5.html#waflib.Tools.qt5.process_mocs",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "process_rule" [style="setlinewidth(0.5)",URL="../TaskGen.html#waflib.TaskGen.process_rule",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "process_enums" [style="setlinewidth(0.5)",URL="../tools/glib2.html#waflib.Tools.glib2.process_enums",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "process_objs" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.process_objs",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "link_main_routines_tg_method" [style="setlinewidth(0.5)",URL="../tools/fc_config.html#waflib.Tools.fc_config.link_main_routines_tg_method",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_cs" [style="setlinewidth(0.5)",URL="../tools/cs.html#waflib.Tools.cs.apply_cs",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_incpaths" -> "process_source" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "process_use" -> "process_source" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "create_javadoc" -> "process_rule" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "process_source" -> "process_rule" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "process_source" -> "process_subst" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "process_source" -> "process_install_task" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "process_source" -> "link_lib_test_fun" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "process_source" -> "process_objs" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "process_source" -> "link_main_routines_tg_method" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "process_source" -> "apply_tex" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "process_source" -> "apply_java" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "process_source" -> "jar_files" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "process_source" -> "init_rubyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "process_source" -> "process_marshal" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "process_source" -> "process_enums" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "process_source" -> "process_mocs" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "process_source" -> "feature_py" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "process_source" -> "apply_intltool_in_f" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "process_source" -> "apply_cs" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_link" -> "process_source" 
[arrowsize=0.5,style="setlinewidth(0.5)"]; + "grep_for_endianness_fun" -> "process_source" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "process_rule" -> "process_subst" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "process_rule" -> "process_install_task" [arrowsize=0.5,style="setlinewidth(0.5)"]; } @@ -45,31 +66,73 @@ Feature asm .. graphviz:: digraph feature_asm { - size="8.0, 12.0"; - "process_use" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.process_use",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "apply_nasm_vars" [style="setlinewidth(0.5)",URL="tools/nasm.html#waflib.Tools.nasm.apply_nasm_vars",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "init_perlext" [style="setlinewidth(0.5)",URL="tools/perl.html#waflib.Tools.perl.init_perlext",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "apply_ruby_so_name" [style="setlinewidth(0.5)",URL="tools/ruby.html#waflib.Tools.ruby.apply_ruby_so_name",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "process_source" [style="setlinewidth(0.5)",URL="TaskGen.html#waflib.TaskGen.process_source",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "apply_incpaths" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.apply_incpaths",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "apply_link" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.apply_link",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "init_pyext" [style="setlinewidth(0.5)",URL="tools/python.html#waflib.Tools.python.init_pyext",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "init_rubyext" [style="setlinewidth(0.5)",URL="tools/ruby.html#waflib.Tools.ruby.init_rubyext",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "set_lib_pat" [style="setlinewidth(0.5)",URL="tools/fc_config.html#waflib.Tools.fc_config.set_lib_pat",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "propagate_uselib_vars" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.propagate_uselib_vars",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "apply_bundle" [style="setlinewidth(0.5)",URL="tools/c_osx.html#waflib.Tools.c_osx.apply_bundle",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "apply_incpaths" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_incpaths" -> "process_source" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_incpaths" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_incpaths" -> "init_rubyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_incpaths" -> "init_perlext" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_link" -> "apply_bundle" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_link" -> 
"process_source" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_link" -> "set_lib_pat" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_link" -> "init_rubyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_link" -> "apply_ruby_so_name" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_link" -> "init_pyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_link" -> "init_perlext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + size="8.0, 12.0"; + "make_test" [style="setlinewidth(0.5)",URL="../tools/waf_unit_test.html#waflib.Tools.waf_unit_test.make_test",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_incpaths" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_incpaths",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "set_bundle" [style="setlinewidth(0.5)",URL="../tools/python.html#waflib.Tools.python.set_bundle",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "set_lib_pat" [style="setlinewidth(0.5)",URL="../tools/fc_config.html#waflib.Tools.fc_config.set_lib_pat",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_flags_ifort" [style="setlinewidth(0.5)",URL="../tools/ifort.html#waflib.Tools.ifort.apply_flags_ifort",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_bundle" [style="setlinewidth(0.5)",URL="../tools/c_osx.html#waflib.Tools.c_osx.apply_bundle",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "process_use" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.process_use",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_implib" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_implib",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_nasm_vars" [style="setlinewidth(0.5)",URL="../tools/nasm.html#waflib.Tools.nasm.apply_nasm_vars",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "apply_ruby_so_name" [style="setlinewidth(0.5)",URL="../tools/ruby.html#waflib.Tools.ruby.apply_ruby_so_name",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "set_full_paths_hpux" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.set_full_paths_hpux",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "test_exec_fun" [style="setlinewidth(0.5)",URL="../tools/c_config.html#waflib.Tools.c_config.test_exec_fun",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_link" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_link",target="_top",fontname="Vera Sans, 
DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "init_pyext" [style="setlinewidth(0.5)",URL="../tools/python.html#waflib.Tools.python.init_pyext",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "init_rubyext" [style="setlinewidth(0.5)",URL="../tools/ruby.html#waflib.Tools.ruby.init_rubyext",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_vnum" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_vnum",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "create_task_macplist" [style="setlinewidth(0.5)",URL="../tools/c_osx.html#waflib.Tools.c_osx.create_task_macplist",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "make_windows_app" [style="setlinewidth(0.5)",URL="../tools/msvc.html#waflib.Tools.msvc.make_windows_app",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_manifest" [style="setlinewidth(0.5)",URL="../tools/msvc.html#waflib.Tools.msvc.apply_manifest",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "process_source" [style="setlinewidth(0.5)",URL="../TaskGen.html#waflib.TaskGen.process_source",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_manifest_ifort" [style="setlinewidth(0.5)",URL="../tools/ifort.html#waflib.Tools.ifort.apply_manifest_ifort",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "make_winphone_app" [style="setlinewidth(0.5)",URL="../tools/msvc.html#waflib.Tools.msvc.make_winphone_app",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "set_classpath" [style="setlinewidth(0.5)",URL="../tools/javaw.html#waflib.Tools.javaw.set_classpath",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "init_pyembed" [style="setlinewidth(0.5)",URL="../tools/python.html#waflib.Tools.python.init_pyembed",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "init_perlext" [style="setlinewidth(0.5)",URL="../tools/perl.html#waflib.Tools.perl.init_perlext",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "create_task_macapp" [style="setlinewidth(0.5)",URL="../tools/c_osx.html#waflib.Tools.c_osx.create_task_macapp",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_qt5" [style="setlinewidth(0.5)",URL="../tools/qt5.html#waflib.Tools.qt5.apply_qt5",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_flags_msvc" 
[style="setlinewidth(0.5)",URL="../tools/msvc.html#waflib.Tools.msvc.apply_flags_msvc",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "propagate_uselib_vars" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.propagate_uselib_vars",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "make_test" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_incpaths" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_incpaths" -> "process_source" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_incpaths" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_incpaths" -> "init_rubyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_incpaths" -> "init_perlext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_flags_ifort" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "process_use" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_qt5" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "set_full_paths_hpux" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_link" -> "apply_bundle" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_link" -> "process_source" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_link" -> "set_lib_pat" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_link" -> "init_rubyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_link" -> "apply_ruby_so_name" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_link" -> "init_perlext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_link" -> "init_pyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_link" -> "set_bundle" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_vnum" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_vnum" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "create_task_macplist" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "make_windows_app" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_manifest" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "test_exec_fun" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_manifest_ifort" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "make_winphone_app" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "set_classpath" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "create_task_macapp" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_implib" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_flags_msvc" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "apply_bundle" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "set_lib_pat" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "apply_ruby_so_name" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "init_perlext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "init_pyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "init_pyembed" [arrowsize=0.5,style="setlinewidth(0.5)"]; } @@ -80,43 +143,78 @@ Feature c .. 
graphviz:: digraph feature_c { - size="8.0, 12.0"; - "process_use" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.process_use",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "process_source" [style="setlinewidth(0.5)",URL="TaskGen.html#waflib.TaskGen.process_source",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "apply_incpaths" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.apply_incpaths",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "set_lib_pat" [style="setlinewidth(0.5)",URL="tools/fc_config.html#waflib.Tools.fc_config.set_lib_pat",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "apply_bundle" [style="setlinewidth(0.5)",URL="tools/c_osx.html#waflib.Tools.c_osx.apply_bundle",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "set_macosx_deployment_target" [style="setlinewidth(0.5)",URL="tools/c_osx.html#waflib.Tools.c_osx.set_macosx_deployment_target",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "init_perlext" [style="setlinewidth(0.5)",URL="tools/perl.html#waflib.Tools.perl.init_perlext",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "apply_ruby_so_name" [style="setlinewidth(0.5)",URL="tools/ruby.html#waflib.Tools.ruby.apply_ruby_so_name",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "apply_link" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.apply_link",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "init_pyext" [style="setlinewidth(0.5)",URL="tools/python.html#waflib.Tools.python.init_pyext",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "init_rubyext" [style="setlinewidth(0.5)",URL="tools/ruby.html#waflib.Tools.ruby.init_rubyext",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "apply_flags_msvc" [style="setlinewidth(0.5)",URL="tools/msvc.html#waflib.Tools.msvc.apply_flags_msvc",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "propagate_uselib_vars" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.propagate_uselib_vars",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "init_pyembed" [style="setlinewidth(0.5)",URL="tools/python.html#waflib.Tools.python.init_pyembed",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "process_use" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "process_use" -> "process_source" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_incpaths" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_incpaths" 
-> "process_source" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_incpaths" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_incpaths" -> "init_rubyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_incpaths" -> "init_perlext" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_link" -> "apply_bundle" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_link" -> "process_source" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_link" -> "set_lib_pat" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_link" -> "init_rubyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_link" -> "apply_ruby_so_name" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_link" -> "init_pyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_link" -> "init_perlext" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_flags_msvc" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "propagate_uselib_vars" -> "apply_bundle" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "propagate_uselib_vars" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "propagate_uselib_vars" -> "set_lib_pat" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "propagate_uselib_vars" -> "init_pyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "propagate_uselib_vars" -> "init_pyembed" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "propagate_uselib_vars" -> "init_perlext" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_bundle" -> "init_rubyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + size="8.0, 12.0"; + "make_test" [style="setlinewidth(0.5)",URL="../tools/waf_unit_test.html#waflib.Tools.waf_unit_test.make_test",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "process_use" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.process_use",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "create_task_macapp" [style="setlinewidth(0.5)",URL="../tools/c_osx.html#waflib.Tools.c_osx.create_task_macapp",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_manifest" [style="setlinewidth(0.5)",URL="../tools/msvc.html#waflib.Tools.msvc.apply_manifest",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "process_source" [style="setlinewidth(0.5)",URL="../TaskGen.html#waflib.TaskGen.process_source",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_incpaths" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_incpaths",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "apply_manifest_ifort" [style="setlinewidth(0.5)",URL="../tools/ifort.html#waflib.Tools.ifort.apply_manifest_ifort",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "make_windows_app" [style="setlinewidth(0.5)",URL="../tools/msvc.html#waflib.Tools.msvc.make_windows_app",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "set_bundle" 
[style="setlinewidth(0.5)",URL="../tools/python.html#waflib.Tools.python.set_bundle",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "make_winphone_app" [style="setlinewidth(0.5)",URL="../tools/msvc.html#waflib.Tools.msvc.make_winphone_app",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "set_lib_pat" [style="setlinewidth(0.5)",URL="../tools/fc_config.html#waflib.Tools.fc_config.set_lib_pat",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "init_pyembed" [style="setlinewidth(0.5)",URL="../tools/python.html#waflib.Tools.python.init_pyembed",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "set_classpath" [style="setlinewidth(0.5)",URL="../tools/javaw.html#waflib.Tools.javaw.set_classpath",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_flags_ifort" [style="setlinewidth(0.5)",URL="../tools/ifort.html#waflib.Tools.ifort.apply_flags_ifort",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_bundle" [style="setlinewidth(0.5)",URL="../tools/c_osx.html#waflib.Tools.c_osx.apply_bundle",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "set_macosx_deployment_target" [style="setlinewidth(0.5)",URL="../tools/c_osx.html#waflib.Tools.c_osx.set_macosx_deployment_target",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "apply_implib" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_implib",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "init_perlext" [style="setlinewidth(0.5)",URL="../tools/perl.html#waflib.Tools.perl.init_perlext",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_ruby_so_name" [style="setlinewidth(0.5)",URL="../tools/ruby.html#waflib.Tools.ruby.apply_ruby_so_name",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "set_full_paths_hpux" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.set_full_paths_hpux",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "test_exec_fun" [style="setlinewidth(0.5)",URL="../tools/c_config.html#waflib.Tools.c_config.test_exec_fun",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_link" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_link",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "init_pyext" 
[style="setlinewidth(0.5)",URL="../tools/python.html#waflib.Tools.python.init_pyext",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_qt5" [style="setlinewidth(0.5)",URL="../tools/qt5.html#waflib.Tools.qt5.apply_qt5",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "init_rubyext" [style="setlinewidth(0.5)",URL="../tools/ruby.html#waflib.Tools.ruby.init_rubyext",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_flags_msvc" [style="setlinewidth(0.5)",URL="../tools/msvc.html#waflib.Tools.msvc.apply_flags_msvc",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "propagate_uselib_vars" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.propagate_uselib_vars",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "apply_vnum" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_vnum",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "create_task_macplist" [style="setlinewidth(0.5)",URL="../tools/c_osx.html#waflib.Tools.c_osx.create_task_macplist",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "make_test" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "make_test" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_incpaths" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_incpaths" -> "process_source" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_incpaths" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_incpaths" -> "init_rubyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_incpaths" -> "init_perlext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_flags_ifort" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "process_use" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "process_use" -> "process_source" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_qt5" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "set_full_paths_hpux" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "set_full_paths_hpux" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_link" -> "apply_bundle" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_link" -> "process_source" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_link" -> "set_lib_pat" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_link" -> "init_rubyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_link" -> "apply_ruby_so_name" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_link" -> "init_perlext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_link" -> "init_pyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_link" -> "set_bundle" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_vnum" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_vnum" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "create_task_macplist" -> 
"apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "make_windows_app" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "make_windows_app" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_manifest" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "test_exec_fun" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_manifest_ifort" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "make_winphone_app" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "make_winphone_app" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "set_classpath" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "create_task_macapp" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_implib" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_flags_msvc" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "apply_bundle" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "set_lib_pat" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "apply_ruby_so_name" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "init_perlext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "init_pyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "init_pyembed" [arrowsize=0.5,style="setlinewidth(0.5)"]; } @@ -127,14 +225,18 @@ Feature cprogram .. graphviz:: digraph feature_cprogram { - size="8.0, 12.0"; - "create_task_macapp" [style="setlinewidth(0.5)",URL="tools/c_osx.html#waflib.Tools.c_osx.create_task_macapp",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "create_task_macplist" [style="setlinewidth(0.5)",URL="tools/c_osx.html#waflib.Tools.c_osx.create_task_macplist",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "apply_link" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.apply_link",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "apply_manifest" [style="setlinewidth(0.5)",URL="tools/msvc.html#waflib.Tools.msvc.apply_manifest",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "create_task_macapp" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "create_task_macplist" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_manifest" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + size="8.0, 12.0"; + "process_use" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.process_use",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_manifest" [style="setlinewidth(0.5)",URL="../tools/msvc.html#waflib.Tools.msvc.apply_manifest",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "set_full_paths_hpux" 
[style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.set_full_paths_hpux",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "create_task_macapp" [style="setlinewidth(0.5)",URL="../tools/c_osx.html#waflib.Tools.c_osx.create_task_macapp",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "apply_link" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_link",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "create_task_macplist" [style="setlinewidth(0.5)",URL="../tools/c_osx.html#waflib.Tools.c_osx.create_task_macplist",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "set_full_paths_hpux" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "set_full_paths_hpux" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "create_task_macplist" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_manifest" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "create_task_macapp" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; } @@ -145,26 +247,40 @@ Feature cs .. graphviz:: digraph feature_cs { - size="8.0, 12.0"; - "process_use" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.process_use",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "init_perlext" [style="setlinewidth(0.5)",URL="tools/perl.html#waflib.Tools.perl.init_perlext",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "debug_cs" [style="setlinewidth(0.5)",URL="tools/cs.html#waflib.Tools.cs.debug_cs",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "init_pyext" [style="setlinewidth(0.5)",URL="tools/python.html#waflib.Tools.python.init_pyext",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "use_cs" [style="setlinewidth(0.5)",URL="tools/cs.html#waflib.Tools.cs.use_cs",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "set_lib_pat" [style="setlinewidth(0.5)",URL="tools/fc_config.html#waflib.Tools.fc_config.set_lib_pat",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "propagate_uselib_vars" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.propagate_uselib_vars",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "init_pyembed" [style="setlinewidth(0.5)",URL="tools/python.html#waflib.Tools.python.init_pyembed",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "apply_cs" [style="setlinewidth(0.5)",URL="tools/cs.html#waflib.Tools.cs.apply_cs",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, 
sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "apply_bundle" [style="setlinewidth(0.5)",URL="tools/c_osx.html#waflib.Tools.c_osx.apply_bundle",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "use_cs" -> "apply_cs" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "propagate_uselib_vars" -> "apply_bundle" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "propagate_uselib_vars" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "propagate_uselib_vars" -> "set_lib_pat" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "propagate_uselib_vars" -> "init_pyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "propagate_uselib_vars" -> "init_pyembed" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "propagate_uselib_vars" -> "init_perlext" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "debug_cs" -> "apply_cs" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "debug_cs" -> "use_cs" [arrowsize=0.5,style="setlinewidth(0.5)"]; + size="8.0, 12.0"; + "make_windows_app" [style="setlinewidth(0.5)",URL="../tools/msvc.html#waflib.Tools.msvc.make_windows_app",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "process_use" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.process_use",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "init_pyext" [style="setlinewidth(0.5)",URL="../tools/python.html#waflib.Tools.python.init_pyext",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "init_perlext" [style="setlinewidth(0.5)",URL="../tools/perl.html#waflib.Tools.perl.init_perlext",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_ruby_so_name" [style="setlinewidth(0.5)",URL="../tools/ruby.html#waflib.Tools.ruby.apply_ruby_so_name",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "process_source" [style="setlinewidth(0.5)",URL="../TaskGen.html#waflib.TaskGen.process_source",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_incpaths" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_incpaths",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "debug_cs" [style="setlinewidth(0.5)",URL="../tools/cs.html#waflib.Tools.cs.debug_cs",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "set_classpath" [style="setlinewidth(0.5)",URL="../tools/javaw.html#waflib.Tools.javaw.set_classpath",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "use_cs" [style="setlinewidth(0.5)",URL="../tools/cs.html#waflib.Tools.cs.use_cs",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "init_pyembed" 
[style="setlinewidth(0.5)",URL="../tools/python.html#waflib.Tools.python.init_pyembed",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "make_winphone_app" [style="setlinewidth(0.5)",URL="../tools/msvc.html#waflib.Tools.msvc.make_winphone_app",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "set_lib_pat" [style="setlinewidth(0.5)",URL="../tools/fc_config.html#waflib.Tools.fc_config.set_lib_pat",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "propagate_uselib_vars" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.propagate_uselib_vars",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "apply_vnum" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_vnum",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_cs" [style="setlinewidth(0.5)",URL="../tools/cs.html#waflib.Tools.cs.apply_cs",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "apply_bundle" [style="setlinewidth(0.5)",URL="../tools/c_osx.html#waflib.Tools.c_osx.apply_bundle",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_incpaths" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "use_cs" -> "apply_cs" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "debug_cs" -> "apply_cs" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "debug_cs" -> "use_cs" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "process_source" -> "apply_cs" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_vnum" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "make_windows_app" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "make_winphone_app" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "set_classpath" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "apply_bundle" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "set_lib_pat" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "apply_ruby_so_name" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "init_perlext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "init_pyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "init_pyembed" [arrowsize=0.5,style="setlinewidth(0.5)"]; } @@ -175,16 +291,25 @@ Feature cshlib .. 
graphviz:: digraph feature_cshlib { - size="8.0, 12.0"; - "apply_implib" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.apply_implib",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "apply_bundle_remove_dynamiclib" [style="setlinewidth(0.5)",URL="tools/c_osx.html#waflib.Tools.c_osx.apply_bundle_remove_dynamiclib",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "apply_vnum" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.apply_vnum",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "apply_manifest" [style="setlinewidth(0.5)",URL="tools/msvc.html#waflib.Tools.msvc.apply_manifest",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "apply_link" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.apply_link",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "apply_implib" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_bundle_remove_dynamiclib" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_vnum" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_manifest" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + size="8.0, 12.0"; + "apply_manifest" [style="setlinewidth(0.5)",URL="../tools/msvc.html#waflib.Tools.msvc.apply_manifest",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "apply_link" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_link",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "init_pyext" [style="setlinewidth(0.5)",URL="../tools/python.html#waflib.Tools.python.init_pyext",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_implib" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_implib",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "init_rubyext" [style="setlinewidth(0.5)",URL="../tools/ruby.html#waflib.Tools.ruby.init_rubyext",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "set_bundle" [style="setlinewidth(0.5)",URL="../tools/python.html#waflib.Tools.python.set_bundle",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "propagate_uselib_vars" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.propagate_uselib_vars",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_vnum" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_vnum",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, 
sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "apply_bundle" [style="setlinewidth(0.5)",URL="../tools/c_osx.html#waflib.Tools.c_osx.apply_bundle",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "apply_bundle" -> "init_rubyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_bundle" -> "set_bundle" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_link" -> "apply_bundle" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "init_pyext" -> "apply_bundle" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_vnum" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_vnum" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_manifest" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_implib" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "apply_bundle" [arrowsize=0.5,style="setlinewidth(0.5)"]; } @@ -195,43 +320,78 @@ Feature cxx .. graphviz:: digraph feature_cxx { - size="8.0, 12.0"; - "process_use" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.process_use",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "process_source" [style="setlinewidth(0.5)",URL="TaskGen.html#waflib.TaskGen.process_source",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "apply_incpaths" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.apply_incpaths",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "set_lib_pat" [style="setlinewidth(0.5)",URL="tools/fc_config.html#waflib.Tools.fc_config.set_lib_pat",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "apply_bundle" [style="setlinewidth(0.5)",URL="tools/c_osx.html#waflib.Tools.c_osx.apply_bundle",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "set_macosx_deployment_target" [style="setlinewidth(0.5)",URL="tools/c_osx.html#waflib.Tools.c_osx.set_macosx_deployment_target",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "init_perlext" [style="setlinewidth(0.5)",URL="tools/perl.html#waflib.Tools.perl.init_perlext",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "apply_ruby_so_name" [style="setlinewidth(0.5)",URL="tools/ruby.html#waflib.Tools.ruby.apply_ruby_so_name",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "apply_link" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.apply_link",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "init_pyext" [style="setlinewidth(0.5)",URL="tools/python.html#waflib.Tools.python.init_pyext",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "init_rubyext" 
[style="setlinewidth(0.5)",URL="tools/ruby.html#waflib.Tools.ruby.init_rubyext",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "apply_flags_msvc" [style="setlinewidth(0.5)",URL="tools/msvc.html#waflib.Tools.msvc.apply_flags_msvc",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "propagate_uselib_vars" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.propagate_uselib_vars",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "init_pyembed" [style="setlinewidth(0.5)",URL="tools/python.html#waflib.Tools.python.init_pyembed",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "process_use" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "process_use" -> "process_source" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_incpaths" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_incpaths" -> "process_source" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_incpaths" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_incpaths" -> "init_rubyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_incpaths" -> "init_perlext" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_link" -> "apply_bundle" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_link" -> "process_source" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_link" -> "set_lib_pat" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_link" -> "init_rubyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_link" -> "apply_ruby_so_name" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_link" -> "init_pyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_link" -> "init_perlext" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_flags_msvc" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "propagate_uselib_vars" -> "apply_bundle" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "propagate_uselib_vars" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "propagate_uselib_vars" -> "set_lib_pat" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "propagate_uselib_vars" -> "init_pyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "propagate_uselib_vars" -> "init_pyembed" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "propagate_uselib_vars" -> "init_perlext" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_bundle" -> "init_rubyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + size="8.0, 12.0"; + "make_test" [style="setlinewidth(0.5)",URL="../tools/waf_unit_test.html#waflib.Tools.waf_unit_test.make_test",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "process_use" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.process_use",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "create_task_macapp" [style="setlinewidth(0.5)",URL="../tools/c_osx.html#waflib.Tools.c_osx.create_task_macapp",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_manifest" 
[style="setlinewidth(0.5)",URL="../tools/msvc.html#waflib.Tools.msvc.apply_manifest",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "process_source" [style="setlinewidth(0.5)",URL="../TaskGen.html#waflib.TaskGen.process_source",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_incpaths" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_incpaths",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "apply_manifest_ifort" [style="setlinewidth(0.5)",URL="../tools/ifort.html#waflib.Tools.ifort.apply_manifest_ifort",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "make_windows_app" [style="setlinewidth(0.5)",URL="../tools/msvc.html#waflib.Tools.msvc.make_windows_app",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "set_bundle" [style="setlinewidth(0.5)",URL="../tools/python.html#waflib.Tools.python.set_bundle",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "make_winphone_app" [style="setlinewidth(0.5)",URL="../tools/msvc.html#waflib.Tools.msvc.make_winphone_app",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "set_lib_pat" [style="setlinewidth(0.5)",URL="../tools/fc_config.html#waflib.Tools.fc_config.set_lib_pat",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "init_pyembed" [style="setlinewidth(0.5)",URL="../tools/python.html#waflib.Tools.python.init_pyembed",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "set_classpath" [style="setlinewidth(0.5)",URL="../tools/javaw.html#waflib.Tools.javaw.set_classpath",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_flags_ifort" [style="setlinewidth(0.5)",URL="../tools/ifort.html#waflib.Tools.ifort.apply_flags_ifort",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_bundle" [style="setlinewidth(0.5)",URL="../tools/c_osx.html#waflib.Tools.c_osx.apply_bundle",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "set_macosx_deployment_target" [style="setlinewidth(0.5)",URL="../tools/c_osx.html#waflib.Tools.c_osx.set_macosx_deployment_target",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "apply_implib" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_implib",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "init_perlext" [style="setlinewidth(0.5)",URL="../tools/perl.html#waflib.Tools.perl.init_perlext",target="_top",fontname="Vera 
Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_ruby_so_name" [style="setlinewidth(0.5)",URL="../tools/ruby.html#waflib.Tools.ruby.apply_ruby_so_name",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "set_full_paths_hpux" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.set_full_paths_hpux",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "test_exec_fun" [style="setlinewidth(0.5)",URL="../tools/c_config.html#waflib.Tools.c_config.test_exec_fun",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_link" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_link",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "init_pyext" [style="setlinewidth(0.5)",URL="../tools/python.html#waflib.Tools.python.init_pyext",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_qt5" [style="setlinewidth(0.5)",URL="../tools/qt5.html#waflib.Tools.qt5.apply_qt5",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "init_rubyext" [style="setlinewidth(0.5)",URL="../tools/ruby.html#waflib.Tools.ruby.init_rubyext",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_flags_msvc" [style="setlinewidth(0.5)",URL="../tools/msvc.html#waflib.Tools.msvc.apply_flags_msvc",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "propagate_uselib_vars" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.propagate_uselib_vars",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "apply_vnum" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_vnum",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "create_task_macplist" [style="setlinewidth(0.5)",URL="../tools/c_osx.html#waflib.Tools.c_osx.create_task_macplist",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "make_test" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "make_test" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_incpaths" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_incpaths" -> "process_source" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_incpaths" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_incpaths" -> "init_rubyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_incpaths" -> "init_perlext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_flags_ifort" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "process_use" -> "apply_link" 
[arrowsize=0.5,style="setlinewidth(0.5)"]; + "process_use" -> "process_source" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_qt5" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "set_full_paths_hpux" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "set_full_paths_hpux" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_link" -> "apply_bundle" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_link" -> "process_source" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_link" -> "set_lib_pat" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_link" -> "init_rubyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_link" -> "apply_ruby_so_name" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_link" -> "init_perlext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_link" -> "init_pyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_link" -> "set_bundle" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_vnum" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_vnum" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "create_task_macplist" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "make_windows_app" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "make_windows_app" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_manifest" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "test_exec_fun" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_manifest_ifort" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "make_winphone_app" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "make_winphone_app" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "set_classpath" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "create_task_macapp" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_implib" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_flags_msvc" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "apply_bundle" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "set_lib_pat" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "apply_ruby_so_name" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "init_perlext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "init_pyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "init_pyembed" [arrowsize=0.5,style="setlinewidth(0.5)"]; } @@ -242,14 +402,18 @@ Feature cxxprogram .. 
graphviz:: digraph feature_cxxprogram { - size="8.0, 12.0"; - "create_task_macapp" [style="setlinewidth(0.5)",URL="tools/c_osx.html#waflib.Tools.c_osx.create_task_macapp",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "create_task_macplist" [style="setlinewidth(0.5)",URL="tools/c_osx.html#waflib.Tools.c_osx.create_task_macplist",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "apply_link" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.apply_link",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "apply_manifest" [style="setlinewidth(0.5)",URL="tools/msvc.html#waflib.Tools.msvc.apply_manifest",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "create_task_macapp" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "create_task_macplist" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_manifest" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + size="8.0, 12.0"; + "process_use" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.process_use",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_manifest" [style="setlinewidth(0.5)",URL="../tools/msvc.html#waflib.Tools.msvc.apply_manifest",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "set_full_paths_hpux" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.set_full_paths_hpux",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "create_task_macapp" [style="setlinewidth(0.5)",URL="../tools/c_osx.html#waflib.Tools.c_osx.create_task_macapp",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "apply_link" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_link",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "create_task_macplist" [style="setlinewidth(0.5)",URL="../tools/c_osx.html#waflib.Tools.c_osx.create_task_macplist",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "set_full_paths_hpux" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "set_full_paths_hpux" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "create_task_macplist" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_manifest" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "create_task_macapp" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; } @@ -260,16 +424,25 @@ Feature cxxshlib .. 
graphviz:: digraph feature_cxxshlib { - size="8.0, 12.0"; - "apply_implib" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.apply_implib",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "apply_bundle_remove_dynamiclib" [style="setlinewidth(0.5)",URL="tools/c_osx.html#waflib.Tools.c_osx.apply_bundle_remove_dynamiclib",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "apply_vnum" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.apply_vnum",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "apply_manifest" [style="setlinewidth(0.5)",URL="tools/msvc.html#waflib.Tools.msvc.apply_manifest",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "apply_link" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.apply_link",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "apply_implib" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_bundle_remove_dynamiclib" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_vnum" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_manifest" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + size="8.0, 12.0"; + "apply_manifest" [style="setlinewidth(0.5)",URL="../tools/msvc.html#waflib.Tools.msvc.apply_manifest",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "apply_link" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_link",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "init_pyext" [style="setlinewidth(0.5)",URL="../tools/python.html#waflib.Tools.python.init_pyext",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_implib" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_implib",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "init_rubyext" [style="setlinewidth(0.5)",URL="../tools/ruby.html#waflib.Tools.ruby.init_rubyext",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "set_bundle" [style="setlinewidth(0.5)",URL="../tools/python.html#waflib.Tools.python.set_bundle",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "propagate_uselib_vars" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.propagate_uselib_vars",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_vnum" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_vnum",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, 
sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "apply_bundle" [style="setlinewidth(0.5)",URL="../tools/c_osx.html#waflib.Tools.c_osx.apply_bundle",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "apply_bundle" -> "init_rubyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_bundle" -> "set_bundle" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_link" -> "apply_bundle" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "init_pyext" -> "apply_bundle" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_vnum" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_vnum" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_manifest" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_implib" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "apply_bundle" [arrowsize=0.5,style="setlinewidth(0.5)"]; } @@ -280,40 +453,78 @@ Feature d .. graphviz:: digraph feature_d { - size="8.0, 12.0"; - "process_use" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.process_use",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "process_header" [style="setlinewidth(0.5)",URL="tools/d.html#waflib.Tools.d.process_header",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "apply_ruby_so_name" [style="setlinewidth(0.5)",URL="tools/ruby.html#waflib.Tools.ruby.apply_ruby_so_name",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "process_source" [style="setlinewidth(0.5)",URL="TaskGen.html#waflib.TaskGen.process_source",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "apply_incpaths" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.apply_incpaths",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "apply_link" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.apply_link",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "init_pyext" [style="setlinewidth(0.5)",URL="tools/python.html#waflib.Tools.python.init_pyext",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "init_rubyext" [style="setlinewidth(0.5)",URL="tools/ruby.html#waflib.Tools.ruby.init_rubyext",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "init_perlext" [style="setlinewidth(0.5)",URL="tools/perl.html#waflib.Tools.perl.init_perlext",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "set_lib_pat" [style="setlinewidth(0.5)",URL="tools/fc_config.html#waflib.Tools.fc_config.set_lib_pat",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "propagate_uselib_vars" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.propagate_uselib_vars",fontname="Vera Sans, DejaVu Sans, Liberation Sans, 
Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "init_pyembed" [style="setlinewidth(0.5)",URL="tools/python.html#waflib.Tools.python.init_pyembed",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "apply_bundle" [style="setlinewidth(0.5)",URL="tools/c_osx.html#waflib.Tools.c_osx.apply_bundle",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "process_use" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "process_use" -> "process_source" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_incpaths" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_incpaths" -> "process_source" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_incpaths" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_incpaths" -> "init_rubyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_incpaths" -> "init_perlext" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "propagate_uselib_vars" -> "apply_bundle" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "propagate_uselib_vars" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "propagate_uselib_vars" -> "set_lib_pat" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "propagate_uselib_vars" -> "init_pyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "propagate_uselib_vars" -> "init_pyembed" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "propagate_uselib_vars" -> "init_perlext" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_link" -> "apply_bundle" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_link" -> "process_source" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_link" -> "set_lib_pat" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_link" -> "init_rubyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_link" -> "apply_ruby_so_name" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_link" -> "init_pyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_link" -> "init_perlext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + size="8.0, 12.0"; + "make_test" [style="setlinewidth(0.5)",URL="../tools/waf_unit_test.html#waflib.Tools.waf_unit_test.make_test",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_incpaths" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_incpaths",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "set_bundle" [style="setlinewidth(0.5)",URL="../tools/python.html#waflib.Tools.python.set_bundle",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "set_lib_pat" [style="setlinewidth(0.5)",URL="../tools/fc_config.html#waflib.Tools.fc_config.set_lib_pat",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_flags_ifort" [style="setlinewidth(0.5)",URL="../tools/ifort.html#waflib.Tools.ifort.apply_flags_ifort",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_bundle" [style="setlinewidth(0.5)",URL="../tools/c_osx.html#waflib.Tools.c_osx.apply_bundle",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation 
Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "process_use" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.process_use",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "apply_implib" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_implib",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_ruby_so_name" [style="setlinewidth(0.5)",URL="../tools/ruby.html#waflib.Tools.ruby.apply_ruby_so_name",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "set_full_paths_hpux" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.set_full_paths_hpux",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "test_exec_fun" [style="setlinewidth(0.5)",URL="../tools/c_config.html#waflib.Tools.c_config.test_exec_fun",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_link" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_link",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "init_pyext" [style="setlinewidth(0.5)",URL="../tools/python.html#waflib.Tools.python.init_pyext",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "init_rubyext" [style="setlinewidth(0.5)",URL="../tools/ruby.html#waflib.Tools.ruby.init_rubyext",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_vnum" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_vnum",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "create_task_macplist" [style="setlinewidth(0.5)",URL="../tools/c_osx.html#waflib.Tools.c_osx.create_task_macplist",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "make_windows_app" [style="setlinewidth(0.5)",URL="../tools/msvc.html#waflib.Tools.msvc.make_windows_app",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "process_header" [style="setlinewidth(0.5)",URL="../tools/d.html#waflib.Tools.d.process_header",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "apply_manifest" [style="setlinewidth(0.5)",URL="../tools/msvc.html#waflib.Tools.msvc.apply_manifest",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "process_source" [style="setlinewidth(0.5)",URL="../TaskGen.html#waflib.TaskGen.process_source",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_manifest_ifort" 
[style="setlinewidth(0.5)",URL="../tools/ifort.html#waflib.Tools.ifort.apply_manifest_ifort",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "make_winphone_app" [style="setlinewidth(0.5)",URL="../tools/msvc.html#waflib.Tools.msvc.make_winphone_app",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "set_classpath" [style="setlinewidth(0.5)",URL="../tools/javaw.html#waflib.Tools.javaw.set_classpath",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "init_pyembed" [style="setlinewidth(0.5)",URL="../tools/python.html#waflib.Tools.python.init_pyembed",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "init_perlext" [style="setlinewidth(0.5)",URL="../tools/perl.html#waflib.Tools.perl.init_perlext",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "create_task_macapp" [style="setlinewidth(0.5)",URL="../tools/c_osx.html#waflib.Tools.c_osx.create_task_macapp",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_qt5" [style="setlinewidth(0.5)",URL="../tools/qt5.html#waflib.Tools.qt5.apply_qt5",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_flags_msvc" [style="setlinewidth(0.5)",URL="../tools/msvc.html#waflib.Tools.msvc.apply_flags_msvc",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "propagate_uselib_vars" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.propagate_uselib_vars",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "make_test" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "make_test" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_incpaths" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_incpaths" -> "process_source" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_incpaths" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_incpaths" -> "init_rubyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_incpaths" -> "init_perlext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_flags_ifort" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "process_use" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "process_use" -> "process_source" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_qt5" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "set_full_paths_hpux" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "set_full_paths_hpux" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_link" -> "apply_bundle" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_link" -> "process_source" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_link" -> "set_lib_pat" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_link" -> "init_rubyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_link" -> "apply_ruby_so_name" 
[arrowsize=0.5,style="setlinewidth(0.5)"];
+ "apply_link" -> "init_perlext" [arrowsize=0.5,style="setlinewidth(0.5)"];
+ "apply_link" -> "init_pyext" [arrowsize=0.5,style="setlinewidth(0.5)"];
+ "apply_link" -> "set_bundle" [arrowsize=0.5,style="setlinewidth(0.5)"];
+ "apply_vnum" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"];
+ "apply_vnum" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"];
+ "create_task_macplist" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"];
+ "make_windows_app" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"];
+ "make_windows_app" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"];
+ "apply_manifest" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"];
+ "test_exec_fun" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"];
+ "apply_manifest_ifort" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"];
+ "make_winphone_app" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"];
+ "make_winphone_app" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"];
+ "set_classpath" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"];
+ "create_task_macapp" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"];
+ "apply_implib" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"];
+ "apply_flags_msvc" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"];
+ "propagate_uselib_vars" -> "apply_bundle" [arrowsize=0.5,style="setlinewidth(0.5)"];
+ "propagate_uselib_vars" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"];
+ "propagate_uselib_vars" -> "set_lib_pat" [arrowsize=0.5,style="setlinewidth(0.5)"];
+ "propagate_uselib_vars" -> "apply_ruby_so_name" [arrowsize=0.5,style="setlinewidth(0.5)"];
+ "propagate_uselib_vars" -> "init_perlext" [arrowsize=0.5,style="setlinewidth(0.5)"];
+ "propagate_uselib_vars" -> "init_pyext" [arrowsize=0.5,style="setlinewidth(0.5)"];
+ "propagate_uselib_vars" -> "init_pyembed" [arrowsize=0.5,style="setlinewidth(0.5)"];
}
@@ -324,10 +535,12 @@ Feature dshlib
.. graphviz::
digraph feature_dshlib {
- size="8.0, 12.0";
- "apply_vnum" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.apply_vnum",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled];
- "apply_link" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.apply_link",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10];
- "apply_vnum" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"];
+ size="8.0, 12.0";
+ "propagate_uselib_vars" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.propagate_uselib_vars",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10];
+ "apply_vnum" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_vnum",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled];
+ "apply_link" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_link",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10];
+ "apply_vnum" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"];
+ "apply_vnum" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"];
}
@@ -338,8 +551,22 @@ Feature fake_lib
.. graphviz::
digraph feature_fake_lib {
- size="8.0, 12.0";
- "process_lib" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.process_lib",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled];
+ size="8.0, 12.0";
+ "process_lib" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.process_lib",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled];
+ }
+
+
+
+Feature fake_obj
+================
+
+.. graphviz::
+
+ digraph feature_fake_obj {
+ size="8.0, 12.0";
+ "process_objs" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.process_objs",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled];
+ "process_source" [style="setlinewidth(0.5)",URL="../TaskGen.html#waflib.TaskGen.process_source",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10];
+ "process_source" -> "process_objs" [arrowsize=0.5,style="setlinewidth(0.5)"];
}
@@ -350,39 +577,77 @@ Feature fc
..
graphviz:: digraph feature_fc { - size="8.0, 12.0"; - "process_use" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.process_use",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "init_perlext" [style="setlinewidth(0.5)",URL="tools/perl.html#waflib.Tools.perl.init_perlext",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "apply_ruby_so_name" [style="setlinewidth(0.5)",URL="tools/ruby.html#waflib.Tools.ruby.apply_ruby_so_name",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "process_source" [style="setlinewidth(0.5)",URL="TaskGen.html#waflib.TaskGen.process_source",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "apply_incpaths" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.apply_incpaths",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "apply_link" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.apply_link",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "init_pyext" [style="setlinewidth(0.5)",URL="tools/python.html#waflib.Tools.python.init_pyext",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "init_rubyext" [style="setlinewidth(0.5)",URL="tools/ruby.html#waflib.Tools.ruby.init_rubyext",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "set_lib_pat" [style="setlinewidth(0.5)",URL="tools/fc_config.html#waflib.Tools.fc_config.set_lib_pat",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "propagate_uselib_vars" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.propagate_uselib_vars",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "init_pyembed" [style="setlinewidth(0.5)",URL="tools/python.html#waflib.Tools.python.init_pyembed",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "apply_bundle" [style="setlinewidth(0.5)",URL="tools/c_osx.html#waflib.Tools.c_osx.apply_bundle",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "process_use" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "process_use" -> "process_source" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_incpaths" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_incpaths" -> "process_source" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_incpaths" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_incpaths" -> "init_rubyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_incpaths" -> "init_perlext" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "propagate_uselib_vars" -> "apply_bundle" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "propagate_uselib_vars" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "propagate_uselib_vars" -> "set_lib_pat" 
[arrowsize=0.5,style="setlinewidth(0.5)"]; - "propagate_uselib_vars" -> "init_pyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "propagate_uselib_vars" -> "init_pyembed" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "propagate_uselib_vars" -> "init_perlext" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_link" -> "apply_bundle" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_link" -> "process_source" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_link" -> "set_lib_pat" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_link" -> "init_rubyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_link" -> "apply_ruby_so_name" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_link" -> "init_pyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_link" -> "init_perlext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + size="8.0, 12.0"; + "make_test" [style="setlinewidth(0.5)",URL="../tools/waf_unit_test.html#waflib.Tools.waf_unit_test.make_test",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_incpaths" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_incpaths",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "set_bundle" [style="setlinewidth(0.5)",URL="../tools/python.html#waflib.Tools.python.set_bundle",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "set_lib_pat" [style="setlinewidth(0.5)",URL="../tools/fc_config.html#waflib.Tools.fc_config.set_lib_pat",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_flags_ifort" [style="setlinewidth(0.5)",URL="../tools/ifort.html#waflib.Tools.ifort.apply_flags_ifort",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "apply_bundle" [style="setlinewidth(0.5)",URL="../tools/c_osx.html#waflib.Tools.c_osx.apply_bundle",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "process_use" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.process_use",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "apply_implib" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_implib",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_ruby_so_name" [style="setlinewidth(0.5)",URL="../tools/ruby.html#waflib.Tools.ruby.apply_ruby_so_name",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "set_full_paths_hpux" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.set_full_paths_hpux",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "test_exec_fun" [style="setlinewidth(0.5)",URL="../tools/c_config.html#waflib.Tools.c_config.test_exec_fun",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, 
Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_link" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_link",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "init_pyext" [style="setlinewidth(0.5)",URL="../tools/python.html#waflib.Tools.python.init_pyext",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "init_rubyext" [style="setlinewidth(0.5)",URL="../tools/ruby.html#waflib.Tools.ruby.init_rubyext",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_vnum" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_vnum",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "create_task_macplist" [style="setlinewidth(0.5)",URL="../tools/c_osx.html#waflib.Tools.c_osx.create_task_macplist",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "make_windows_app" [style="setlinewidth(0.5)",URL="../tools/msvc.html#waflib.Tools.msvc.make_windows_app",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_manifest" [style="setlinewidth(0.5)",URL="../tools/msvc.html#waflib.Tools.msvc.apply_manifest",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "process_source" [style="setlinewidth(0.5)",URL="../TaskGen.html#waflib.TaskGen.process_source",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_manifest_ifort" [style="setlinewidth(0.5)",URL="../tools/ifort.html#waflib.Tools.ifort.apply_manifest_ifort",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "make_winphone_app" [style="setlinewidth(0.5)",URL="../tools/msvc.html#waflib.Tools.msvc.make_winphone_app",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "set_classpath" [style="setlinewidth(0.5)",URL="../tools/javaw.html#waflib.Tools.javaw.set_classpath",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "init_pyembed" [style="setlinewidth(0.5)",URL="../tools/python.html#waflib.Tools.python.init_pyembed",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "init_perlext" [style="setlinewidth(0.5)",URL="../tools/perl.html#waflib.Tools.perl.init_perlext",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "create_task_macapp" [style="setlinewidth(0.5)",URL="../tools/c_osx.html#waflib.Tools.c_osx.create_task_macapp",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_qt5" [style="setlinewidth(0.5)",URL="../tools/qt5.html#waflib.Tools.qt5.apply_qt5",target="_top",fontname="Vera 
Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_flags_msvc" [style="setlinewidth(0.5)",URL="../tools/msvc.html#waflib.Tools.msvc.apply_flags_msvc",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "propagate_uselib_vars" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.propagate_uselib_vars",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "make_test" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "make_test" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_incpaths" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_incpaths" -> "process_source" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_incpaths" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_incpaths" -> "init_rubyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_incpaths" -> "init_perlext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_flags_ifort" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "process_use" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "process_use" -> "process_source" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_qt5" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "set_full_paths_hpux" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "set_full_paths_hpux" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_link" -> "apply_bundle" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_link" -> "process_source" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_link" -> "set_lib_pat" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_link" -> "init_rubyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_link" -> "apply_ruby_so_name" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_link" -> "init_perlext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_link" -> "init_pyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_link" -> "set_bundle" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_vnum" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_vnum" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "create_task_macplist" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "make_windows_app" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "make_windows_app" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_manifest" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "test_exec_fun" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_manifest_ifort" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "make_winphone_app" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "make_winphone_app" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "set_classpath" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "create_task_macapp" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_implib" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_flags_msvc" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "apply_bundle" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> 
"process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "set_lib_pat" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "apply_ruby_so_name" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "init_perlext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "init_pyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "init_pyembed" [arrowsize=0.5,style="setlinewidth(0.5)"]; } @@ -393,8 +658,10 @@ Feature fcprogram .. graphviz:: digraph feature_fcprogram { - size="8.0, 12.0"; - "dummy" [style="setlinewidth(0.5)",URL="tools/fc.html#waflib.Tools.fc.dummy",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; + size="8.0, 12.0"; + "apply_manifest_ifort" [style="setlinewidth(0.5)",URL="../tools/ifort.html#waflib.Tools.ifort.apply_manifest_ifort",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "apply_link" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_link",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_manifest_ifort" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; } @@ -405,8 +672,10 @@ Feature fcprogram_test .. graphviz:: digraph feature_fcprogram_test { - size="8.0, 12.0"; - "dummy" [style="setlinewidth(0.5)",URL="tools/fc.html#waflib.Tools.fc.dummy",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; + size="8.0, 12.0"; + "apply_manifest_ifort" [style="setlinewidth(0.5)",URL="../tools/ifort.html#waflib.Tools.ifort.apply_manifest_ifort",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "apply_link" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_link",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_manifest_ifort" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; } @@ -417,69 +686,54 @@ Feature fcshlib .. 
graphviz:: digraph feature_fcshlib { - size="8.0, 12.0"; - "dummy" [style="setlinewidth(0.5)",URL="tools/fc.html#waflib.Tools.fc.dummy",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "apply_vnum" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.apply_vnum",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "apply_link" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.apply_link",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "apply_vnum" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + size="8.0, 12.0"; + "apply_implib" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_implib",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "propagate_uselib_vars" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.propagate_uselib_vars",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_vnum" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_vnum",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "apply_link" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_link",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_manifest_ifort" [style="setlinewidth(0.5)",URL="../tools/ifort.html#waflib.Tools.ifort.apply_manifest_ifort",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "apply_vnum" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_vnum" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_manifest_ifort" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_implib" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; } -Feature fcstlib -=============== +Feature glib2 +============= .. graphviz:: - digraph feature_fcstlib { - size="8.0, 12.0"; - "dummy" [style="setlinewidth(0.5)",URL="tools/fc.html#waflib.Tools.fc.dummy",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; + digraph feature_glib2 { + size="8.0, 12.0"; + "process_settings" [style="setlinewidth(0.5)",URL="../tools/glib2.html#waflib.Tools.glib2.process_settings",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; } -Feature glib2 -============= +Feature grep_for_endianness +=========================== .. 
graphviz:: - digraph feature_glib2 { - size="8.0, 12.0"; - "process_settings" [style="setlinewidth(0.5)",URL="tools/glib2.html#waflib.Tools.glib2.process_settings",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; + digraph feature_grep_for_endianness { + size="8.0, 12.0"; + "grep_for_endianness_fun" [style="setlinewidth(0.5)",URL="../tools/c_tests.html#waflib.Tools.c_tests.grep_for_endianness_fun",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "process_source" [style="setlinewidth(0.5)",URL="../TaskGen.html#waflib.TaskGen.process_source",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "grep_for_endianness_fun" -> "process_source" [arrowsize=0.5,style="setlinewidth(0.5)"]; } -Feature go -========== +Feature gresource +================= .. graphviz:: - digraph feature_go { - size="8.0, 12.0"; - "process_use" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.process_use",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "init_perlext" [style="setlinewidth(0.5)",URL="tools/perl.html#waflib.Tools.perl.init_perlext",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "apply_ruby_so_name" [style="setlinewidth(0.5)",URL="tools/ruby.html#waflib.Tools.ruby.apply_ruby_so_name",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "process_source" [style="setlinewidth(0.5)",URL="TaskGen.html#waflib.TaskGen.process_source",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "apply_incpaths" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.apply_incpaths",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "apply_link" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.apply_link",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "init_pyext" [style="setlinewidth(0.5)",URL="tools/python.html#waflib.Tools.python.init_pyext",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "init_rubyext" [style="setlinewidth(0.5)",URL="tools/ruby.html#waflib.Tools.ruby.init_rubyext",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "set_lib_pat" [style="setlinewidth(0.5)",URL="tools/fc_config.html#waflib.Tools.fc_config.set_lib_pat",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "propagate_uselib_vars" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.propagate_uselib_vars",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "apply_bundle" [style="setlinewidth(0.5)",URL="tools/c_osx.html#waflib.Tools.c_osx.apply_bundle",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "apply_incpaths" -> 
"propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_incpaths" -> "process_source" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_incpaths" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_incpaths" -> "init_rubyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_incpaths" -> "init_perlext" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_link" -> "apply_bundle" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_link" -> "process_source" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_link" -> "set_lib_pat" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_link" -> "init_rubyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_link" -> "apply_ruby_so_name" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_link" -> "init_pyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_link" -> "init_perlext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + digraph feature_gresource { + size="8.0, 12.0"; + "process_gresource_bundle" [style="setlinewidth(0.5)",URL="../tools/glib2.html#waflib.Tools.glib2.process_gresource_bundle",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; } @@ -490,18 +744,34 @@ Feature includes .. graphviz:: digraph feature_includes { - size="8.0, 12.0"; - "process_use" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.process_use",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "init_perlext" [style="setlinewidth(0.5)",URL="tools/perl.html#waflib.Tools.perl.init_perlext",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "process_source" [style="setlinewidth(0.5)",URL="TaskGen.html#waflib.TaskGen.process_source",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "apply_incpaths" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.apply_incpaths",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "init_rubyext" [style="setlinewidth(0.5)",URL="tools/ruby.html#waflib.Tools.ruby.init_rubyext",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "propagate_uselib_vars" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.propagate_uselib_vars",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "apply_incpaths" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_incpaths" -> "process_source" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_incpaths" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_incpaths" -> "init_rubyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "apply_incpaths" -> "init_perlext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + size="8.0, 12.0"; + "process_use" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.process_use",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "init_perlext" [style="setlinewidth(0.5)",URL="../tools/perl.html#waflib.Tools.perl.init_perlext",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, 
sans",height=0.25,shape="rectangle",fontsize=10]; + "process_source" [style="setlinewidth(0.5)",URL="../TaskGen.html#waflib.TaskGen.process_source",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_incpaths" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_incpaths",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "init_rubyext" [style="setlinewidth(0.5)",URL="../tools/ruby.html#waflib.Tools.ruby.init_rubyext",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "propagate_uselib_vars" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.propagate_uselib_vars",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_incpaths" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_incpaths" -> "process_source" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_incpaths" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_incpaths" -> "init_rubyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_incpaths" -> "init_perlext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + } + + + +Feature install_task +==================== + +.. graphviz:: + + digraph feature_install_task { + size="8.0, 12.0"; + "process_rule" [style="setlinewidth(0.5)",URL="../TaskGen.html#waflib.TaskGen.process_rule",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "process_install_task" [style="setlinewidth(0.5)",URL="../Build.html#waflib.Build.process_install_task",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "process_source" [style="setlinewidth(0.5)",URL="../TaskGen.html#waflib.TaskGen.process_source",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "process_source" -> "process_install_task" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "process_rule" -> "process_install_task" [arrowsize=0.5,style="setlinewidth(0.5)"]; } @@ -512,8 +782,10 @@ Feature intltool_in .. graphviz:: digraph feature_intltool_in { - size="8.0, 12.0"; - "apply_intltool_in_f" [style="setlinewidth(0.5)",URL="tools/intltool.html#waflib.Tools.intltool.apply_intltool_in_f",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; + size="8.0, 12.0"; + "apply_intltool_in_f" [style="setlinewidth(0.5)",URL="../tools/intltool.html#waflib.Tools.intltool.apply_intltool_in_f",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "process_source" [style="setlinewidth(0.5)",URL="../TaskGen.html#waflib.TaskGen.process_source",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "process_source" -> "apply_intltool_in_f" [arrowsize=0.5,style="setlinewidth(0.5)"]; } @@ -524,8 +796,8 @@ Feature intltool_po .. 
graphviz:: digraph feature_intltool_po { - size="8.0, 12.0"; - "apply_intltool_po" [style="setlinewidth(0.5)",URL="tools/intltool.html#waflib.Tools.intltool.apply_intltool_po",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; + size="8.0, 12.0"; + "apply_intltool_po" [style="setlinewidth(0.5)",URL="../tools/intltool.html#waflib.Tools.intltool.apply_intltool_po",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; } @@ -536,14 +808,16 @@ Feature jar .. graphviz:: digraph feature_jar { - size="8.0, 12.0"; - "apply_java" [style="setlinewidth(0.5)",URL="tools/javaw.html#waflib.Tools.javaw.apply_java",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "jar_files" [style="setlinewidth(0.5)",URL="tools/javaw.html#waflib.Tools.javaw.jar_files",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "use_javac_files" [style="setlinewidth(0.5)",URL="tools/javaw.html#waflib.Tools.javaw.use_javac_files",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "use_jar_files" [style="setlinewidth(0.5)",URL="tools/javaw.html#waflib.Tools.javaw.use_jar_files",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "jar_files" -> "apply_java" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "jar_files" -> "use_javac_files" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "use_jar_files" -> "jar_files" [arrowsize=0.5,style="setlinewidth(0.5)"]; + size="8.0, 12.0"; + "apply_java" [style="setlinewidth(0.5)",URL="../tools/javaw.html#waflib.Tools.javaw.apply_java",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "jar_files" [style="setlinewidth(0.5)",URL="../tools/javaw.html#waflib.Tools.javaw.jar_files",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "use_javac_files" [style="setlinewidth(0.5)",URL="../tools/javaw.html#waflib.Tools.javaw.use_javac_files",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "use_jar_files" [style="setlinewidth(0.5)",URL="../tools/javaw.html#waflib.Tools.javaw.use_jar_files",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "process_source" [style="setlinewidth(0.5)",URL="../TaskGen.html#waflib.TaskGen.process_source",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "use_jar_files" -> "jar_files" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "process_source" -> "jar_files" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "jar_files" -> "apply_java" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "jar_files" -> "use_javac_files" [arrowsize=0.5,style="setlinewidth(0.5)"]; } @@ -554,63 +828,84 @@ Feature javac .. 
graphviz:: digraph feature_javac { - size="8.0, 12.0"; - "process_use" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.process_use",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "init_perlext" [style="setlinewidth(0.5)",URL="tools/perl.html#waflib.Tools.perl.init_perlext",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "apply_java" [style="setlinewidth(0.5)",URL="tools/javaw.html#waflib.Tools.javaw.apply_java",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "init_pyext" [style="setlinewidth(0.5)",URL="tools/python.html#waflib.Tools.python.init_pyext",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "set_lib_pat" [style="setlinewidth(0.5)",URL="tools/fc_config.html#waflib.Tools.fc_config.set_lib_pat",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "propagate_uselib_vars" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.propagate_uselib_vars",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "init_pyembed" [style="setlinewidth(0.5)",URL="tools/python.html#waflib.Tools.python.init_pyembed",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "use_javac_files" [style="setlinewidth(0.5)",URL="tools/javaw.html#waflib.Tools.javaw.use_javac_files",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "set_classpath" [style="setlinewidth(0.5)",URL="tools/javaw.html#waflib.Tools.javaw.set_classpath",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "apply_bundle" [style="setlinewidth(0.5)",URL="tools/c_osx.html#waflib.Tools.c_osx.apply_bundle",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "propagate_uselib_vars" -> "apply_bundle" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "propagate_uselib_vars" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "propagate_uselib_vars" -> "set_lib_pat" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "propagate_uselib_vars" -> "init_pyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "propagate_uselib_vars" -> "init_pyembed" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "propagate_uselib_vars" -> "init_perlext" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "use_javac_files" -> "apply_java" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "set_classpath" -> "apply_java" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "set_classpath" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "set_classpath" -> "use_javac_files" [arrowsize=0.5,style="setlinewidth(0.5)"]; + size="8.0, 12.0"; + "make_windows_app" [style="setlinewidth(0.5)",URL="../tools/msvc.html#waflib.Tools.msvc.make_windows_app",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "process_use" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.process_use",target="_top",fontname="Vera 
Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "init_perlext" [style="setlinewidth(0.5)",URL="../tools/perl.html#waflib.Tools.perl.init_perlext",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_ruby_so_name" [style="setlinewidth(0.5)",URL="../tools/ruby.html#waflib.Tools.ruby.apply_ruby_so_name",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "process_source" [style="setlinewidth(0.5)",URL="../TaskGen.html#waflib.TaskGen.process_source",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_java" [style="setlinewidth(0.5)",URL="../tools/javaw.html#waflib.Tools.javaw.apply_java",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "jar_files" [style="setlinewidth(0.5)",URL="../tools/javaw.html#waflib.Tools.javaw.jar_files",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "init_pyext" [style="setlinewidth(0.5)",URL="../tools/python.html#waflib.Tools.python.init_pyext",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "init_pyembed" [style="setlinewidth(0.5)",URL="../tools/python.html#waflib.Tools.python.init_pyembed",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "make_winphone_app" [style="setlinewidth(0.5)",URL="../tools/msvc.html#waflib.Tools.msvc.make_winphone_app",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_incpaths" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_incpaths",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "set_lib_pat" [style="setlinewidth(0.5)",URL="../tools/fc_config.html#waflib.Tools.fc_config.set_lib_pat",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "propagate_uselib_vars" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.propagate_uselib_vars",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "apply_vnum" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_vnum",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "use_javac_files" [style="setlinewidth(0.5)",URL="../tools/javaw.html#waflib.Tools.javaw.use_javac_files",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "set_classpath" [style="setlinewidth(0.5)",URL="../tools/javaw.html#waflib.Tools.javaw.set_classpath",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, 
sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "apply_bundle" [style="setlinewidth(0.5)",URL="../tools/c_osx.html#waflib.Tools.c_osx.apply_bundle",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_incpaths" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "use_javac_files" -> "apply_java" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "process_source" -> "apply_java" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_vnum" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "make_windows_app" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "jar_files" -> "apply_java" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "jar_files" -> "use_javac_files" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "make_winphone_app" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "set_classpath" -> "apply_java" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "set_classpath" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "set_classpath" -> "use_javac_files" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "apply_bundle" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "set_lib_pat" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "apply_ruby_so_name" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "init_perlext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "init_pyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "init_pyembed" [arrowsize=0.5,style="setlinewidth(0.5)"]; } -Feature link_lib_test -===================== +Feature javadoc +=============== .. graphviz:: - digraph feature_link_lib_test { - size="8.0, 12.0"; - "link_lib_test_fun" [style="setlinewidth(0.5)",URL="tools/c_tests.html#waflib.Tools.c_tests.link_lib_test_fun",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; + digraph feature_javadoc { + size="8.0, 12.0"; + "process_rule" [style="setlinewidth(0.5)",URL="../TaskGen.html#waflib.TaskGen.process_rule",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "create_javadoc" [style="setlinewidth(0.5)",URL="../tools/javaw.html#waflib.Tools.javaw.create_javadoc",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "create_javadoc" -> "process_rule" [arrowsize=0.5,style="setlinewidth(0.5)"]; } -Feature link_main_routines_func -=============================== +Feature link_lib_test +===================== .. 
graphviz:: - digraph feature_link_main_routines_func { - size="8.0, 12.0"; - "link_main_routines_tg_method" [style="setlinewidth(0.5)",URL="tools/fc_config.html#waflib.Tools.fc_config.link_main_routines_tg_method",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; + digraph feature_link_lib_test { + size="8.0, 12.0"; + "link_lib_test_fun" [style="setlinewidth(0.5)",URL="../tools/c_tests.html#waflib.Tools.c_tests.link_lib_test_fun",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "process_source" [style="setlinewidth(0.5)",URL="../TaskGen.html#waflib.TaskGen.process_source",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "process_source" -> "link_lib_test_fun" [arrowsize=0.5,style="setlinewidth(0.5)"]; } -Feature msgfmt -============== +Feature link_main_routines_func +=============================== .. graphviz:: - digraph feature_msgfmt { - size="8.0, 12.0"; - "apply_msgfmt" [style="setlinewidth(0.5)",URL="tools/kde4.html#waflib.Tools.kde4.apply_msgfmt",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; + digraph feature_link_main_routines_func { + size="8.0, 12.0"; + "link_main_routines_tg_method" [style="setlinewidth(0.5)",URL="../tools/fc_config.html#waflib.Tools.fc_config.link_main_routines_tg_method",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "process_source" [style="setlinewidth(0.5)",URL="../TaskGen.html#waflib.TaskGen.process_source",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "process_source" -> "link_main_routines_tg_method" [arrowsize=0.5,style="setlinewidth(0.5)"]; } @@ -621,8 +916,14 @@ Feature perlext .. 
graphviz:: digraph feature_perlext { - size="8.0, 12.0"; - "init_perlext" [style="setlinewidth(0.5)",URL="tools/perl.html#waflib.Tools.perl.init_perlext",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; + size="8.0, 12.0"; + "apply_incpaths" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_incpaths",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "propagate_uselib_vars" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.propagate_uselib_vars",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "init_perlext" [style="setlinewidth(0.5)",URL="../tools/perl.html#waflib.Tools.perl.init_perlext",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "apply_link" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_link",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_incpaths" -> "init_perlext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_link" -> "init_perlext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "init_perlext" [arrowsize=0.5,style="setlinewidth(0.5)"]; } @@ -633,8 +934,10 @@ Feature py .. graphviz:: digraph feature_py { - size="8.0, 12.0"; - "feature_py" [style="setlinewidth(0.5)",URL="tools/python.html#waflib.Tools.python.feature_py",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; + size="8.0, 12.0"; + "feature_py" [style="setlinewidth(0.5)",URL="../tools/python.html#waflib.Tools.python.feature_py",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "process_source" [style="setlinewidth(0.5)",URL="../TaskGen.html#waflib.TaskGen.process_source",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "process_source" -> "feature_py" [arrowsize=0.5,style="setlinewidth(0.5)"]; } @@ -645,8 +948,10 @@ Feature pyembed .. graphviz:: digraph feature_pyembed { - size="8.0, 12.0"; - "init_pyembed" [style="setlinewidth(0.5)",URL="tools/python.html#waflib.Tools.python.init_pyembed",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; + size="8.0, 12.0"; + "propagate_uselib_vars" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.propagate_uselib_vars",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "init_pyembed" [style="setlinewidth(0.5)",URL="../tools/python.html#waflib.Tools.python.init_pyembed",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "propagate_uselib_vars" -> "init_pyembed" [arrowsize=0.5,style="setlinewidth(0.5)"]; } @@ -657,23 +962,37 @@ Feature pyext .. 
graphviz:: digraph feature_pyext { - size="8.0, 12.0"; - "set_lib_pat" [style="setlinewidth(0.5)",URL="tools/fc_config.html#waflib.Tools.fc_config.set_lib_pat",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "init_pyext" [style="setlinewidth(0.5)",URL="tools/python.html#waflib.Tools.python.init_pyext",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; + size="8.0, 12.0"; + "apply_link" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_link",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "init_pyext" [style="setlinewidth(0.5)",URL="../tools/python.html#waflib.Tools.python.init_pyext",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "set_bundle" [style="setlinewidth(0.5)",URL="../tools/python.html#waflib.Tools.python.set_bundle",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "set_lib_pat" [style="setlinewidth(0.5)",URL="../tools/fc_config.html#waflib.Tools.fc_config.set_lib_pat",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "propagate_uselib_vars" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.propagate_uselib_vars",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_bundle" [style="setlinewidth(0.5)",URL="../tools/c_osx.html#waflib.Tools.c_osx.apply_bundle",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_bundle" -> "set_bundle" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_link" -> "set_lib_pat" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_link" -> "init_pyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_link" -> "set_bundle" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "init_pyext" -> "apply_bundle" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "set_lib_pat" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "init_pyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; } -Feature qt4 +Feature qt5 =========== .. 
graphviz:: - digraph feature_qt4 { - size="8.0, 12.0"; - "apply_qt4" [style="setlinewidth(0.5)",URL="tools/qt4.html#waflib.Tools.qt4.apply_qt4",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "apply_link" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.apply_link",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "apply_qt4" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + digraph feature_qt5 { + size="8.0, 12.0"; + "apply_qt5" [style="setlinewidth(0.5)",URL="../tools/qt5.html#waflib.Tools.qt5.apply_qt5",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "process_mocs" [style="setlinewidth(0.5)",URL="../tools/qt5.html#waflib.Tools.qt5.process_mocs",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "apply_link" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_link",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "process_source" [style="setlinewidth(0.5)",URL="../TaskGen.html#waflib.TaskGen.process_source",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_qt5" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "process_source" -> "process_mocs" [arrowsize=0.5,style="setlinewidth(0.5)"]; } @@ -684,9 +1003,20 @@ Feature rubyext .. 
graphviz:: digraph feature_rubyext { - size="8.0, 12.0"; - "init_rubyext" [style="setlinewidth(0.5)",URL="tools/ruby.html#waflib.Tools.ruby.init_rubyext",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "apply_ruby_so_name" [style="setlinewidth(0.5)",URL="tools/ruby.html#waflib.Tools.ruby.apply_ruby_so_name",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; + size="8.0, 12.0"; + "apply_ruby_so_name" [style="setlinewidth(0.5)",URL="../tools/ruby.html#waflib.Tools.ruby.apply_ruby_so_name",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "process_source" [style="setlinewidth(0.5)",URL="../TaskGen.html#waflib.TaskGen.process_source",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_incpaths" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_incpaths",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_link" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_link",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "init_rubyext" [style="setlinewidth(0.5)",URL="../tools/ruby.html#waflib.Tools.ruby.init_rubyext",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "propagate_uselib_vars" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.propagate_uselib_vars",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_bundle" [style="setlinewidth(0.5)",URL="../tools/c_osx.html#waflib.Tools.c_osx.apply_bundle",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_incpaths" -> "init_rubyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_bundle" -> "init_rubyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "process_source" -> "init_rubyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_link" -> "init_rubyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_link" -> "apply_ruby_so_name" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "apply_ruby_so_name" [arrowsize=0.5,style="setlinewidth(0.5)"]; } @@ -697,8 +1027,8 @@ Feature seq .. graphviz:: digraph feature_seq { - size="8.0, 12.0"; - "sequence_order" [style="setlinewidth(0.5)",URL="TaskGen.html#waflib.TaskGen.sequence_order",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; + size="8.0, 12.0"; + "sequence_order" [style="setlinewidth(0.5)",URL="../TaskGen.html#waflib.TaskGen.sequence_order",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; } @@ -709,8 +1039,12 @@ Feature subst .. 
graphviz:: digraph feature_subst { - size="8.0, 12.0"; - "process_subst" [style="setlinewidth(0.5)",URL="TaskGen.html#waflib.TaskGen.process_subst",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; + size="8.0, 12.0"; + "process_rule" [style="setlinewidth(0.5)",URL="../TaskGen.html#waflib.TaskGen.process_rule",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "process_subst" [style="setlinewidth(0.5)",URL="../TaskGen.html#waflib.TaskGen.process_subst",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "process_source" [style="setlinewidth(0.5)",URL="../TaskGen.html#waflib.TaskGen.process_source",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "process_source" -> "process_subst" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "process_rule" -> "process_subst" [arrowsize=0.5,style="setlinewidth(0.5)"]; } @@ -721,10 +1055,12 @@ Feature test .. graphviz:: digraph feature_test { - size="8.0, 12.0"; - "make_test" [style="setlinewidth(0.5)",URL="tools/waf_unit_test.html#waflib.Tools.waf_unit_test.make_test",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "apply_link" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.apply_link",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "make_test" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + size="8.0, 12.0"; + "make_test" [style="setlinewidth(0.5)",URL="../tools/waf_unit_test.html#waflib.Tools.waf_unit_test.make_test",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "process_use" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.process_use",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_link" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_link",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "make_test" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "make_test" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; } @@ -735,10 +1071,10 @@ Feature test_exec .. 
graphviz:: digraph feature_test_exec { - size="8.0, 12.0"; - "apply_link" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.apply_link",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "test_exec_fun" [style="setlinewidth(0.5)",URL="tools/c_config.html#waflib.Tools.c_config.test_exec_fun",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "test_exec_fun" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + size="8.0, 12.0"; + "apply_link" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_link",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "test_exec_fun" [style="setlinewidth(0.5)",URL="../tools/c_config.html#waflib.Tools.c_config.test_exec_fun",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "test_exec_fun" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; } @@ -749,8 +1085,10 @@ Feature tex .. graphviz:: digraph feature_tex { - size="8.0, 12.0"; - "apply_tex" [style="setlinewidth(0.5)",URL="tools/tex.html#waflib.Tools.tex.apply_tex",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; + size="8.0, 12.0"; + "process_source" [style="setlinewidth(0.5)",URL="../TaskGen.html#waflib.TaskGen.process_source",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_tex" [style="setlinewidth(0.5)",URL="../tools/tex.html#waflib.Tools.tex.apply_tex",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "process_source" -> "apply_tex" [arrowsize=0.5,style="setlinewidth(0.5)"]; } @@ -761,12 +1099,24 @@ Feature use .. 
graphviz:: digraph feature_use { - size="8.0, 12.0"; - "process_use" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.process_use",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "apply_link" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.apply_link",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "process_source" [style="setlinewidth(0.5)",URL="TaskGen.html#waflib.TaskGen.process_source",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "process_use" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "process_use" -> "process_source" [arrowsize=0.5,style="setlinewidth(0.5)"]; + size="8.0, 12.0"; + "process_use" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.process_use",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "make_test" [style="setlinewidth(0.5)",URL="../tools/waf_unit_test.html#waflib.Tools.waf_unit_test.make_test",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "process_source" [style="setlinewidth(0.5)",URL="../TaskGen.html#waflib.TaskGen.process_source",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "set_full_paths_hpux" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.set_full_paths_hpux",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_incpaths" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_incpaths",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_link" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_link",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "make_windows_app" [style="setlinewidth(0.5)",URL="../tools/msvc.html#waflib.Tools.msvc.make_windows_app",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "make_winphone_app" [style="setlinewidth(0.5)",URL="../tools/msvc.html#waflib.Tools.msvc.make_winphone_app",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "propagate_uselib_vars" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.propagate_uselib_vars",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "make_test" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_incpaths" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "process_use" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "process_use" -> "process_source" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "set_full_paths_hpux" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "make_windows_app" -> "process_use" 
[arrowsize=0.5,style="setlinewidth(0.5)"]; + "make_winphone_app" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; } @@ -777,20 +1127,32 @@ Feature uselib .. graphviz:: digraph feature_uselib { - size="8.0, 12.0"; - "process_use" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.process_use",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "init_perlext" [style="setlinewidth(0.5)",URL="tools/perl.html#waflib.Tools.perl.init_perlext",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "init_pyext" [style="setlinewidth(0.5)",URL="tools/python.html#waflib.Tools.python.init_pyext",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "set_lib_pat" [style="setlinewidth(0.5)",URL="tools/fc_config.html#waflib.Tools.fc_config.set_lib_pat",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "propagate_uselib_vars" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.propagate_uselib_vars",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "apply_bundle" [style="setlinewidth(0.5)",URL="tools/c_osx.html#waflib.Tools.c_osx.apply_bundle",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "init_pyembed" [style="setlinewidth(0.5)",URL="tools/python.html#waflib.Tools.python.init_pyembed",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "propagate_uselib_vars" -> "apply_bundle" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "propagate_uselib_vars" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "propagate_uselib_vars" -> "set_lib_pat" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "propagate_uselib_vars" -> "init_pyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "propagate_uselib_vars" -> "init_pyembed" [arrowsize=0.5,style="setlinewidth(0.5)"]; - "propagate_uselib_vars" -> "init_perlext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + size="8.0, 12.0"; + "make_windows_app" [style="setlinewidth(0.5)",URL="../tools/msvc.html#waflib.Tools.msvc.make_windows_app",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "process_use" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.process_use",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "init_perlext" [style="setlinewidth(0.5)",URL="../tools/perl.html#waflib.Tools.perl.init_perlext",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_ruby_so_name" [style="setlinewidth(0.5)",URL="../tools/ruby.html#waflib.Tools.ruby.apply_ruby_so_name",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_incpaths" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_incpaths",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, 
sans",height=0.25,shape="rectangle",fontsize=10]; + "init_pyembed" [style="setlinewidth(0.5)",URL="../tools/python.html#waflib.Tools.python.init_pyembed",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "init_pyext" [style="setlinewidth(0.5)",URL="../tools/python.html#waflib.Tools.python.init_pyext",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "make_winphone_app" [style="setlinewidth(0.5)",URL="../tools/msvc.html#waflib.Tools.msvc.make_winphone_app",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "set_lib_pat" [style="setlinewidth(0.5)",URL="../tools/fc_config.html#waflib.Tools.fc_config.set_lib_pat",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "propagate_uselib_vars" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.propagate_uselib_vars",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "apply_vnum" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_vnum",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "set_classpath" [style="setlinewidth(0.5)",URL="../tools/javaw.html#waflib.Tools.javaw.set_classpath",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_bundle" [style="setlinewidth(0.5)",URL="../tools/c_osx.html#waflib.Tools.c_osx.apply_bundle",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_incpaths" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_vnum" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "make_windows_app" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "make_winphone_app" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "set_classpath" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "apply_bundle" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "set_lib_pat" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "apply_ruby_so_name" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "init_perlext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "init_pyext" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "propagate_uselib_vars" -> "init_pyembed" [arrowsize=0.5,style="setlinewidth(0.5)"]; } @@ -801,10 +1163,44 @@ Feature vnum .. 
graphviz:: digraph feature_vnum { - size="8.0, 12.0"; - "apply_vnum" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.apply_vnum",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10,fillcolor="#fffea6",style=filled]; - "apply_link" [style="setlinewidth(0.5)",URL="tools/ccroot.html#waflib.Tools.ccroot.apply_link",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape=box,fontsize=10]; - "apply_vnum" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + size="8.0, 12.0"; + "propagate_uselib_vars" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.propagate_uselib_vars",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_vnum" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_vnum",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "apply_link" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.apply_link",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "apply_vnum" -> "apply_link" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "apply_vnum" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; + } + + + +Feature winapp +============== + +.. graphviz:: + + digraph feature_winapp { + size="8.0, 12.0"; + "make_windows_app" [style="setlinewidth(0.5)",URL="../tools/msvc.html#waflib.Tools.msvc.make_windows_app",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "process_use" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.process_use",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "propagate_uselib_vars" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.propagate_uselib_vars",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "make_windows_app" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "make_windows_app" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; + } + + + +Feature winphoneapp +=================== + +.. 
graphviz:: + + digraph feature_winphoneapp { + size="8.0, 12.0"; + "process_use" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.process_use",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "propagate_uselib_vars" [style="setlinewidth(0.5)",URL="../tools/ccroot.html#waflib.Tools.ccroot.propagate_uselib_vars",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10]; + "make_winphone_app" [style="setlinewidth(0.5)",URL="../tools/msvc.html#waflib.Tools.msvc.make_winphone_app",target="_top",fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",height=0.25,shape="rectangle",fontsize=10,fillcolor="#fffea6",style=filled]; + "make_winphone_app" -> "propagate_uselib_vars" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "make_winphone_app" -> "process_use" [arrowsize=0.5,style="setlinewidth(0.5)"]; } diff --git a/docs/sphinx/featuremap_example.txt b/docs/sphinx/featuremap_example.txt index a7a408a826..91a9b22da2 100644 --- a/docs/sphinx/featuremap_example.txt +++ b/docs/sphinx/featuremap_example.txt @@ -1,34 +1,36 @@ +The Waf `feature` system weaves additional behaviors onto existing targets (task generators) with no changes to the build scripts. The main benefits are flexibility and reduced maintenance effort. This approach is comparable to CSS class declarations in web design. -The Waf features are names linked to specific functions by the decorator -:py:func:`waflib.TaskGen.feature`. The functions -are mapped to the class :py:class:`waflib.TaskGen.task_gen` as methods. +The `features` themselves are declared by annotating functions +through a specific Python decorator function :py:func:`waflib.TaskGen.feature` and passing a name. +Such functions then become methods bound to the Task generator class :py:class:`waflib.TaskGen.task_gen`. -The association between feature names and methods is *many-to-many*, which means -that a method may be involved in several features, and that a feature may be bound -to several methods. - -Here is how to create and use a new feature named **foo**:: +The association between feature names and methods is *many-to-many*; in other words +a method may be used by several features, and a feature may reference +several methods. Here is, for example, how to bind the function *print_hello* +to the feature named **foo**:: from waflib.TaskGen import feature @feature('foo') def print_hello(self): print("Hello, World!") -The function *print_hello* is now associated with the :py:class:`waflib.TaskGen.task_gen` class, which means -that it may be used directly:: +Since the function *print_hello* is now associated with the :py:class:`waflib.TaskGen.task_gen` class, +it becomes usable as a method:: def build(bld): tg = bld() tg.print_hello() -The method may be called directly, and several times. If a method creates task, the same tasks will be created -more than once, which may cause build errors. The *feature* attribute is used to have the associated -methods called *exactly once* before the build starts:: +Calling such methods directly is problematic as it interferes with partial +build declarations and with target dependencies.
+The *feature* attribute enables the methods to be called *exactly once* +before the build starts and in a purely declarative manner:: def build(bld): bld(features='foo') -Here is a more complete example with two methods:: +When several methods are involved, the order of execution must be specified, +otherwise the methods are sorted in alphabetical order:: from waflib.TaskGen import feature, after_method @@ -44,11 +46,10 @@ Here is a more complete example with two methods:: def build(bld): bld(features='foo bar') -The order of method execution is unrelated to the order of the features given. For instance, -this example will print "Hello, Bar!" then "Hello, Foo!". The decorators -:py:func:`waflib.TaskGen.after` and :py:func:`waflib.TaskGen.before` are -enforcing partial order constraints on the methods to execute. +The order of method execution is, however, unrelated to the order in which the features are given. +For instance, this example will print "Hello, Bar!" then "Hello, Foo!". -The following maps represent the associations betwen feature methods (represented in yellow) and -methods associated to other feature names. +The following maps represent the associations between feature methods (represented +in yellow) and methods associated with other feature names. The arrows represent +constraints over the order of execution. diff --git a/docs/sphinx/index.rst b/docs/sphinx/index.rst index 91aa36dbd5..61153ba66e 100644 --- a/docs/sphinx/index.rst +++ b/docs/sphinx/index.rst @@ -8,6 +8,7 @@ Waf documentation tutorial.rst about.rst copyright.rst + confmap.rst featuremap.rst Build.rst diff --git a/docs/sphinx/tools.rst b/docs/sphinx/tools.rst index 48305b228f..6f3dc29d54 100644 --- a/docs/sphinx/tools.rst +++ b/docs/sphinx/tools.rst @@ -124,9 +124,7 @@ The next tools provide support for code generators used in C and C++ projects. tools/dbus tools/vala tools/glib2 - tools/qt4 tools/qt5 - tools/kde4 tools/perl tools/python tools/ruby @@ -134,7 +132,7 @@ The next tools provide support for code generators used in C and C++ projects. Other compilers and tools ------------------------- -.. _extras: https://github.com/waf-project/waf/tree/master/waflib/extras +.. _extras: https://gitlab.com/ita1024/waf/tree/master/waflib/extras The following tools provide support for specific compilers or configurations. More tools are present in the extras_ folder, although they are not documented and as stable as the default tools. @@ -147,4 +145,6 @@ The following tools provide support for specific compilers or configurations. Mo tools/gnu_dirs tools/intltool tools/lua + tools/md5_tstamp + tools/nobuild diff --git a/docs/sphinx/tools/.gitignore b/docs/sphinx/tools/.gitignore new file mode 100644 index 0000000000..30d85567b5 --- /dev/null +++ b/docs/sphinx/tools/.gitignore @@ -0,0 +1 @@ +*.rst diff --git a/docs/sphinx/tutorial.rst b/docs/sphinx/tutorial.rst index 754ac1ec42..1b405fd00a 100644 --- a/docs/sphinx/tutorial.rst +++ b/docs/sphinx/tutorial.rst @@ -24,20 +24,20 @@ In general, a project will consist of several phases: Each phase is modelled in the wscript file as a python function which takes as argument an instance of :py:class:`waflib.Context.Context`.
Let's start with a new wscript file in the directory '/tmp/myproject':: - def configure(conf): + def configure(cnf): print("configure!") def build(bld): print("build!") -We will also use a Waf binary file, for example waf-1.8.8, which we will copy in the project directory:: +We will also use a Waf binary file, for example waf-2.0.0, which we will copy in the project directory:: $ cd /tmp/myproject - $ wget https://waf.io/waf-1.8.8 + $ wget https://waf.io/waf-2.0.0 To execute the project, we will simply call the command as an argument to ``waf``:: - $ ./waf-1.8.8 configure build + $ python ./waf-2.0.0 configure build configure! build! @@ -62,7 +62,7 @@ by using the *${}* symbol, which reads the values from the attribute bld.env:: bld(rule='echo ${MESSAGE}', always=True) The bld object is an instance of :py:class:`waflib.Build.BuildContext`, its *env* attribute is an instance :py:class:`waflib.ConfigSet.ConfigSet`. -The values are set in this object to be shared/stored/loaded easily. Here is how to do the same thing by sharing data between the configuration and build:: +This object is also accessible as an attribute on the `configure()` method's `cnf` parameter. Therefore, values can be shared/stored/loaded easily:: def configure(cnf): cnf.env.MESSAGE = 'Hello, world!' @@ -111,9 +111,9 @@ Here is a script for a more complicated project:: The method :py:func:`waflib.Tools.c_config.check` executes a build internally to check if the library ``libm`` is present on the operating system. It will then define variables such as: -* ``conf.env.LIB_M = ['m']`` -* ``conf.env.CFLAGS_M = ['-Wall']`` -* ``conf.env.DEFINES_M = ['var=foo']`` +* ``cnf.env.LIB_M = ['m']`` +* ``cnf.env.CFLAGS_M = ['-Wall']`` +* ``cnf.env.DEFINES_M = ['var=foo']`` By stating ``use=['M', 'mylib']``, the program *app* is going to inherit all the *M* variables defined during the configuration. The program will also use the library *mylib* and both the build order and the dependencies @@ -165,7 +165,7 @@ The declaration can be made more user-friendly by binding new methods to the con bld.enterprise_program(source='main.c', target='app') The support code may be turned into a Waf tool by moving it to a separate file. -To ease the deployment, the new Waf tool can even be added to the waf file (see https://github.com/waf-project/waf/blob/master/README#L10). +To ease the deployment, the new Waf tool can even be added to the waf file (see https://gitlab.com/ita1024/waf/blob/master/README.md#L20). Conclusion ---------- diff --git a/docs/sphinx/wscript b/docs/sphinx/wscript index 86bc021838..d9fd4b4973 100644 --- a/docs/sphinx/wscript +++ b/docs/sphinx/wscript @@ -19,16 +19,17 @@ def configure(cfg): cfg.find_program('convert', var='CONVERT') cfg.load('daemon', tooldir=['../../playground/daemon/']) cfg.find_program("sphinx-build", var="SPHINX_BUILD") + cfg.env.SPHINX_ARGS = ['-W'] def build(bld): - + bld.path.make_node('_static').mkdir() bld( - rule = "${SPHINX_BUILD} -b html -d %s . %s" % (os.path.join(out, "doctrees"), os.path.join(out, "html")), - cwd = bld.path.abspath(), + rule = "${SPHINX_BUILD} ${SPHINX_ARGS} -b html -d %s . 
%s" % (os.path.join(out, "doctrees"), os.path.join(out, "html")), + cwd = bld.path, source = bld.path.parent.parent.find_dir('waflib').ant_glob('**/*.py') + bld.path.ant_glob('**/*.rst') + bld.path.ant_glob('_templates/indexcontent.html') + bld.path.ant_glob('conf.py'), target = bld.path.find_or_declare('html/index.html') - ) + ) diff --git a/playground/boost_log/main.cpp b/playground/boost_log/main.cpp new file mode 100644 index 0000000000..114b823bd1 --- /dev/null +++ b/playground/boost_log/main.cpp @@ -0,0 +1,11 @@ +#include <boost/log/trivial.hpp> + +int main(int, char*[]) +{ + BOOST_LOG_TRIVIAL(trace) << "A trace severity message"; + BOOST_LOG_TRIVIAL(debug) << "A debug severity message"; + BOOST_LOG_TRIVIAL(info) << "An informational severity message"; + BOOST_LOG_TRIVIAL(warning) << "A warning severity message"; + BOOST_LOG_TRIVIAL(error) << "An error severity message"; + BOOST_LOG_TRIVIAL(fatal) << "A fatal severity message"; +} diff --git a/playground/boost_log/wscript b/playground/boost_log/wscript new file mode 100644 index 0000000000..6bd4e01424 --- /dev/null +++ b/playground/boost_log/wscript @@ -0,0 +1,16 @@ +top = '.' +out = 'build' + +def options(opt): + opt.load('compiler_cxx boost') + +def configure(conf): + conf.load('compiler_cxx boost') + + if conf.options.boost_mt: + conf.check_boost('system thread log log_setup') + else: + conf.check_boost('log log_setup') + +def build(bld): + bld.program(source='main.cpp', target='app', use='BOOST') diff --git a/playground/c-objects-stlib-shlib/wscript b/playground/c-objects-stlib-shlib/wscript index 2e662cacc3..901072e400 100644 --- a/playground/c-objects-stlib-shlib/wscript +++ b/playground/c-objects-stlib-shlib/wscript @@ -14,14 +14,17 @@ def configure(conf): conf.load('compiler_c gnu_dirs') def build(bld): - + bld( features='c', source='test_shlib.c', - use='cshlib', + # it is 'uselib' in this case to avoid propagation of '-shared' + # to the program below.
A more explicit alternative is to set + # cflags=bld.env.CFLAGS_cshlib + uselib='cshlib', target='objects-for-shlib', ) - + bld( features='c', source='test_shlib.c', @@ -47,7 +50,7 @@ def build(bld): source='main.c', use='something-shared', ) - + bld( features='c cprogram', target='exe-static', diff --git a/playground/c/wscript b/playground/c/wscript index bff2be1aa7..7fad6c5f10 100644 --- a/playground/c/wscript +++ b/playground/c/wscript @@ -26,13 +26,9 @@ def configure(conf): conf.check_cc(fragment="""#include <stdio.h>\nint main(){fprintf(stderr, "mu"); printf("%d", 22);return 0;}\n""", execute=True, define_name='HAVE_MU') conf.write_config_header('config.h') - # gotcha - the config.h must be written for each variant - txt = conf.bldnode.search_node('config.h').read() for x in lst: - node = conf.bldnode.make_node(x + '/config.h') - node.parent.mkdir() - node.write(txt) + conf.write_config_header(x + '/config.h') from waflib import Utils, Build class buildall_ctx(Build.BuildContext): @@ -50,7 +46,7 @@ def buildall(ctx): class sub_build(Utils.threading.Thread): def run(self): bld = self.bld = self.cls(top_dir=ctx.top_dir, out_dir=ctx.out_dir) - bld.load() + bld.restore() bld.siblings = threads bld.count = count bld.line_lock = line_lock @@ -96,13 +92,13 @@ def cleanall(ctx): for x in lst: cls = type(Build.CleanContext)(x, (Build.CleanContext,), {'cmd': x, 'variant': x}) bld = cls(top_dir=ctx.top_dir, out_dir=ctx.out_dir) - bld.load() + bld.restore() bld.load_envs() bld.recurse([bld.run_dir]) try: bld.clean() finally: - bld.save() + bld.store() # produces dict/json compatible output diff --git a/docs/book/examples/scenarios_end/a.h b/playground/clang_compilation_database/a.c similarity index 100% rename from docs/book/examples/scenarios_end/a.h rename to playground/clang_compilation_database/a.c diff --git a/playground/clang_compilation_database/b.cpp b/playground/clang_compilation_database/b.cpp new file mode 100644 index 0000000000..e69de29bb2 diff --git a/playground/clang_compilation_database/wscript b/playground/clang_compilation_database/wscript new file mode 100644 index 0000000000..06c2d9f5e9 --- /dev/null +++ b/playground/clang_compilation_database/wscript @@ -0,0 +1,68 @@ +#! /usr/bin/env python +# encoding: utf-8 +# Alibek Omarov, 2019 (a1batross) + +import os +from waflib import ConfigSet, Logs + +VERSION='0.0.1' +APPNAME='clang_compilation_database_test' + +top = '.'
+out = 'build' + +INCLUDES_TO_TEST = ['common'] # check that the include flags appear in the resulting json +DEFINES_TO_TEST = ['TEST'] # check that the define flags appear in the resulting json +SOURCE_FILES_TO_TEST = ['a.c', 'b.cpp'] # check that the source files are present in the database + +def actual_test(bld): + db = bld.bldnode.find_node('compile_commands.json').read_json() + + for entry in db: + env = ConfigSet.ConfigSet() + line = ' '.join(entry['arguments'][1:]) # ignore compiler exe, unneeded + directory = entry['directory'] + srcname = entry['file'].split(os.sep)[-1] # file name only + + bld.parse_flags(line, 'test', env) # ignore unhandled flag, it's harmless for test + + if bld.bldnode.abspath() in directory: + Logs.info('Directory test passed') + else: + Logs.error('Directory test failed') + + if srcname in SOURCE_FILES_TO_TEST: + Logs.info('Source file test passed') + else: + Logs.error('Source file test failed') + + passed = True + for inc in INCLUDES_TO_TEST: + if inc not in env.INCLUDES_test: + passed = False + + if passed: Logs.info('Includes test passed') + else: Logs.error('Includes test failed') + + passed = True + for define in DEFINES_TO_TEST: + if define not in env.DEFINES_test: + passed = False + if passed: Logs.info('Defines test passed') + else: Logs.error('Defines test failed') + +def options(opt): + # check by ./waf clangdb + opt.load('compiler_c compiler_cxx clang_compilation_database') + +def configure(conf): + # check that the database is always generated before the build + conf.load('compiler_c compiler_cxx clang_compilation_database') + +def build(bld): + bld.shlib(features = 'c cxx', source = SOURCE_FILES_TO_TEST, + defines = DEFINES_TO_TEST, + includes = INCLUDES_TO_TEST, + target = 'test') + + bld.add_post_fun(actual_test) diff --git a/playground/clang_cross/msvc/ReadMe.txt b/playground/clang_cross/msvc/ReadMe.txt new file mode 100644 index 0000000000..e82291a4f5 --- /dev/null +++ b/playground/clang_cross/msvc/ReadMe.txt @@ -0,0 +1,14 @@ +To cross-compile for Windows in MSVC mode from Linux, you will require the following: +* A partition with Windows installed (NTFS). +* Visual Studio (Tested with 2017). +* The Windows SDK. +* The lowntfs-3g file system driver. + +Make sure the Windows partition is mounted with "-t lowntfs-3g -o defaults,ignore_case,windows_names". +This will allow Clang to find all headers and libraries referenced by scripts and headers; otherwise you will run into case sensitivity errors. + +Clang uses the following environment variables to detect the Visual Studio install: VCINSTALLDIR, VCToolsInstallDir, INCLUDE, LIB, LIBPATH. +I just copied these from the output of the "set" command in an MSVC command prompt on Windows and translated the paths to Linux paths. +Notice how the semicolon is still used as a path separator. +See "example_environment_linux.sh" for what my setup looks like. +It expects the Windows partition to be mounted on /mnt/windows, with VS2017 installed and Windows 10 SDK 10.0.17763.0.
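For orientation, here is a minimal sketch of what such a cross-build session might look like on Linux, assuming the mount point and environment file described in the ReadMe above; the partition device is a placeholder, and the waf binary is assumed to be available in the project directory::

    # Mount the Windows partition case-insensitively so Clang can resolve the MSVC and SDK headers
    sudo mount -t lowntfs-3g -o defaults,ignore_case,windows_names /dev/sdXn /mnt/windows

    # Export VCINSTALLDIR, VCToolsInstallDir, INCLUDE, LIB and LIBPATH for Clang
    . ./example_environment_linux.sh

    # Configure Clang in MSVC mode (flag taken from the playground wscript below) and build
    ./waf configure --clang-target-triple=x86_64-windows-msvc
    ./waf build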
diff --git a/playground/clang_cross/msvc/example_environment_linux.sh b/playground/clang_cross/msvc/example_environment_linux.sh new file mode 100644 index 0000000000..1fd5a0054e --- /dev/null +++ b/playground/clang_cross/msvc/example_environment_linux.sh @@ -0,0 +1,5 @@ +export VCINSTALLDIR="/mnt/windows/program files (x86)/microsoft visual studio/2017/community/vc/" +export VCToolsInstallDir="/mnt/windows/program files (x86)/microsoft visual studio/2017/community/vc/tools/msvc/14.16.27023/" +export INCLUDE="/mnt/windows/program files (x86)/microsoft visual studio/2017/community/vc/tools/msvc/14.16.27023/atlmfc/include;/mnt/windows/program files (x86)/microsoft visual studio/2017/community/vc/tools/msvc/14.16.27023/include;/mnt/windows/program files (x86)/windows kits/10/include/10.0.17763.0/ucrt;/mnt/windows/program files (x86)/windows kits/10/include/10.0.17763.0/shared;/mnt/windows/program files (x86)/windows kits/10/include/10.0.17763.0/um;/mnt/windows/program files (x86)/windows kits/10/include/10.0.17763.0/winrt;/mnt/windows/program files (x86)/windows kits/10/include/10.0.17763.0/cppwinrt" +export LIB="/mnt/windows/program files (x86)/microsoft visual studio/2017/community/vc/tools/msvc/14.16.27023/atlmfc/lib/x64;/mnt/windows/program files (x86)/microsoft visual studio/2017/community/vc/tools/msvc/14.16.27023/lib/x64;/mnt/windows/program files (x86)/windows kits/10/lib/10.0.17763.0/ucrt/x64;/mnt/windows/program files (x86)/windows kits/10/lib/10.0.17763.0/um/x64" +export LIBPATH="/mnt/windows/program files (x86)/microsoft visual studio/2017/community/vc/tools/msvc/14.16.27023/atlmfc/lib/x64;/mnt/windows/program files (x86)/microsoft visual studio/2017/community/vc/tools/msvc/14.16.27023/lib/x64;/mnt/windows/program files (x86)/microsoft visual studio/2017/community/vc/tools/msvc/14.16.27023/lib/x86/store/references;/mnt/windows/program files (x86)/windows kits/10/unionmetadata/10.0.17763.0;/mnt/windows/program files (x86)/windows kits/10/references/10.0.17763.0" diff --git a/playground/clang_cross/msvc/hello.c b/playground/clang_cross/msvc/hello.c new file mode 100644 index 0000000000..3ae11826fb --- /dev/null +++ b/playground/clang_cross/msvc/hello.c @@ -0,0 +1,11 @@ +#include + +int main(int argc, char* argv[]) +{ + (void)argc; + (void)argv; + + WriteConsole(GetStdHandle(STD_OUTPUT_HANDLE), "Hello world!\n", 13, NULL, NULL); + + return 0; +} diff --git a/playground/clang_cross/msvc/wscript b/playground/clang_cross/msvc/wscript new file mode 100644 index 0000000000..a1ed74a32f --- /dev/null +++ b/playground/clang_cross/msvc/wscript @@ -0,0 +1,32 @@ +#! /usr/bin/env python +# encoding: utf-8 +# DragoonX6 2019 + +# the following two variables are used by the target "waf dist" +VERSION='0.0.1' +APPNAME='hello_msvc' + +top = '.' + +from waflib.Configure import conf, ConfigurationContext +from waflib.Options import OptionsContext + +def options(opt): + opt.load('clang_cross') + +def configure(conf): + conf.load('clang_cross') + + if not conf.env.implib_PATTERN == '%s.lib': + conf.fatal('''clang is not configured to compile in msvc mode. +Use flag '--clang-target-triple=x86_64-windows-msvc' to configure. +On Windows you're likely to require running from an MSVC command prompt. +On Linux you will need to have access to a Windows partition with VS installed, and the environment set up properly. 
+See the ReadMe for more information.''') + + conf.env.append_value('CFLAGS', conf.env.CFLAGS_CRT_MULTITHREADED_DLL) + +def build(bld): + bld.program( + source = 'hello.c', + target = 'hello_msvc') diff --git a/playground/compress/optim.py b/playground/compress/optim.py index f61627329c..e318cef0b7 100755 --- a/playground/compress/optim.py +++ b/playground/compress/optim.py @@ -43,20 +43,20 @@ def compare(a, b): LEN = len(lst) POP = 3*LEN + 1 - popul = [range(LEN) for x in xrange(POP)] - fitn = [0 for x in xrange(POP)] + popul = [range(LEN) for x in range(POP)] + fitn = [0 for x in range(POP)] def rnd(): return random.randint(0, LEN -1) def mutate(): - for x in xrange(LEN): + for x in range(LEN): # rotate the previous element by one v = popul[x+LEN] = popul[x+LEN - 1] a = v.pop(0) v.append(a) - for x in xrange(LEN): + for x in range(LEN): # swap elements a = rnd() b = rnd() @@ -66,7 +66,7 @@ def mutate(): v[a] = v[b] v[b] = c - for x in xrange(LEN): + for x in range(LEN): # get one element out, add at the end v = popul[x+2*LEN] @@ -79,7 +79,7 @@ def evil(): best = opti_ref pos = -1 - for x in xrange(len(popul)): + for x in range(len(popul)): v = popul[x] arr = [lst[a] for a in v] tmp = '%s %s' % (cmd, ' '.join(arr)) @@ -99,14 +99,14 @@ def evil(): assert (sum(popul[x]) == sum(range(LEN))) #print pos - for x in xrange(len(popul)): + for x in range(len(popul)): if x == pos: continue popul[x] = popul[pos][:] assert(len(popul[x]) == LEN) return best - for i in xrange(10000): + for i in range(10000): mutate() print(evil()) diff --git a/playground/compress/wscript b/playground/compress/wscript index f411c1d382..40f7a126a8 100644 --- a/playground/compress/wscript +++ b/playground/compress/wscript @@ -138,7 +138,7 @@ def build(bld): bld(rule=try_compress, target=ini, always=True, kind=kind, frompath=node, files=rels) # for the same reason, count_result will be executed each time - bld(rule=count_result, target=dist, source=ini, always=True, update_outputs=True) + bld(rule=count_result, target=dist, source=[ini], always=True) bld(rule=write_template, target=plot, triplet=[png, kind, dist], always=True) bld(rule='${GNUPLOT} < ${SRC[1].abspath()}', target=png, source=[dist, plot]) diff --git a/playground/cuda/cuda.py b/playground/cuda/cuda.py index 791920d018..d4135eb189 100644 --- a/playground/cuda/cuda.py +++ b/playground/cuda/cuda.py @@ -4,14 +4,13 @@ "cuda" -import os from waflib import Task from waflib.TaskGen import extension from waflib.Tools import ccroot, c_preproc from waflib.Configure import conf class cuda(Task.Task): - run_str = '${NVCC} ${CUDAFLAGS} ${CXXFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CXX_SRC_F}${SRC} ${CXX_TGT_F}${TGT}' + run_str = '${NVCC} ${CUDAFLAGS} ${NVCCFLAGS_ST:CXXFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CXX_SRC_F}${SRC} ${CXX_TGT_F} ${TGT}' color = 'GREEN' ext_in = ['.h'] vars = ['CCDEPS'] @@ -22,9 +21,18 @@ class cuda(Task.Task): def c_hook(self, node): return self.create_compiled_task('cuda', node) +@extension('.cpp') +def cxx_hook(self, node): + # override processing for one particular type of file + if getattr(self, 'cuda', False): + return self.create_compiled_task('cuda', node) + else: + return self.create_compiled_task('cxx', node) + def configure(conf): conf.find_program('nvcc', var='NVCC') conf.find_cuda_libs() + conf.env.NVCCFLAGS_ST = "--compiler-options=%s" @conf def find_cuda_libs(self): @@ -43,7 +51,7 @@ def find_cuda_libs(self): _includes = node and 
node.abspath() or '' _libpath = [] - for x in ('lib64', 'lib'): + for x in ('lib64', 'lib64/stubs', 'lib', 'lib/stubs'): try: _libpath.append(d.find_node(x).abspath()) except: diff --git a/docs/book/examples/scenarios_expansion/main.c b/playground/cuda/test.cpp similarity index 96% rename from docs/book/examples/scenarios_expansion/main.c rename to playground/cuda/test.cpp index a46866d92e..ffacf6e6c0 100644 --- a/docs/book/examples/scenarios_expansion/main.c +++ b/playground/cuda/test.cpp @@ -2,3 +2,4 @@ int main() { return 0; } + diff --git a/playground/cuda/wscript b/playground/cuda/wscript index f2592536ce..db592d4d62 100644 --- a/playground/cuda/wscript +++ b/playground/cuda/wscript @@ -1,5 +1,5 @@ #!/usr/bin/env python -# encoding: ISO8859-1 +# encoding: utf-8 # Thomas Nagy, 2010 top = '.' @@ -18,8 +18,12 @@ def configure(conf): conf.load('cuda', tooldir='.') + # Add a few flags to test proper passing to nvcc + conf.env.CXXFLAGS=['-fPIC', '--std=c++11'] + def build(bld): + # cuda application t = bld.program( source = 'test.cu main.cpp', target = 'app', @@ -29,3 +33,18 @@ def build(bld): # --ptxas-options="-v" # --ptxas-options="-v -maxrregcount=10" + # ----------------------- + + # native application + bld.program( + source = 'test.cpp', + target = 'testapp-native') + + # cuda application + bld.program( + source = 'test.cpp', + target = 'testapp', + cuda = True, + use = 'CUDA CUDART') + + diff --git a/playground/cython/c_lib/extra_dep.h.in b/playground/cython/c_lib/extra_dep.h.in new file mode 100644 index 0000000000..acc41c53cd --- /dev/null +++ b/playground/cython/c_lib/extra_dep.h.in @@ -0,0 +1,2 @@ + +#define bindir "@BINDIR@" diff --git a/playground/cython/cython_cache/src/cy_ctest.c b/playground/cython/cython_cache/src/cy_ctest.c index 58fae2a72d..071c34479c 100644 --- a/playground/cython/cython_cache/src/cy_ctest.c +++ b/playground/cython/cython_cache/src/cy_ctest.c @@ -1,16 +1,19 @@ -/* Generated by Cython 0.15.1 on Tue May 29 23:42:45 2012 */ +/* Generated by Cython 0.29 */ #define PY_SSIZE_T_CLEAN #include "Python.h" #ifndef Py_PYTHON_H #error Python headers needed to compile C extensions, please install development version of Python. +#elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000) + #error Cython requires Python 2.6+ or Python 3.3+. 
#else - -#include /* For offsetof */ +#define CYTHON_ABI "0_29" +#define CYTHON_HEX_VERSION 0x001D00F0 +#define CYTHON_FUTURE_DIVISION 0 +#include #ifndef offsetof -#define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) + #define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) #endif - #if !defined(WIN32) && !defined(MS_WINDOWS) #ifndef __stdcall #define __stdcall @@ -22,122 +25,494 @@ #define __fastcall #endif #endif - #ifndef DL_IMPORT #define DL_IMPORT(t) t #endif #ifndef DL_EXPORT #define DL_EXPORT(t) t #endif - +#define __PYX_COMMA , +#ifndef HAVE_LONG_LONG + #if PY_VERSION_HEX >= 0x02070000 + #define HAVE_LONG_LONG + #endif +#endif #ifndef PY_LONG_LONG #define PY_LONG_LONG LONG_LONG #endif - -#if PY_VERSION_HEX < 0x02040000 - #define METH_COEXIST 0 - #define PyDict_CheckExact(op) (Py_TYPE(op) == &PyDict_Type) - #define PyDict_Contains(d,o) PySequence_Contains(d,o) +#ifndef Py_HUGE_VAL + #define Py_HUGE_VAL HUGE_VAL #endif - -#if PY_VERSION_HEX < 0x02050000 - typedef int Py_ssize_t; - #define PY_SSIZE_T_MAX INT_MAX - #define PY_SSIZE_T_MIN INT_MIN - #define PY_FORMAT_SIZE_T "" - #define PyInt_FromSsize_t(z) PyInt_FromLong(z) - #define PyInt_AsSsize_t(o) __Pyx_PyInt_AsInt(o) - #define PyNumber_Index(o) PyNumber_Int(o) - #define PyIndex_Check(o) PyNumber_Check(o) - #define PyErr_WarnEx(category, message, stacklevel) PyErr_Warn(category, message) +#ifdef PYPY_VERSION + #define CYTHON_COMPILING_IN_PYPY 1 + #define CYTHON_COMPILING_IN_PYSTON 0 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #undef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 0 + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #if PY_VERSION_HEX < 0x03050000 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #elif !defined(CYTHON_USE_ASYNC_SLOTS) + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #undef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 0 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #undef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 1 + #undef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 0 + #undef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 0 + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 0 + #undef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 0 + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 +#elif defined(PYSTON_VERSION) + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_PYSTON 1 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #ifndef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 1 + #endif + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #ifndef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 1 + #endif + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + 
#endif + #ifndef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 1 + #endif + #ifndef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 1 + #endif + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 0 + #undef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 0 + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 +#else + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_PYSTON 0 + #define CYTHON_COMPILING_IN_CPYTHON 1 + #ifndef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 1 + #endif + #if PY_VERSION_HEX < 0x02070000 + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #elif !defined(CYTHON_USE_PYTYPE_LOOKUP) + #define CYTHON_USE_PYTYPE_LOOKUP 1 + #endif + #if PY_MAJOR_VERSION < 3 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #elif !defined(CYTHON_USE_ASYNC_SLOTS) + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #if PY_VERSION_HEX < 0x02070000 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #elif !defined(CYTHON_USE_PYLONG_INTERNALS) + #define CYTHON_USE_PYLONG_INTERNALS 1 + #endif + #ifndef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 1 + #endif + #ifndef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 1 + #endif + #if PY_VERSION_HEX < 0x030300F0 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #elif !defined(CYTHON_USE_UNICODE_WRITER) + #define CYTHON_USE_UNICODE_WRITER 1 + #endif + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #ifndef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 1 + #endif + #ifndef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 1 + #endif + #ifndef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 1 + #endif + #ifndef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 1 + #endif + #ifndef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT (PY_VERSION_HEX >= 0x03050000) + #endif + #ifndef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1) + #endif + #ifndef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS (PY_VERSION_HEX >= 0x030600B1) + #endif + #ifndef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK (PY_VERSION_HEX >= 0x030700A3) + #endif +#endif +#if !defined(CYTHON_FAST_PYCCALL) +#define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1) +#endif +#if CYTHON_USE_PYLONG_INTERNALS + #include "longintrepr.h" + #undef SHIFT + #undef BASE + #undef MASK +#endif +#ifndef __has_attribute + #define __has_attribute(x) 0 +#endif +#ifndef __has_cpp_attribute + #define __has_cpp_attribute(x) 0 +#endif +#ifndef CYTHON_RESTRICT + #if defined(__GNUC__) + #define CYTHON_RESTRICT __restrict__ + #elif defined(_MSC_VER) && _MSC_VER >= 1400 + #define CYTHON_RESTRICT __restrict + #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define CYTHON_RESTRICT restrict + #else + #define CYTHON_RESTRICT + #endif +#endif +#ifndef CYTHON_UNUSED +# if defined(__GNUC__) +# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +# elif defined(__ICC) || 
(defined(__INTEL_COMPILER) && !defined(_MSC_VER)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +#endif +#ifndef CYTHON_MAYBE_UNUSED_VAR +# if defined(__cplusplus) + template void CYTHON_MAYBE_UNUSED_VAR( const T& ) { } +# else +# define CYTHON_MAYBE_UNUSED_VAR(x) (void)(x) +# endif +#endif +#ifndef CYTHON_NCP_UNUSED +# if CYTHON_COMPILING_IN_CPYTHON +# define CYTHON_NCP_UNUSED +# else +# define CYTHON_NCP_UNUSED CYTHON_UNUSED +# endif +#endif +#define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None) +#ifdef _MSC_VER + #ifndef _MSC_STDINT_H_ + #if _MSC_VER < 1300 + typedef unsigned char uint8_t; + typedef unsigned int uint32_t; + #else + typedef unsigned __int8 uint8_t; + typedef unsigned __int32 uint32_t; + #endif + #endif +#else + #include +#endif +#ifndef CYTHON_FALLTHROUGH + #if defined(__cplusplus) && __cplusplus >= 201103L + #if __has_cpp_attribute(fallthrough) + #define CYTHON_FALLTHROUGH [[fallthrough]] + #elif __has_cpp_attribute(clang::fallthrough) + #define CYTHON_FALLTHROUGH [[clang::fallthrough]] + #elif __has_cpp_attribute(gnu::fallthrough) + #define CYTHON_FALLTHROUGH [[gnu::fallthrough]] + #endif + #endif + #ifndef CYTHON_FALLTHROUGH + #if __has_attribute(fallthrough) + #define CYTHON_FALLTHROUGH __attribute__((fallthrough)) + #else + #define CYTHON_FALLTHROUGH + #endif + #endif + #if defined(__clang__ ) && defined(__apple_build_version__) + #if __apple_build_version__ < 7000000 + #undef CYTHON_FALLTHROUGH + #define CYTHON_FALLTHROUGH + #endif + #endif #endif -#if PY_VERSION_HEX < 0x02060000 - #define Py_REFCNT(ob) (((PyObject*)(ob))->ob_refcnt) - #define Py_TYPE(ob) (((PyObject*)(ob))->ob_type) - #define Py_SIZE(ob) (((PyVarObject*)(ob))->ob_size) - #define PyVarObject_HEAD_INIT(type, size) \ - PyObject_HEAD_INIT(type) size, - #define PyType_Modified(t) - - typedef struct { - void *buf; - PyObject *obj; - Py_ssize_t len; - Py_ssize_t itemsize; - int readonly; - int ndim; - char *format; - Py_ssize_t *shape; - Py_ssize_t *strides; - Py_ssize_t *suboffsets; - void *internal; - } Py_buffer; - - #define PyBUF_SIMPLE 0 - #define PyBUF_WRITABLE 0x0001 - #define PyBUF_FORMAT 0x0004 - #define PyBUF_ND 0x0008 - #define PyBUF_STRIDES (0x0010 | PyBUF_ND) - #define PyBUF_C_CONTIGUOUS (0x0020 | PyBUF_STRIDES) - #define PyBUF_F_CONTIGUOUS (0x0040 | PyBUF_STRIDES) - #define PyBUF_ANY_CONTIGUOUS (0x0080 | PyBUF_STRIDES) - #define PyBUF_INDIRECT (0x0100 | PyBUF_STRIDES) - +#ifndef CYTHON_INLINE + #if defined(__clang__) + #define CYTHON_INLINE __inline__ __attribute__ ((__unused__)) + #elif defined(__GNUC__) + #define CYTHON_INLINE __inline__ + #elif defined(_MSC_VER) + #define CYTHON_INLINE __inline + #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define CYTHON_INLINE inline + #else + #define CYTHON_INLINE + #endif #endif +#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x02070600 && !defined(Py_OptimizeFlag) + #define Py_OptimizeFlag 0 +#endif +#define __PYX_BUILD_PY_SSIZE_T "n" +#define CYTHON_FORMAT_SSIZE_T "z" #if PY_MAJOR_VERSION < 3 #define __Pyx_BUILTIN_MODULE_NAME "__builtin__" + #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ + PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) + #define __Pyx_DefaultClassType PyClass_Type #else #define __Pyx_BUILTIN_MODULE_NAME "builtins" + #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ + PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, 
name, fline, lnos) + #define __Pyx_DefaultClassType PyType_Type #endif - -#if PY_MAJOR_VERSION >= 3 +#ifndef Py_TPFLAGS_CHECKTYPES #define Py_TPFLAGS_CHECKTYPES 0 +#endif +#ifndef Py_TPFLAGS_HAVE_INDEX #define Py_TPFLAGS_HAVE_INDEX 0 #endif - -#if (PY_VERSION_HEX < 0x02060000) || (PY_MAJOR_VERSION >= 3) +#ifndef Py_TPFLAGS_HAVE_NEWBUFFER #define Py_TPFLAGS_HAVE_NEWBUFFER 0 #endif - +#ifndef Py_TPFLAGS_HAVE_FINALIZE + #define Py_TPFLAGS_HAVE_FINALIZE 0 +#endif +#ifndef METH_STACKLESS + #define METH_STACKLESS 0 +#endif +#if PY_VERSION_HEX <= 0x030700A3 || !defined(METH_FASTCALL) + #ifndef METH_FASTCALL + #define METH_FASTCALL 0x80 + #endif + typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject *const *args, Py_ssize_t nargs); + typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args, + Py_ssize_t nargs, PyObject *kwnames); +#else + #define __Pyx_PyCFunctionFast _PyCFunctionFast + #define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords +#endif +#if CYTHON_FAST_PYCCALL +#define __Pyx_PyFastCFunction_Check(func)\ + ((PyCFunction_Check(func) && (METH_FASTCALL == (PyCFunction_GET_FLAGS(func) & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS))))) +#else +#define __Pyx_PyFastCFunction_Check(func) 0 +#endif +#if CYTHON_USE_DICT_VERSIONS +#define __PYX_GET_DICT_VERSION(dict) (((PyDictObject*)(dict))->ma_version_tag) +#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)\ + (version_var) = __PYX_GET_DICT_VERSION(dict);\ + (cache_var) = (value); +#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) {\ + static PY_UINT64_T __pyx_dict_version = 0;\ + static PyObject *__pyx_dict_cached_value = NULL;\ + if (likely(__PYX_GET_DICT_VERSION(DICT) == __pyx_dict_version)) {\ + (VAR) = __pyx_dict_cached_value;\ + } else {\ + (VAR) = __pyx_dict_cached_value = (LOOKUP);\ + __pyx_dict_version = __PYX_GET_DICT_VERSION(DICT);\ + }\ + } +#else +#define __PYX_GET_DICT_VERSION(dict) (0) +#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var) +#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) (VAR) = (LOOKUP); +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc) + #define PyObject_Malloc(s) PyMem_Malloc(s) + #define PyObject_Free(p) PyMem_Free(p) + #define PyObject_Realloc(p) PyMem_Realloc(p) +#endif +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030400A1 + #define PyMem_RawMalloc(n) PyMem_Malloc(n) + #define PyMem_RawRealloc(p, n) PyMem_Realloc(p, n) + #define PyMem_RawFree(p) PyMem_Free(p) +#endif +#if CYTHON_COMPILING_IN_PYSTON + #define __Pyx_PyCode_HasFreeVars(co) PyCode_HasFreeVars(co) + #define __Pyx_PyFrame_SetLineNumber(frame, lineno) PyFrame_SetLineNumber(frame, lineno) +#else + #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) + #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno) +#endif +#if !CYTHON_FAST_THREAD_STATE || PY_VERSION_HEX < 0x02070000 + #define __Pyx_PyThreadState_Current PyThreadState_GET() +#elif PY_VERSION_HEX >= 0x03060000 + #define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet() +#elif PY_VERSION_HEX >= 0x03000000 + #define __Pyx_PyThreadState_Current PyThreadState_GET() +#else + #define __Pyx_PyThreadState_Current _PyThreadState_Current +#endif +#if PY_VERSION_HEX < 0x030700A2 && !defined(PyThread_tss_create) && !defined(Py_tss_NEEDS_INIT) +#include "pythread.h" +#define Py_tss_NEEDS_INIT 0 +typedef int Py_tss_t; +static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) { + 
*key = PyThread_create_key(); + return 0; // PyThread_create_key reports success always +} +static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) { + Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t)); + *key = Py_tss_NEEDS_INIT; + return key; +} +static CYTHON_INLINE void PyThread_tss_free(Py_tss_t *key) { + PyObject_Free(key); +} +static CYTHON_INLINE int PyThread_tss_is_created(Py_tss_t *key) { + return *key != Py_tss_NEEDS_INIT; +} +static CYTHON_INLINE void PyThread_tss_delete(Py_tss_t *key) { + PyThread_delete_key(*key); + *key = Py_tss_NEEDS_INIT; +} +static CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) { + return PyThread_set_key_value(*key, value); +} +static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { + return PyThread_get_key_value(*key); +} +#endif // TSS (Thread Specific Storage) API +#if CYTHON_COMPILING_IN_CPYTHON || defined(_PyDict_NewPresized) +#define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? PyDict_New() : _PyDict_NewPresized(n)) +#else +#define __Pyx_PyDict_NewPresized(n) PyDict_New() +#endif +#if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION + #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) +#else + #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y) +#endif +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 && CYTHON_USE_UNICODE_INTERNALS +#define __Pyx_PyDict_GetItemStr(dict, name) _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *) name)->hash) +#else +#define __Pyx_PyDict_GetItemStr(dict, name) PyDict_GetItem(dict, name) +#endif +#if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND) + #define CYTHON_PEP393_ENABLED 1 + #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\ + 0 : _PyUnicode_Ready((PyObject *)(op))) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u) + #define __Pyx_PyUnicode_KIND(u) PyUnicode_KIND(u) + #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u) + #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i) + #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, ch) + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u))) +#else + #define CYTHON_PEP393_ENABLED 0 + #define PyUnicode_1BYTE_KIND 1 + #define PyUnicode_2BYTE_KIND 2 + #define PyUnicode_4BYTE_KIND 4 + #define __Pyx_PyUnicode_READY(op) (0) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i])) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 
65535 : 1114111) + #define __Pyx_PyUnicode_KIND(u) (sizeof(Py_UNICODE)) + #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u)) + #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i])) + #define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = ch) + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u)) +#endif +#if CYTHON_COMPILING_IN_PYPY + #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b) + #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b) +#else + #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b) + #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\ + PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b)) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyUnicode_Contains) + #define PyUnicode_Contains(u, s) PySequence_Contains(u, s) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyByteArray_Check) + #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Format) + #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt) +#endif +#define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyString_Check(b) && !PyString_CheckExact(b)))) ? PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b)) +#define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyUnicode_Check(b) && !PyUnicode_CheckExact(b)))) ? PyNumber_Remainder(a, b) : PyUnicode_Format(a, b)) +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b) +#else + #define __Pyx_PyString_Format(a, b) PyString_Format(a, b) +#endif +#if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII) + #define PyObject_ASCII(o) PyObject_Repr(o) +#endif #if PY_MAJOR_VERSION >= 3 #define PyBaseString_Type PyUnicode_Type #define PyStringObject PyUnicodeObject #define PyString_Type PyUnicode_Type #define PyString_Check PyUnicode_Check #define PyString_CheckExact PyUnicode_CheckExact + #define PyObject_Unicode PyObject_Str #endif - -#if PY_VERSION_HEX < 0x02060000 - #define PyBytesObject PyStringObject - #define PyBytes_Type PyString_Type - #define PyBytes_Check PyString_Check - #define PyBytes_CheckExact PyString_CheckExact - #define PyBytes_FromString PyString_FromString - #define PyBytes_FromStringAndSize PyString_FromStringAndSize - #define PyBytes_FromFormat PyString_FromFormat - #define PyBytes_DecodeEscape PyString_DecodeEscape - #define PyBytes_AsString PyString_AsString - #define PyBytes_AsStringAndSize PyString_AsStringAndSize - #define PyBytes_Size PyString_Size - #define PyBytes_AS_STRING PyString_AS_STRING - #define PyBytes_GET_SIZE PyString_GET_SIZE - #define PyBytes_Repr PyString_Repr - #define PyBytes_Concat PyString_Concat - #define PyBytes_ConcatAndDel PyString_ConcatAndDel -#endif - -#if PY_VERSION_HEX < 0x02060000 - #define PySet_Check(obj) PyObject_TypeCheck(obj, &PySet_Type) - #define PyFrozenSet_Check(obj) PyObject_TypeCheck(obj, &PyFrozenSet_Type) +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) + #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj) +#else + #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj)) + #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj)) #endif #ifndef PySet_CheckExact #define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type) #endif - -#define __Pyx_TypeCheck(obj, type) 
PyObject_TypeCheck(obj, (PyTypeObject *)type) - +#if CYTHON_ASSUME_SAFE_MACROS + #define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq) +#else + #define __Pyx_PySequence_SIZE(seq) PySequence_Size(seq) +#endif #if PY_MAJOR_VERSION >= 3 #define PyIntObject PyLongObject #define PyInt_Type PyLong_Type @@ -153,13 +528,17 @@ #define PyInt_AsSsize_t PyLong_AsSsize_t #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask + #define PyNumber_Int PyNumber_Long #endif - #if PY_MAJOR_VERSION >= 3 #define PyBoolObject PyLongObject #endif - -#if PY_VERSION_HEX < 0x03020000 +#if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY + #ifndef PyUnicode_InternFromString + #define PyUnicode_InternFromString(s) PyUnicode_FromString(s) + #endif +#endif +#if PY_VERSION_HEX < 0x030200A4 typedef long Py_hash_t; #define __Pyx_PyInt_FromHash_t PyInt_FromLong #define __Pyx_PyInt_AsHash_t PyInt_AsLong @@ -167,57 +546,54 @@ #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t #define __Pyx_PyInt_AsHash_t PyInt_AsSsize_t #endif - - #if PY_MAJOR_VERSION >= 3 - #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) - #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) + #define __Pyx_PyMethod_New(func, self, klass) ((self) ? PyMethod_New(func, self) : (Py_INCREF(func), func)) #else - #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y) - #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y) + #define __Pyx_PyMethod_New(func, self, klass) PyMethod_New(func, self, klass) #endif - -#if (PY_MAJOR_VERSION < 3) || (PY_VERSION_HEX >= 0x03010300) - #define __Pyx_PySequence_GetSlice(obj, a, b) PySequence_GetSlice(obj, a, b) - #define __Pyx_PySequence_SetSlice(obj, a, b, value) PySequence_SetSlice(obj, a, b, value) - #define __Pyx_PySequence_DelSlice(obj, a, b) PySequence_DelSlice(obj, a, b) +#if CYTHON_USE_ASYNC_SLOTS + #if PY_VERSION_HEX >= 0x030500B1 + #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods + #define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async) + #else + #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved)) + #endif #else - #define __Pyx_PySequence_GetSlice(obj, a, b) (unlikely(!(obj)) ? \ - (PyErr_SetString(PyExc_SystemError, "null argument to internal routine"), (PyObject*)0) : \ - (likely((obj)->ob_type->tp_as_mapping) ? (PySequence_GetSlice(obj, a, b)) : \ - (PyErr_Format(PyExc_TypeError, "'%.200s' object is unsliceable", (obj)->ob_type->tp_name), (PyObject*)0))) - #define __Pyx_PySequence_SetSlice(obj, a, b, value) (unlikely(!(obj)) ? \ - (PyErr_SetString(PyExc_SystemError, "null argument to internal routine"), -1) : \ - (likely((obj)->ob_type->tp_as_mapping) ? (PySequence_SetSlice(obj, a, b, value)) : \ - (PyErr_Format(PyExc_TypeError, "'%.200s' object doesn't support slice assignment", (obj)->ob_type->tp_name), -1))) - #define __Pyx_PySequence_DelSlice(obj, a, b) (unlikely(!(obj)) ? \ - (PyErr_SetString(PyExc_SystemError, "null argument to internal routine"), -1) : \ - (likely((obj)->ob_type->tp_as_mapping) ? (PySequence_DelSlice(obj, a, b)) : \ - (PyErr_Format(PyExc_TypeError, "'%.200s' object doesn't support slice deletion", (obj)->ob_type->tp_name), -1))) + #define __Pyx_PyType_AsAsync(obj) NULL #endif - -#if PY_MAJOR_VERSION >= 3 - #define PyMethod_New(func, self, klass) ((self) ? 
PyMethod_New(func, self) : PyInstanceMethod_New(func)) +#ifndef __Pyx_PyAsyncMethodsStruct + typedef struct { + unaryfunc am_await; + unaryfunc am_aiter; + unaryfunc am_anext; + } __Pyx_PyAsyncMethodsStruct; #endif -#if PY_VERSION_HEX < 0x02050000 - #define __Pyx_GetAttrString(o,n) PyObject_GetAttrString((o),((char *)(n))) - #define __Pyx_SetAttrString(o,n,a) PyObject_SetAttrString((o),((char *)(n)),(a)) - #define __Pyx_DelAttrString(o,n) PyObject_DelAttrString((o),((char *)(n))) +#if defined(WIN32) || defined(MS_WINDOWS) + #define _USE_MATH_DEFINES +#endif +#include +#ifdef NAN +#define __PYX_NAN() ((float) NAN) #else - #define __Pyx_GetAttrString(o,n) PyObject_GetAttrString((o),(n)) - #define __Pyx_SetAttrString(o,n,a) PyObject_SetAttrString((o),(n),(a)) - #define __Pyx_DelAttrString(o,n) PyObject_DelAttrString((o),(n)) +static CYTHON_INLINE float __PYX_NAN() { + float value; + memset(&value, 0xFF, sizeof(value)); + return value; +} #endif - -#if PY_VERSION_HEX < 0x02050000 - #define __Pyx_NAMESTR(n) ((char *)(n)) - #define __Pyx_DOCSTR(n) ((char *)(n)) +#if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL) +#define __Pyx_truncl trunc #else - #define __Pyx_NAMESTR(n) (n) - #define __Pyx_DOCSTR(n) (n) +#define __Pyx_truncl truncl #endif + +#define __PYX_ERR(f_index, lineno, Ln_error) \ +{ \ + __pyx_filename = __pyx_f[f_index]; __pyx_lineno = lineno; __pyx_clineno = __LINE__; goto Ln_error; \ +} + #ifndef __PYX_EXTERN_C #ifdef __cplusplus #define __PYX_EXTERN_C extern "C" @@ -226,88 +602,210 @@ #endif #endif -#if defined(WIN32) || defined(MS_WINDOWS) -#define _USE_MATH_DEFINES -#endif -#include #define __PYX_HAVE__cy_ctest #define __PYX_HAVE_API__cy_ctest +/* Early includes */ #include "lib.h" #ifdef _OPENMP #include #endif /* _OPENMP */ -#ifdef PYREX_WITHOUT_ASSERTIONS +#if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS) #define CYTHON_WITHOUT_ASSERTIONS #endif - -/* inline attribute */ -#ifndef CYTHON_INLINE - #if defined(__GNUC__) - #define CYTHON_INLINE __inline__ - #elif defined(_MSC_VER) - #define CYTHON_INLINE __inline - #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L - #define CYTHON_INLINE inline - #else - #define CYTHON_INLINE - #endif +typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding; + const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; + +#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0 +#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT 0 +#define __PYX_DEFAULT_STRING_ENCODING "" +#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString +#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize +#define __Pyx_uchar_cast(c) ((unsigned char)c) +#define __Pyx_long_cast(x) ((long)x) +#define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\ + (sizeof(type) < sizeof(Py_ssize_t)) ||\ + (sizeof(type) > sizeof(Py_ssize_t) &&\ + likely(v < (type)PY_SSIZE_T_MAX ||\ + v == (type)PY_SSIZE_T_MAX) &&\ + (!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\ + v == (type)PY_SSIZE_T_MIN))) ||\ + (sizeof(type) == sizeof(Py_ssize_t) &&\ + (is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\ + v == (type)PY_SSIZE_T_MAX))) ) +static CYTHON_INLINE int __Pyx_is_valid_index(Py_ssize_t i, Py_ssize_t limit) { + return (size_t) i < (size_t) limit; +} +#if defined (__cplusplus) && __cplusplus >= 201103L + #include + #define __Pyx_sst_abs(value) std::abs(value) +#elif SIZEOF_INT >= SIZEOF_SIZE_T + #define __Pyx_sst_abs(value) abs(value) +#elif SIZEOF_LONG >= SIZEOF_SIZE_T + #define 
__Pyx_sst_abs(value) labs(value) +#elif defined (_MSC_VER) + #define __Pyx_sst_abs(value) ((Py_ssize_t)_abs64(value)) +#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define __Pyx_sst_abs(value) llabs(value) +#elif defined (__GNUC__) + #define __Pyx_sst_abs(value) __builtin_llabs(value) +#else + #define __Pyx_sst_abs(value) ((value<0) ? -value : value) #endif - -/* unused attribute */ -#ifndef CYTHON_UNUSED -# if defined(__GNUC__) -# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) -# define CYTHON_UNUSED __attribute__ ((__unused__)) -# else -# define CYTHON_UNUSED -# endif -# elif defined(__ICC) || defined(__INTEL_COMPILER) -# define CYTHON_UNUSED __attribute__ ((__unused__)) -# else -# define CYTHON_UNUSED -# endif +static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*); +static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length); +#define __Pyx_PyByteArray_FromString(s) PyByteArray_FromStringAndSize((const char*)s, strlen((const char*)s)) +#define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l) +#define __Pyx_PyBytes_FromString PyBytes_FromString +#define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*); +#if PY_MAJOR_VERSION < 3 + #define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString + #define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize +#else + #define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString + #define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize #endif - -typedef struct {PyObject **p; char *s; const long n; const char* encoding; const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; /*proto*/ - - -/* Type Conversion Predeclarations */ - -#define __Pyx_PyBytes_FromUString(s) PyBytes_FromString((char*)s) -#define __Pyx_PyBytes_AsUString(s) ((unsigned char*) PyBytes_AsString(s)) - -#define __Pyx_Owned_Py_None(b) (Py_INCREF(Py_None), Py_None) -#define __Pyx_PyBool_FromLong(b) ((b) ? 
(Py_INCREF(Py_True), Py_True) : (Py_INCREF(Py_False), Py_False)) +#define __Pyx_PyBytes_AsWritableString(s) ((char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsWritableSString(s) ((signed char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsWritableUString(s) ((unsigned char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsString(s) ((const char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsSString(s) ((const signed char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsUString(s) ((const unsigned char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyObject_AsWritableString(s) ((char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsWritableSString(s) ((signed char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s) +#define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s) +#define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s) +#define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s) +#define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s) +static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) { + const Py_UNICODE *u_end = u; + while (*u_end++) ; + return (size_t)(u_end - u - 1); +} +#define __Pyx_PyUnicode_FromUnicode(u) PyUnicode_FromUnicode(u, __Pyx_Py_UNICODE_strlen(u)) +#define __Pyx_PyUnicode_FromUnicodeAndLength PyUnicode_FromUnicode +#define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode +#define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) +#define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) +static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b); static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); -static CYTHON_INLINE PyObject* __Pyx_PyNumber_Int(PyObject* x); - +static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject*); +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); +#define __Pyx_PySequence_Tuple(obj)\ + (likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj)) static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); -static CYTHON_INLINE size_t __Pyx_PyInt_AsSize_t(PyObject*); - +#if CYTHON_ASSUME_SAFE_MACROS #define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) +#else +#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x) +#endif +#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x)) +#if PY_MAJOR_VERSION >= 3 +#define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Long(x)) +#else +#define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x)) +#endif +#define __Pyx_PyNumber_Float(x) (PyFloat_CheckExact(x) ? 
__Pyx_NewRef(x) : PyNumber_Float(x)) +#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII +static int __Pyx_sys_getdefaultencoding_not_ascii; +static int __Pyx_init_sys_getdefaultencoding_params(void) { + PyObject* sys; + PyObject* default_encoding = NULL; + PyObject* ascii_chars_u = NULL; + PyObject* ascii_chars_b = NULL; + const char* default_encoding_c; + sys = PyImport_ImportModule("sys"); + if (!sys) goto bad; + default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL); + Py_DECREF(sys); + if (!default_encoding) goto bad; + default_encoding_c = PyBytes_AsString(default_encoding); + if (!default_encoding_c) goto bad; + if (strcmp(default_encoding_c, "ascii") == 0) { + __Pyx_sys_getdefaultencoding_not_ascii = 0; + } else { + char ascii_chars[128]; + int c; + for (c = 0; c < 128; c++) { + ascii_chars[c] = c; + } + __Pyx_sys_getdefaultencoding_not_ascii = 1; + ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL); + if (!ascii_chars_u) goto bad; + ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL); + if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) { + PyErr_Format( + PyExc_ValueError, + "This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.", + default_encoding_c); + goto bad; + } + Py_DECREF(ascii_chars_u); + Py_DECREF(ascii_chars_b); + } + Py_DECREF(default_encoding); + return 0; +bad: + Py_XDECREF(default_encoding); + Py_XDECREF(ascii_chars_u); + Py_XDECREF(ascii_chars_b); + return -1; +} +#endif +#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3 +#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL) +#else +#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL) +#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT +static char* __PYX_DEFAULT_STRING_ENCODING; +static int __Pyx_init_sys_getdefaultencoding_params(void) { + PyObject* sys; + PyObject* default_encoding = NULL; + char* default_encoding_c; + sys = PyImport_ImportModule("sys"); + if (!sys) goto bad; + default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL); + Py_DECREF(sys); + if (!default_encoding) goto bad; + default_encoding_c = PyBytes_AsString(default_encoding); + if (!default_encoding_c) goto bad; + __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c) + 1); + if (!__PYX_DEFAULT_STRING_ENCODING) goto bad; + strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c); + Py_DECREF(default_encoding); + return 0; +bad: + Py_XDECREF(default_encoding); + return -1; +} +#endif +#endif -#ifdef __GNUC__ - /* Test for GCC > 2.95 */ - #if __GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95)) - #define likely(x) __builtin_expect(!!(x), 1) - #define unlikely(x) __builtin_expect(!!(x), 0) - #else /* __GNUC__ > 2 ... */ - #define likely(x) (x) - #define unlikely(x) (x) - #endif /* __GNUC__ > 2 ... 
*/ -#else /* __GNUC__ */ +/* Test for GCC > 2.95 */ +#if defined(__GNUC__) && (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95))) + #define likely(x) __builtin_expect(!!(x), 1) + #define unlikely(x) __builtin_expect(!!(x), 0) +#else /* !__GNUC__ or GCC < 2.95 */ #define likely(x) (x) #define unlikely(x) (x) #endif /* __GNUC__ */ - -static PyObject *__pyx_m; +static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; } + +static PyObject *__pyx_m = NULL; +static PyObject *__pyx_d; static PyObject *__pyx_b; +static PyObject *__pyx_cython_runtime = NULL; static PyObject *__pyx_empty_tuple; static PyObject *__pyx_empty_bytes; +static PyObject *__pyx_empty_unicode; static int __pyx_lineno; static int __pyx_clineno = 0; static const char * __pyx_cfilenm= __FILE__; @@ -320,10 +818,11 @@ static const char *__pyx_f[] = { /*--- Type declarations ---*/ +/* --- Runtime support code (head) --- */ +/* Refnanny.proto */ #ifndef CYTHON_REFNANNY #define CYTHON_REFNANNY 0 #endif - #if CYTHON_REFNANNY typedef struct { void (*INCREF)(void*, PyObject*, int); @@ -334,10 +833,23 @@ static const char *__pyx_f[] = { void (*FinishContext)(void**); } __Pyx_RefNannyAPIStruct; static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL; - static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); /*proto*/ + static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL; - #define __Pyx_RefNannySetupContext(name) __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__) - #define __Pyx_RefNannyFinishContext() __Pyx_RefNanny->FinishContext(&__pyx_refnanny) +#ifdef WITH_THREAD + #define __Pyx_RefNannySetupContext(name, acquire_gil)\ + if (acquire_gil) {\ + PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ + PyGILState_Release(__pyx_gilstate_save);\ + } else {\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ + } +#else + #define __Pyx_RefNannySetupContext(name, acquire_gil)\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__) +#endif + #define __Pyx_RefNannyFinishContext()\ + __Pyx_RefNanny->FinishContext(&__pyx_refnanny) #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__) #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__) #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), __LINE__) @@ -348,7 +860,7 @@ static const char *__pyx_f[] = { #define __Pyx_XGIVEREF(r) do { if((r) != NULL) {__Pyx_GIVEREF(r);}} while(0) #else #define __Pyx_RefNannyDeclarations - #define __Pyx_RefNannySetupContext(name) + #define __Pyx_RefNannySetupContext(name, acquire_gil) #define __Pyx_RefNannyFinishContext() #define __Pyx_INCREF(r) Py_INCREF(r) #define __Pyx_DECREF(r) Py_DECREF(r) @@ -358,84 +870,227 @@ static const char *__pyx_f[] = { #define __Pyx_XDECREF(r) Py_XDECREF(r) #define __Pyx_XGOTREF(r) #define __Pyx_XGIVEREF(r) -#endif /* CYTHON_REFNANNY */ - -static CYTHON_INLINE unsigned char __Pyx_PyInt_AsUnsignedChar(PyObject *); - -static CYTHON_INLINE unsigned short __Pyx_PyInt_AsUnsignedShort(PyObject *); - -static CYTHON_INLINE unsigned int __Pyx_PyInt_AsUnsignedInt(PyObject *); - -static CYTHON_INLINE char __Pyx_PyInt_AsChar(PyObject *); - -static CYTHON_INLINE short __Pyx_PyInt_AsShort(PyObject *); - -static CYTHON_INLINE int __Pyx_PyInt_AsInt(PyObject *); - 
-static CYTHON_INLINE signed char __Pyx_PyInt_AsSignedChar(PyObject *); +#endif +#define __Pyx_XDECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; __Pyx_XDECREF(tmp);\ + } while (0) +#define __Pyx_DECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; __Pyx_DECREF(tmp);\ + } while (0) +#define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0) +#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0) + +/* PyObjectGetAttrStr.proto */ +#if CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name); +#else +#define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n) +#endif -static CYTHON_INLINE signed short __Pyx_PyInt_AsSignedShort(PyObject *); +/* PyThreadStateGet.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate; +#define __Pyx_PyThreadState_assign __pyx_tstate = __Pyx_PyThreadState_Current; +#define __Pyx_PyErr_Occurred() __pyx_tstate->curexc_type +#else +#define __Pyx_PyThreadState_declare +#define __Pyx_PyThreadState_assign +#define __Pyx_PyErr_Occurred() PyErr_Occurred() +#endif -static CYTHON_INLINE signed int __Pyx_PyInt_AsSignedInt(PyObject *); +/* PyErrFetchRestore.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyErr_Clear() __Pyx_ErrRestore(NULL, NULL, NULL) +#define __Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb) +#define __Pyx_ErrFetchWithState(type, value, tb) __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb) +#define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState(__pyx_tstate, type, value, tb) +#define __Pyx_ErrFetch(type, value, tb) __Pyx_ErrFetchInState(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); +static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#if CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_PyErr_SetNone(exc) (Py_INCREF(exc), __Pyx_ErrRestore((exc), NULL, NULL)) +#else +#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) +#endif +#else +#define __Pyx_PyErr_Clear() PyErr_Clear() +#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) +#define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, tb) +#define __Pyx_ErrRestoreInState(tstate, type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetchInState(tstate, type, value, tb) PyErr_Fetch(type, value, tb) +#define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetch(type, value, tb) PyErr_Fetch(type, value, tb) +#endif -static CYTHON_INLINE int __Pyx_PyInt_AsLongDouble(PyObject *); +/* CLineInTraceback.proto */ +#ifdef CYTHON_CLINE_IN_TRACEBACK +#define __Pyx_CLineForTraceback(tstate, c_line) (((CYTHON_CLINE_IN_TRACEBACK)) ? 
c_line : 0) +#else +static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line); +#endif -static CYTHON_INLINE unsigned long __Pyx_PyInt_AsUnsignedLong(PyObject *); +/* CodeObjectCache.proto */ +typedef struct { + PyCodeObject* code_object; + int code_line; +} __Pyx_CodeObjectCacheEntry; +struct __Pyx_CodeObjectCache { + int count; + int max_count; + __Pyx_CodeObjectCacheEntry* entries; +}; +static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL}; +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line); +static PyCodeObject *__pyx_find_code_object(int code_line); +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object); + +/* AddTraceback.proto */ +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename); + +/* Print.proto */ +static int __Pyx_Print(PyObject*, PyObject *, int); +#if CYTHON_COMPILING_IN_PYPY || PY_MAJOR_VERSION >= 3 +static PyObject* __pyx_print = 0; +static PyObject* __pyx_print_kwargs = 0; +#endif -static CYTHON_INLINE unsigned PY_LONG_LONG __Pyx_PyInt_AsUnsignedLongLong(PyObject *); +/* PrintOne.proto */ +static int __Pyx_PrintOne(PyObject* stream, PyObject *o); -static CYTHON_INLINE long __Pyx_PyInt_AsLong(PyObject *); +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value); -static CYTHON_INLINE PY_LONG_LONG __Pyx_PyInt_AsLongLong(PyObject *); +/* CIntFromPy.proto */ +static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *); -static CYTHON_INLINE signed long __Pyx_PyInt_AsSignedLong(PyObject *); +/* CIntFromPy.proto */ +static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *); -static CYTHON_INLINE signed PY_LONG_LONG __Pyx_PyInt_AsSignedLongLong(PyObject *); +/* FastTypeChecks.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type) +static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b); +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type); +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2); +#else +#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type) +#define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type) +#define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || PyErr_GivenExceptionMatches(err, type2)) +#endif +#define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception) +/* CheckBinaryVersion.proto */ static int __Pyx_check_binary_version(void); -static void __Pyx_AddTraceback(const char *funcname, int __pyx_clineno, - int __pyx_lineno, const char *__pyx_filename); /*proto*/ +/* InitStrings.proto */ +static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); + -static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); /*proto*/ +/* Module declarations from 'cpython.version' */ /* Module declarations from 'cy_ctest' */ #define __Pyx_MODULE_NAME "cy_ctest" +extern int __pyx_module_is_main_cy_ctest; int __pyx_module_is_main_cy_ctest = 0; /* Implementation of 'cy_ctest' */ -static char __pyx_k__pyhello[] = "pyhello"; -static char __pyx_k____main__[] = "__main__"; -static char __pyx_k____test__[] = "__test__"; -static char __pyx_k__cy_ctest[] = "cy_ctest"; -static PyObject *__pyx_n_s____main__; -static PyObject *__pyx_n_s____test__; -static PyObject *__pyx_n_s__cy_ctest; -static PyObject 
*__pyx_n_s__pyhello; - -/* "cy_ctest.pyx":3 - * cimport cy_ctest +static const char __pyx_k_end[] = "end"; +static const char __pyx_k_file[] = "file"; +static const char __pyx_k_main[] = "__main__"; +static const char __pyx_k_name[] = "__name__"; +static const char __pyx_k_test[] = "__test__"; +static const char __pyx_k_print[] = "print"; +static const char __pyx_k_pyhello[] = "pyhello"; +static const char __pyx_k_cy_ctest[] = "cy_ctest"; +static const char __pyx_k_cline_in_traceback[] = "cline_in_traceback"; +static const char __pyx_k_Compiled_with_python_version_s[] = "Compiled with python version %s"; +static const char __pyx_k_home_romain_dev_waf_wrapper_waf[] = "/home/romain/dev/waf-wrapper/waf/playground/cython/src/cy_ctest.pyx"; +static PyObject *__pyx_kp_s_Compiled_with_python_version_s; +static PyObject *__pyx_n_s_cline_in_traceback; +static PyObject *__pyx_n_s_cy_ctest; +static PyObject *__pyx_n_s_end; +static PyObject *__pyx_n_s_file; +static PyObject *__pyx_kp_s_home_romain_dev_waf_wrapper_waf; +static PyObject *__pyx_n_s_main; +static PyObject *__pyx_n_s_name; +static PyObject *__pyx_n_s_print; +static PyObject *__pyx_n_s_pyhello; +static PyObject *__pyx_n_s_test; +static PyObject *__pyx_pf_8cy_ctest_pyhello(CYTHON_UNUSED PyObject *__pyx_self); /* proto */ +static PyObject *__pyx_codeobj_; +/* Late includes */ + +/* "cy_ctest.pyx":5 + * #cimport commented_import * * def pyhello(): # <<<<<<<<<<<<<< * cy_ctest.hello() + * print("Compiled with python version %s" % PY_VERSION) */ -static PyObject *__pyx_pf_8cy_ctest_pyhello(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static PyMethodDef __pyx_mdef_8cy_ctest_pyhello = {__Pyx_NAMESTR("pyhello"), (PyCFunction)__pyx_pf_8cy_ctest_pyhello, METH_NOARGS, __Pyx_DOCSTR(0)}; -static PyObject *__pyx_pf_8cy_ctest_pyhello(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused) { +/* Python wrapper */ +static PyObject *__pyx_pw_8cy_ctest_1pyhello(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyMethodDef __pyx_mdef_8cy_ctest_1pyhello = {"pyhello", (PyCFunction)__pyx_pw_8cy_ctest_1pyhello, METH_NOARGS, 0}; +static PyObject *__pyx_pw_8cy_ctest_1pyhello(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("pyhello (wrapper)", 0); + __pyx_r = __pyx_pf_8cy_ctest_pyhello(__pyx_self); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_8cy_ctest_pyhello(CYTHON_UNUSED PyObject *__pyx_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("pyhello"); - __pyx_self = __pyx_self; + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + __Pyx_RefNannySetupContext("pyhello", 0); - /* "cy_ctest.pyx":4 + /* "cy_ctest.pyx":6 * * def pyhello(): * cy_ctest.hello() # <<<<<<<<<<<<<< + * print("Compiled with python version %s" % PY_VERSION) */ hello(); + /* "cy_ctest.pyx":7 + * def pyhello(): + * cy_ctest.hello() + * print("Compiled with python version %s" % PY_VERSION) # <<<<<<<<<<<<<< + */ + __pyx_t_1 = __Pyx_PyObject_FromString(PY_VERSION); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyString_Format(__pyx_kp_s_Compiled_with_python_version_s, __pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__Pyx_PrintOne(0, __pyx_t_2) < 0) __PYX_ERR(0, 7, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); 
__pyx_t_2 = 0; + + /* "cy_ctest.pyx":5 + * #cimport commented_import + * + * def pyhello(): # <<<<<<<<<<<<<< + * cy_ctest.hello() + * print("Compiled with python version %s" % PY_VERSION) + */ + + /* function exit code */ __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_AddTraceback("cy_ctest.pyhello", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; @@ -446,69 +1101,289 @@ static PyMethodDef __pyx_methods[] = { }; #if PY_MAJOR_VERSION >= 3 +#if CYTHON_PEP489_MULTI_PHASE_INIT +static PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def); /*proto*/ +static int __pyx_pymod_exec_cy_ctest(PyObject* module); /*proto*/ +static PyModuleDef_Slot __pyx_moduledef_slots[] = { + {Py_mod_create, (void*)__pyx_pymod_create}, + {Py_mod_exec, (void*)__pyx_pymod_exec_cy_ctest}, + {0, NULL} +}; +#endif + static struct PyModuleDef __pyx_moduledef = { PyModuleDef_HEAD_INIT, - __Pyx_NAMESTR("cy_ctest"), + "cy_ctest", 0, /* m_doc */ + #if CYTHON_PEP489_MULTI_PHASE_INIT + 0, /* m_size */ + #else -1, /* m_size */ + #endif __pyx_methods /* m_methods */, + #if CYTHON_PEP489_MULTI_PHASE_INIT + __pyx_moduledef_slots, /* m_slots */ + #else NULL, /* m_reload */ + #endif NULL, /* m_traverse */ NULL, /* m_clear */ NULL /* m_free */ }; #endif +#ifndef CYTHON_SMALL_CODE +#if defined(__clang__) + #define CYTHON_SMALL_CODE +#elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3)) + #define CYTHON_SMALL_CODE __attribute__((cold)) +#else + #define CYTHON_SMALL_CODE +#endif +#endif static __Pyx_StringTabEntry __pyx_string_tab[] = { - {&__pyx_n_s____main__, __pyx_k____main__, sizeof(__pyx_k____main__), 0, 0, 1, 1}, - {&__pyx_n_s____test__, __pyx_k____test__, sizeof(__pyx_k____test__), 0, 0, 1, 1}, - {&__pyx_n_s__cy_ctest, __pyx_k__cy_ctest, sizeof(__pyx_k__cy_ctest), 0, 0, 1, 1}, - {&__pyx_n_s__pyhello, __pyx_k__pyhello, sizeof(__pyx_k__pyhello), 0, 0, 1, 1}, + {&__pyx_kp_s_Compiled_with_python_version_s, __pyx_k_Compiled_with_python_version_s, sizeof(__pyx_k_Compiled_with_python_version_s), 0, 0, 1, 0}, + {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1}, + {&__pyx_n_s_cy_ctest, __pyx_k_cy_ctest, sizeof(__pyx_k_cy_ctest), 0, 0, 1, 1}, + {&__pyx_n_s_end, __pyx_k_end, sizeof(__pyx_k_end), 0, 0, 1, 1}, + {&__pyx_n_s_file, __pyx_k_file, sizeof(__pyx_k_file), 0, 0, 1, 1}, + {&__pyx_kp_s_home_romain_dev_waf_wrapper_waf, __pyx_k_home_romain_dev_waf_wrapper_waf, sizeof(__pyx_k_home_romain_dev_waf_wrapper_waf), 0, 0, 1, 0}, + {&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1}, + {&__pyx_n_s_name, __pyx_k_name, sizeof(__pyx_k_name), 0, 0, 1, 1}, + {&__pyx_n_s_print, __pyx_k_print, sizeof(__pyx_k_print), 0, 0, 1, 1}, + {&__pyx_n_s_pyhello, __pyx_k_pyhello, sizeof(__pyx_k_pyhello), 0, 0, 1, 1}, + {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1}, {0, 0, 0, 0, 0, 0, 0} }; -static int __Pyx_InitCachedBuiltins(void) { +static CYTHON_SMALL_CODE int __Pyx_InitCachedBuiltins(void) { return 0; } -static int __Pyx_InitCachedConstants(void) { +static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) { __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants"); + __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); + + /* "cy_ctest.pyx":5 + * #cimport commented_import + * + * def pyhello(): # 
<<<<<<<<<<<<<< + * cy_ctest.hello() + * print("Compiled with python version %s" % PY_VERSION) + */ + __pyx_codeobj_ = (PyObject*)__Pyx_PyCode_New(0, 0, 0, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_home_romain_dev_waf_wrapper_waf, __pyx_n_s_pyhello, 5, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj_)) __PYX_ERR(0, 5, __pyx_L1_error) __Pyx_RefNannyFinishContext(); return 0; + __pyx_L1_error:; + __Pyx_RefNannyFinishContext(); + return -1; } -static int __Pyx_InitGlobals(void) { - if (__Pyx_InitStrings(__pyx_string_tab) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}; +static CYTHON_SMALL_CODE int __Pyx_InitGlobals(void) { + if (__Pyx_InitStrings(__pyx_string_tab) < 0) __PYX_ERR(0, 1, __pyx_L1_error); return 0; __pyx_L1_error:; return -1; } +static CYTHON_SMALL_CODE int __Pyx_modinit_global_init_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_variable_export_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_function_export_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_type_init_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_type_import_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_variable_import_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_function_import_code(void); /*proto*/ + +static int __Pyx_modinit_global_init_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_global_init_code", 0); + /*--- Global init code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_variable_export_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_variable_export_code", 0); + /*--- Variable export code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_function_export_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_function_export_code", 0); + /*--- Function export code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_type_init_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0); + /*--- Type init code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_type_import_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_type_import_code", 0); + /*--- Type import code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_variable_import_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_variable_import_code", 0); + /*--- Variable import code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_function_import_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_function_import_code", 0); + /*--- Function import code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + + #if PY_MAJOR_VERSION < 3 -PyMODINIT_FUNC initcy_ctest(void); /*proto*/ -PyMODINIT_FUNC initcy_ctest(void) +#ifdef CYTHON_NO_PYINIT_EXPORT +#define __Pyx_PyMODINIT_FUNC void +#else +#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC +#endif #else -PyMODINIT_FUNC PyInit_cy_ctest(void); /*proto*/ -PyMODINIT_FUNC PyInit_cy_ctest(void) +#ifdef CYTHON_NO_PYINIT_EXPORT +#define __Pyx_PyMODINIT_FUNC PyObject * +#else +#define __Pyx_PyMODINIT_FUNC 
PyMODINIT_FUNC +#endif +#endif + + +#if PY_MAJOR_VERSION < 3 +__Pyx_PyMODINIT_FUNC initcy_ctest(void) CYTHON_SMALL_CODE; /*proto*/ +__Pyx_PyMODINIT_FUNC initcy_ctest(void) +#else +__Pyx_PyMODINIT_FUNC PyInit_cy_ctest(void) CYTHON_SMALL_CODE; /*proto*/ +__Pyx_PyMODINIT_FUNC PyInit_cy_ctest(void) +#if CYTHON_PEP489_MULTI_PHASE_INIT +{ + return PyModuleDef_Init(&__pyx_moduledef); +} +static CYTHON_SMALL_CODE int __Pyx_check_single_interpreter(void) { + #if PY_VERSION_HEX >= 0x030700A1 + static PY_INT64_T main_interpreter_id = -1; + PY_INT64_T current_id = PyInterpreterState_GetID(PyThreadState_Get()->interp); + if (main_interpreter_id == -1) { + main_interpreter_id = current_id; + return (unlikely(current_id == -1)) ? -1 : 0; + } else if (unlikely(main_interpreter_id != current_id)) + #else + static PyInterpreterState *main_interpreter = NULL; + PyInterpreterState *current_interpreter = PyThreadState_Get()->interp; + if (!main_interpreter) { + main_interpreter = current_interpreter; + } else if (unlikely(main_interpreter != current_interpreter)) + #endif + { + PyErr_SetString( + PyExc_ImportError, + "Interpreter change detected - this module can only be loaded into one interpreter per process."); + return -1; + } + return 0; +} +static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *moddict, const char* from_name, const char* to_name) { + PyObject *value = PyObject_GetAttrString(spec, from_name); + int result = 0; + if (likely(value)) { + result = PyDict_SetItemString(moddict, to_name, value); + Py_DECREF(value); + } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) { + PyErr_Clear(); + } else { + result = -1; + } + return result; +} +static CYTHON_SMALL_CODE PyObject* __pyx_pymod_create(PyObject *spec, CYTHON_UNUSED PyModuleDef *def) { + PyObject *module = NULL, *moddict, *modname; + if (__Pyx_check_single_interpreter()) + return NULL; + if (__pyx_m) + return __Pyx_NewRef(__pyx_m); + modname = PyObject_GetAttrString(spec, "name"); + if (unlikely(!modname)) goto bad; + module = PyModule_NewObject(modname); + Py_DECREF(modname); + if (unlikely(!module)) goto bad; + moddict = PyModule_GetDict(module); + if (unlikely(!moddict)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "loader", "__loader__") < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "origin", "__file__") < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "parent", "__package__") < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "submodule_search_locations", "__path__") < 0)) goto bad; + return module; +bad: + Py_XDECREF(module); + return NULL; +} + + +static CYTHON_SMALL_CODE int __pyx_pymod_exec_cy_ctest(PyObject *__pyx_pyinit_module) +#endif #endif { PyObject *__pyx_t_1 = NULL; __Pyx_RefNannyDeclarations - #if CYTHON_REFNANNY - __Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); - if (!__Pyx_RefNanny) { - PyErr_Clear(); - __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny"); - if (!__Pyx_RefNanny) - Py_FatalError("failed to import 'refnanny' module"); + #if CYTHON_PEP489_MULTI_PHASE_INIT + if (__pyx_m) { + if (__pyx_m == __pyx_pyinit_module) return 0; + PyErr_SetString(PyExc_RuntimeError, "Module 'cy_ctest' has already been imported. 
Re-initialisation is not supported."); + return -1; } + #elif PY_MAJOR_VERSION >= 3 + if (__pyx_m) return __Pyx_NewRef(__pyx_m); #endif - __Pyx_RefNannySetupContext("PyMODINIT_FUNC PyInit_cy_ctest(void)"); - if ( __Pyx_check_binary_version() < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - #ifdef __pyx_binding_PyCFunctionType_USED - if (__pyx_binding_PyCFunctionType_init() < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + #if CYTHON_REFNANNY +__Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); +if (!__Pyx_RefNanny) { + PyErr_Clear(); + __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny"); + if (!__Pyx_RefNanny) + Py_FatalError("failed to import 'refnanny' module"); +} +#endif + __Pyx_RefNannySetupContext("__Pyx_PyMODINIT_FUNC PyInit_cy_ctest(void)", 0); + if (__Pyx_check_binary_version() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #ifdef __Pxy_PyFrame_Initialize_Offsets + __Pxy_PyFrame_Initialize_Offsets(); + #endif + __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(0, 1, __pyx_L1_error) + #ifdef __Pyx_CyFunction_USED + if (__pyx_CyFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_FusedFunction_USED + if (__pyx_FusedFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_Coroutine_USED + if (__pyx_Coroutine_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_Generator_USED + if (__pyx_Generator_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_AsyncGen_USED + if (__pyx_AsyncGen_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_StopAsyncIteration_USED + if (__pyx_StopAsyncIteration_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) #endif /*--- Library function declarations ---*/ /*--- Threads initialization code ---*/ @@ -518,83 +1393,113 @@ PyMODINIT_FUNC PyInit_cy_ctest(void) #endif #endif /*--- Module creation code ---*/ + #if CYTHON_PEP489_MULTI_PHASE_INIT + __pyx_m = __pyx_pyinit_module; + Py_INCREF(__pyx_m); + #else #if PY_MAJOR_VERSION < 3 - __pyx_m = Py_InitModule4(__Pyx_NAMESTR("cy_ctest"), __pyx_methods, 0, 0, PYTHON_API_VERSION); + __pyx_m = Py_InitModule4("cy_ctest", __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m); #else __pyx_m = PyModule_Create(&__pyx_moduledef); #endif - if (!__pyx_m) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}; - #if PY_MAJOR_VERSION < 3 - Py_INCREF(__pyx_m); + if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error) #endif - __pyx_b = PyImport_AddModule(__Pyx_NAMESTR(__Pyx_BUILTIN_MODULE_NAME)); - if (!__pyx_b) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}; - if (__Pyx_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto 
__pyx_L1_error;}; + __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error) + Py_INCREF(__pyx_d); + __pyx_b = PyImport_AddModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_cython_runtime = PyImport_AddModule((char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(0, 1, __pyx_L1_error) + #if CYTHON_COMPILING_IN_PYPY + Py_INCREF(__pyx_b); + #endif + if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error); /*--- Initialize various global constants etc. ---*/ - if (unlikely(__Pyx_InitGlobals() < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT) + if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif if (__pyx_module_is_main_cy_ctest) { - if (__Pyx_SetAttrString(__pyx_m, "__name__", __pyx_n_s____main__) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}; + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_name, __pyx_n_s_main) < 0) __PYX_ERR(0, 1, __pyx_L1_error) } + #if PY_MAJOR_VERSION >= 3 + { + PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(0, 1, __pyx_L1_error) + if (!PyDict_GetItemString(modules, "cy_ctest")) { + if (unlikely(PyDict_SetItemString(modules, "cy_ctest", __pyx_m) < 0)) __PYX_ERR(0, 1, __pyx_L1_error) + } + } + #endif /*--- Builtin init code ---*/ - if (unlikely(__Pyx_InitCachedBuiltins() < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 1, __pyx_L1_error) /*--- Constants init code ---*/ - if (unlikely(__Pyx_InitCachedConstants() < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - /*--- Global init code ---*/ - /*--- Variable export code ---*/ - /*--- Function export code ---*/ - /*--- Type init code ---*/ - /*--- Type import code ---*/ - /*--- Variable import code ---*/ - /*--- Function import code ---*/ + if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + /*--- Global type/function init code ---*/ + (void)__Pyx_modinit_global_init_code(); + (void)__Pyx_modinit_variable_export_code(); + (void)__Pyx_modinit_function_export_code(); + (void)__Pyx_modinit_type_init_code(); + (void)__Pyx_modinit_type_import_code(); + (void)__Pyx_modinit_variable_import_code(); + (void)__Pyx_modinit_function_import_code(); /*--- Execution code ---*/ + #if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) + if (__Pyx_patch_abc() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif - /* "cy_ctest.pyx":3 - * cimport cy_ctest + /* "cy_ctest.pyx":5 + * #cimport commented_import * * def pyhello(): # <<<<<<<<<<<<<< * cy_ctest.hello() + * print("Compiled with python version %s" % PY_VERSION) */ - __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_8cy_ctest_pyhello, NULL, __pyx_n_s__cy_ctest); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 3; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_8cy_ctest_1pyhello, NULL, __pyx_n_s_cy_ctest); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 5, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - if (PyObject_SetAttr(__pyx_m, __pyx_n_s__pyhello, __pyx_t_1) < 0) 
{__pyx_filename = __pyx_f[0]; __pyx_lineno = 3; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + if (PyDict_SetItem(__pyx_d, __pyx_n_s_pyhello, __pyx_t_1) < 0) __PYX_ERR(0, 5, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; /* "cy_ctest.pyx":1 - * cimport cy_ctest # <<<<<<<<<<<<<< - * - * def pyhello(): + * from cpython.version cimport PY_VERSION # <<<<<<<<<<<<<< + * cimport cy_ctest + * #cimport commented_import */ - __pyx_t_1 = PyDict_New(); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_1)); - if (PyObject_SetAttr(__pyx_m, __pyx_n_s____test__, ((PyObject *)__pyx_t_1)) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(((PyObject *)__pyx_t_1)); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_1) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /*--- Wrapped vars code ---*/ + goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); if (__pyx_m) { - __Pyx_AddTraceback("init cy_ctest", __pyx_clineno, __pyx_lineno, __pyx_filename); - Py_DECREF(__pyx_m); __pyx_m = 0; + if (__pyx_d) { + __Pyx_AddTraceback("init cy_ctest", __pyx_clineno, __pyx_lineno, __pyx_filename); + } + Py_CLEAR(__pyx_m); } else if (!PyErr_Occurred()) { PyErr_SetString(PyExc_ImportError, "init cy_ctest"); } __pyx_L0:; __Pyx_RefNannyFinishContext(); - #if PY_MAJOR_VERSION < 3 - return; - #else + #if CYTHON_PEP489_MULTI_PHASE_INIT + return (__pyx_m != NULL) ? 0 : -1; + #elif PY_MAJOR_VERSION >= 3 return __pyx_m; + #else + return; #endif } -/* Runtime support code */ - +/* --- Runtime support code --- */ +/* Refnanny */ #if CYTHON_REFNANNY static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) { PyObject *m = NULL, *p = NULL; void *r = NULL; - m = PyImport_ImportModule((char *)modname); + m = PyImport_ImportModule(modname); if (!m) goto end; - p = PyObject_GetAttrString(m, (char *)"RefNannyAPI"); + p = PyObject_GetAttrString(m, "RefNannyAPI"); if (!p) goto end; r = PyLong_AsVoidPtr(p); end: @@ -602,408 +1507,928 @@ static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) { Py_XDECREF(m); return (__Pyx_RefNannyAPIStruct *)r; } -#endif /* CYTHON_REFNANNY */ +#endif -static CYTHON_INLINE unsigned char __Pyx_PyInt_AsUnsignedChar(PyObject* x) { - const unsigned char neg_one = (unsigned char)-1, const_zero = 0; - const int is_unsigned = neg_one > const_zero; - if (sizeof(unsigned char) < sizeof(long)) { - long val = __Pyx_PyInt_AsLong(x); - if (unlikely(val != (long)(unsigned char)val)) { - if (!unlikely(val == -1 && PyErr_Occurred())) { - PyErr_SetString(PyExc_OverflowError, - (is_unsigned && unlikely(val < 0)) ? 
- "can't convert negative value to unsigned char" : - "value too large to convert to unsigned char"); - } - return (unsigned char)-1; - } - return (unsigned char)val; - } - return (unsigned char)__Pyx_PyInt_AsUnsignedLong(x); +/* PyObjectGetAttrStr */ +#if CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) { + PyTypeObject* tp = Py_TYPE(obj); + if (likely(tp->tp_getattro)) + return tp->tp_getattro(obj, attr_name); +#if PY_MAJOR_VERSION < 3 + if (likely(tp->tp_getattr)) + return tp->tp_getattr(obj, PyString_AS_STRING(attr_name)); +#endif + return PyObject_GetAttr(obj, attr_name); } +#endif -static CYTHON_INLINE unsigned short __Pyx_PyInt_AsUnsignedShort(PyObject* x) { - const unsigned short neg_one = (unsigned short)-1, const_zero = 0; - const int is_unsigned = neg_one > const_zero; - if (sizeof(unsigned short) < sizeof(long)) { - long val = __Pyx_PyInt_AsLong(x); - if (unlikely(val != (long)(unsigned short)val)) { - if (!unlikely(val == -1 && PyErr_Occurred())) { - PyErr_SetString(PyExc_OverflowError, - (is_unsigned && unlikely(val < 0)) ? - "can't convert negative value to unsigned short" : - "value too large to convert to unsigned short"); - } - return (unsigned short)-1; - } - return (unsigned short)val; - } - return (unsigned short)__Pyx_PyInt_AsUnsignedLong(x); +/* PyErrFetchRestore */ +#if CYTHON_FAST_THREAD_STATE +static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { + PyObject *tmp_type, *tmp_value, *tmp_tb; + tmp_type = tstate->curexc_type; + tmp_value = tstate->curexc_value; + tmp_tb = tstate->curexc_traceback; + tstate->curexc_type = type; + tstate->curexc_value = value; + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +} +static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { + *type = tstate->curexc_type; + *value = tstate->curexc_value; + *tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; } +#endif -static CYTHON_INLINE unsigned int __Pyx_PyInt_AsUnsignedInt(PyObject* x) { - const unsigned int neg_one = (unsigned int)-1, const_zero = 0; - const int is_unsigned = neg_one > const_zero; - if (sizeof(unsigned int) < sizeof(long)) { - long val = __Pyx_PyInt_AsLong(x); - if (unlikely(val != (long)(unsigned int)val)) { - if (!unlikely(val == -1 && PyErr_Occurred())) { - PyErr_SetString(PyExc_OverflowError, - (is_unsigned && unlikely(val < 0)) ? 
- "can't convert negative value to unsigned int" : - "value too large to convert to unsigned int"); - } - return (unsigned int)-1; - } - return (unsigned int)val; +/* CLineInTraceback */ +#ifndef CYTHON_CLINE_IN_TRACEBACK +static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line) { + PyObject *use_cline; + PyObject *ptype, *pvalue, *ptraceback; +#if CYTHON_COMPILING_IN_CPYTHON + PyObject **cython_runtime_dict; +#endif + if (unlikely(!__pyx_cython_runtime)) { + return c_line; } - return (unsigned int)__Pyx_PyInt_AsUnsignedLong(x); + __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); +#if CYTHON_COMPILING_IN_CPYTHON + cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime); + if (likely(cython_runtime_dict)) { + __PYX_PY_DICT_LOOKUP_IF_MODIFIED( + use_cline, *cython_runtime_dict, + __Pyx_PyDict_GetItemStr(*cython_runtime_dict, __pyx_n_s_cline_in_traceback)) + } else +#endif + { + PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback); + if (use_cline_obj) { + use_cline = PyObject_Not(use_cline_obj) ? Py_False : Py_True; + Py_DECREF(use_cline_obj); + } else { + PyErr_Clear(); + use_cline = NULL; + } + } + if (!use_cline) { + c_line = 0; + PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False); + } + else if (use_cline == Py_False || (use_cline != Py_True && PyObject_Not(use_cline) != 0)) { + c_line = 0; + } + __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); + return c_line; } +#endif -static CYTHON_INLINE char __Pyx_PyInt_AsChar(PyObject* x) { - const char neg_one = (char)-1, const_zero = 0; - const int is_unsigned = neg_one > const_zero; - if (sizeof(char) < sizeof(long)) { - long val = __Pyx_PyInt_AsLong(x); - if (unlikely(val != (long)(char)val)) { - if (!unlikely(val == -1 && PyErr_Occurred())) { - PyErr_SetString(PyExc_OverflowError, - (is_unsigned && unlikely(val < 0)) ? - "can't convert negative value to char" : - "value too large to convert to char"); - } - return (char)-1; +/* CodeObjectCache */ +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { + int start = 0, mid = 0, end = count - 1; + if (end >= 0 && code_line > entries[end].code_line) { + return count; + } + while (start < end) { + mid = start + (end - start) / 2; + if (code_line < entries[mid].code_line) { + end = mid; + } else if (code_line > entries[mid].code_line) { + start = mid + 1; + } else { + return mid; } - return (char)val; } - return (char)__Pyx_PyInt_AsLong(x); + if (code_line <= entries[mid].code_line) { + return mid; + } else { + return mid + 1; + } } - -static CYTHON_INLINE short __Pyx_PyInt_AsShort(PyObject* x) { - const short neg_one = (short)-1, const_zero = 0; - const int is_unsigned = neg_one > const_zero; - if (sizeof(short) < sizeof(long)) { - long val = __Pyx_PyInt_AsLong(x); - if (unlikely(val != (long)(short)val)) { - if (!unlikely(val == -1 && PyErr_Occurred())) { - PyErr_SetString(PyExc_OverflowError, - (is_unsigned && unlikely(val < 0)) ? 
- "can't convert negative value to short" : - "value too large to convert to short"); - } - return (short)-1; - } - return (short)val; +static PyCodeObject *__pyx_find_code_object(int code_line) { + PyCodeObject* code_object; + int pos; + if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) { + return NULL; } - return (short)__Pyx_PyInt_AsLong(x); + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) { + return NULL; + } + code_object = __pyx_code_cache.entries[pos].code_object; + Py_INCREF(code_object); + return code_object; } - -static CYTHON_INLINE int __Pyx_PyInt_AsInt(PyObject* x) { - const int neg_one = (int)-1, const_zero = 0; - const int is_unsigned = neg_one > const_zero; - if (sizeof(int) < sizeof(long)) { - long val = __Pyx_PyInt_AsLong(x); - if (unlikely(val != (long)(int)val)) { - if (!unlikely(val == -1 && PyErr_Occurred())) { - PyErr_SetString(PyExc_OverflowError, - (is_unsigned && unlikely(val < 0)) ? - "can't convert negative value to int" : - "value too large to convert to int"); - } - return (int)-1; +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { + int pos, i; + __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; + if (unlikely(!code_line)) { + return; + } + if (unlikely(!entries)) { + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); + if (likely(entries)) { + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = 64; + __pyx_code_cache.count = 1; + entries[0].code_line = code_line; + entries[0].code_object = code_object; + Py_INCREF(code_object); } - return (int)val; + return; } - return (int)__Pyx_PyInt_AsLong(x); -} - -static CYTHON_INLINE signed char __Pyx_PyInt_AsSignedChar(PyObject* x) { - const signed char neg_one = (signed char)-1, const_zero = 0; - const int is_unsigned = neg_one > const_zero; - if (sizeof(signed char) < sizeof(long)) { - long val = __Pyx_PyInt_AsLong(x); - if (unlikely(val != (long)(signed char)val)) { - if (!unlikely(val == -1 && PyErr_Occurred())) { - PyErr_SetString(PyExc_OverflowError, - (is_unsigned && unlikely(val < 0)) ? 
- "can't convert negative value to signed char" : - "value too large to convert to signed char"); - } - return (signed char)-1; + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) { + PyCodeObject* tmp = entries[pos].code_object; + entries[pos].code_object = code_object; + Py_DECREF(tmp); + return; + } + if (__pyx_code_cache.count == __pyx_code_cache.max_count) { + int new_max = __pyx_code_cache.max_count + 64; + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( + __pyx_code_cache.entries, (size_t)new_max*sizeof(__Pyx_CodeObjectCacheEntry)); + if (unlikely(!entries)) { + return; } - return (signed char)val; + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = new_max; + } + for (i=__pyx_code_cache.count; i>pos; i--) { + entries[i] = entries[i-1]; } - return (signed char)__Pyx_PyInt_AsSignedLong(x); + entries[pos].code_line = code_line; + entries[pos].code_object = code_object; + __pyx_code_cache.count++; + Py_INCREF(code_object); } -static CYTHON_INLINE signed short __Pyx_PyInt_AsSignedShort(PyObject* x) { - const signed short neg_one = (signed short)-1, const_zero = 0; - const int is_unsigned = neg_one > const_zero; - if (sizeof(signed short) < sizeof(long)) { - long val = __Pyx_PyInt_AsLong(x); - if (unlikely(val != (long)(signed short)val)) { - if (!unlikely(val == -1 && PyErr_Occurred())) { - PyErr_SetString(PyExc_OverflowError, - (is_unsigned && unlikely(val < 0)) ? - "can't convert negative value to signed short" : - "value too large to convert to signed short"); - } - return (signed short)-1; - } - return (signed short)val; +/* AddTraceback */ +#include "compile.h" +#include "frameobject.h" +#include "traceback.h" +static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( + const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyObject *py_srcfile = 0; + PyObject *py_funcname = 0; + #if PY_MAJOR_VERSION < 3 + py_srcfile = PyString_FromString(filename); + #else + py_srcfile = PyUnicode_FromString(filename); + #endif + if (!py_srcfile) goto bad; + if (c_line) { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + #else + py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + #endif } - return (signed short)__Pyx_PyInt_AsSignedLong(x); + else { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromString(funcname); + #else + py_funcname = PyUnicode_FromString(funcname); + #endif + } + if (!py_funcname) goto bad; + py_code = __Pyx_PyCode_New( + 0, + 0, + 0, + 0, + 0, + __pyx_empty_bytes, /*PyObject *code,*/ + __pyx_empty_tuple, /*PyObject *consts,*/ + __pyx_empty_tuple, /*PyObject *names,*/ + __pyx_empty_tuple, /*PyObject *varnames,*/ + __pyx_empty_tuple, /*PyObject *freevars,*/ + __pyx_empty_tuple, /*PyObject *cellvars,*/ + py_srcfile, /*PyObject *filename,*/ + py_funcname, /*PyObject *name,*/ + py_line, + __pyx_empty_bytes /*PyObject *lnotab*/ + ); + Py_DECREF(py_srcfile); + Py_DECREF(py_funcname); + return py_code; +bad: + Py_XDECREF(py_srcfile); + Py_XDECREF(py_funcname); + return NULL; } - -static CYTHON_INLINE signed int __Pyx_PyInt_AsSignedInt(PyObject* x) { - const signed int neg_one = (signed int)-1, const_zero = 0; - const int is_unsigned = neg_one > const_zero; - if (sizeof(signed int) < sizeof(long)) { - long val = __Pyx_PyInt_AsLong(x); - if (unlikely(val != 
(long)(signed int)val)) { - if (!unlikely(val == -1 && PyErr_Occurred())) { - PyErr_SetString(PyExc_OverflowError, - (is_unsigned && unlikely(val < 0)) ? - "can't convert negative value to signed int" : - "value too large to convert to signed int"); - } - return (signed int)-1; - } - return (signed int)val; +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyFrameObject *py_frame = 0; + PyThreadState *tstate = __Pyx_PyThreadState_Current; + if (c_line) { + c_line = __Pyx_CLineForTraceback(tstate, c_line); + } + py_code = __pyx_find_code_object(c_line ? -c_line : py_line); + if (!py_code) { + py_code = __Pyx_CreateCodeObjectForTraceback( + funcname, c_line, py_line, filename); + if (!py_code) goto bad; + __pyx_insert_code_object(c_line ? -c_line : py_line, py_code); } - return (signed int)__Pyx_PyInt_AsSignedLong(x); + py_frame = PyFrame_New( + tstate, /*PyThreadState *tstate,*/ + py_code, /*PyCodeObject *code,*/ + __pyx_d, /*PyObject *globals,*/ + 0 /*PyObject *locals*/ + ); + if (!py_frame) goto bad; + __Pyx_PyFrame_SetLineNumber(py_frame, py_line); + PyTraceBack_Here(py_frame); +bad: + Py_XDECREF(py_code); + Py_XDECREF(py_frame); } -static CYTHON_INLINE int __Pyx_PyInt_AsLongDouble(PyObject* x) { - const int neg_one = (int)-1, const_zero = 0; - const int is_unsigned = neg_one > const_zero; - if (sizeof(int) < sizeof(long)) { - long val = __Pyx_PyInt_AsLong(x); - if (unlikely(val != (long)(int)val)) { - if (!unlikely(val == -1 && PyErr_Occurred())) { - PyErr_SetString(PyExc_OverflowError, - (is_unsigned && unlikely(val < 0)) ? - "can't convert negative value to int" : - "value too large to convert to int"); +/* Print */ +#if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION < 3 +static PyObject *__Pyx_GetStdout(void) { + PyObject *f = PySys_GetObject((char *)"stdout"); + if (!f) { + PyErr_SetString(PyExc_RuntimeError, "lost sys.stdout"); + } + return f; +} +static int __Pyx_Print(PyObject* f, PyObject *arg_tuple, int newline) { + int i; + if (!f) { + if (!(f = __Pyx_GetStdout())) + return -1; + } + Py_INCREF(f); + for (i=0; i < PyTuple_GET_SIZE(arg_tuple); i++) { + PyObject* v; + if (PyFile_SoftSpace(f, 1)) { + if (PyFile_WriteString(" ", f) < 0) + goto error; + } + v = PyTuple_GET_ITEM(arg_tuple, i); + if (PyFile_WriteObject(v, f, Py_PRINT_RAW) < 0) + goto error; + if (PyString_Check(v)) { + char *s = PyString_AsString(v); + Py_ssize_t len = PyString_Size(v); + if (len > 0) { + switch (s[len-1]) { + case ' ': break; + case '\f': case '\r': case '\n': case '\t': case '\v': + PyFile_SoftSpace(f, 0); + break; + default: break; + } } - return (int)-1; } - return (int)val; } - return (int)__Pyx_PyInt_AsLong(x); + if (newline) { + if (PyFile_WriteString("\n", f) < 0) + goto error; + PyFile_SoftSpace(f, 0); + } + Py_DECREF(f); + return 0; +error: + Py_DECREF(f); + return -1; } - -static CYTHON_INLINE unsigned long __Pyx_PyInt_AsUnsignedLong(PyObject* x) { - const unsigned long neg_one = (unsigned long)-1, const_zero = 0; - const int is_unsigned = neg_one > const_zero; -#if PY_VERSION_HEX < 0x03000000 - if (likely(PyInt_Check(x))) { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to unsigned long"); - return (unsigned long)-1; +#else +static int __Pyx_Print(PyObject* stream, PyObject *arg_tuple, int newline) { + PyObject* kwargs = 0; + PyObject* result = 0; + PyObject* end_string; + if (unlikely(!__pyx_print)) { + 
__pyx_print = PyObject_GetAttr(__pyx_b, __pyx_n_s_print); + if (!__pyx_print) + return -1; + } + if (stream) { + kwargs = PyDict_New(); + if (unlikely(!kwargs)) + return -1; + if (unlikely(PyDict_SetItem(kwargs, __pyx_n_s_file, stream) < 0)) + goto bad; + if (!newline) { + end_string = PyUnicode_FromStringAndSize(" ", 1); + if (unlikely(!end_string)) + goto bad; + if (PyDict_SetItem(kwargs, __pyx_n_s_end, end_string) < 0) { + Py_DECREF(end_string); + goto bad; + } + Py_DECREF(end_string); } - return (unsigned long)val; - } else -#endif - if (likely(PyLong_Check(x))) { - if (is_unsigned) { - if (unlikely(Py_SIZE(x) < 0)) { - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to unsigned long"); - return (unsigned long)-1; + } else if (!newline) { + if (unlikely(!__pyx_print_kwargs)) { + __pyx_print_kwargs = PyDict_New(); + if (unlikely(!__pyx_print_kwargs)) + return -1; + end_string = PyUnicode_FromStringAndSize(" ", 1); + if (unlikely(!end_string)) + return -1; + if (PyDict_SetItem(__pyx_print_kwargs, __pyx_n_s_end, end_string) < 0) { + Py_DECREF(end_string); + return -1; } - return (unsigned long)PyLong_AsUnsignedLong(x); - } else { - return (unsigned long)PyLong_AsLong(x); + Py_DECREF(end_string); } - } else { - unsigned long val; - PyObject *tmp = __Pyx_PyNumber_Int(x); - if (!tmp) return (unsigned long)-1; - val = __Pyx_PyInt_AsUnsignedLong(tmp); - Py_DECREF(tmp); - return val; + kwargs = __pyx_print_kwargs; } + result = PyObject_Call(__pyx_print, arg_tuple, kwargs); + if (unlikely(kwargs) && (kwargs != __pyx_print_kwargs)) + Py_DECREF(kwargs); + if (!result) + return -1; + Py_DECREF(result); + return 0; +bad: + if (kwargs != __pyx_print_kwargs) + Py_XDECREF(kwargs); + return -1; } +#endif -static CYTHON_INLINE unsigned PY_LONG_LONG __Pyx_PyInt_AsUnsignedLongLong(PyObject* x) { - const unsigned PY_LONG_LONG neg_one = (unsigned PY_LONG_LONG)-1, const_zero = 0; +/* PrintOne */ +#if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION < 3 +static int __Pyx_PrintOne(PyObject* f, PyObject *o) { + if (!f) { + if (!(f = __Pyx_GetStdout())) + return -1; + } + Py_INCREF(f); + if (PyFile_SoftSpace(f, 0)) { + if (PyFile_WriteString(" ", f) < 0) + goto error; + } + if (PyFile_WriteObject(o, f, Py_PRINT_RAW) < 0) + goto error; + if (PyFile_WriteString("\n", f) < 0) + goto error; + Py_DECREF(f); + return 0; +error: + Py_DECREF(f); + return -1; + /* the line below is just to avoid C compiler + * warnings about unused functions */ + return __Pyx_Print(f, NULL, 0); +} +#else +static int __Pyx_PrintOne(PyObject* stream, PyObject *o) { + int res; + PyObject* arg_tuple = PyTuple_Pack(1, o); + if (unlikely(!arg_tuple)) + return -1; + res = __Pyx_Print(stream, arg_tuple, 1); + Py_DECREF(arg_tuple); + return res; +} +#endif + +/* CIntToPy */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) { + const long neg_one = (long) ((long) 0 - (long) 1), const_zero = (long) 0; const int is_unsigned = neg_one > const_zero; -#if PY_VERSION_HEX < 0x03000000 - if (likely(PyInt_Check(x))) { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to unsigned PY_LONG_LONG"); - return (unsigned PY_LONG_LONG)-1; - } - return (unsigned PY_LONG_LONG)val; - } else + if (is_unsigned) { + if (sizeof(long) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(long) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) 
<= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); #endif - if (likely(PyLong_Check(x))) { - if (is_unsigned) { - if (unlikely(Py_SIZE(x) < 0)) { - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to unsigned PY_LONG_LONG"); - return (unsigned PY_LONG_LONG)-1; - } - return (unsigned PY_LONG_LONG)PyLong_AsUnsignedLongLong(x); - } else { - return (unsigned PY_LONG_LONG)PyLong_AsLongLong(x); } } else { - unsigned PY_LONG_LONG val; - PyObject *tmp = __Pyx_PyNumber_Int(x); - if (!tmp) return (unsigned PY_LONG_LONG)-1; - val = __Pyx_PyInt_AsUnsignedLongLong(tmp); - Py_DECREF(tmp); - return val; + if (sizeof(long) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + int one = 1; int little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&value; + return _PyLong_FromByteArray(bytes, sizeof(long), + little, !is_unsigned); } } -static CYTHON_INLINE long __Pyx_PyInt_AsLong(PyObject* x) { - const long neg_one = (long)-1, const_zero = 0; +/* CIntFromPyVerify */ +#define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0) +#define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1) +#define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\ + {\ + func_type value = func_value;\ + if (sizeof(target_type) < sizeof(func_type)) {\ + if (unlikely(value != (func_type) (target_type) value)) {\ + func_type zero = 0;\ + if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\ + return (target_type) -1;\ + if (is_unsigned && unlikely(value < zero))\ + goto raise_neg_overflow;\ + else\ + goto raise_overflow;\ + }\ + }\ + return (target_type) value;\ + } + +/* CIntFromPy */ +static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) { + const long neg_one = (long) ((long) 0 - (long) 1), const_zero = (long) 0; const int is_unsigned = neg_one > const_zero; -#if PY_VERSION_HEX < 0x03000000 +#if PY_MAJOR_VERSION < 3 if (likely(PyInt_Check(x))) { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to long"); - return (long)-1; + if (sizeof(long) < sizeof(long)) { + __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (long) val; } - return (long)val; } else #endif if (likely(PyLong_Check(x))) { if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (long) 0; + case 1: __PYX_VERIFY_RETURN_INT(long, digit, digits[0]) + case 2: + if (8 * sizeof(long) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 2 * PyLong_SHIFT) { + return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 3: + if (8 * sizeof(long) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | 
(unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 3 * PyLong_SHIFT) { + return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 4: + if (8 * sizeof(long) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 4 * PyLong_SHIFT) { + return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON if (unlikely(Py_SIZE(x) < 0)) { - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to long"); - return (long)-1; + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (long) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if (sizeof(long) <= sizeof(unsigned long)) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif } - return (long)PyLong_AsUnsignedLong(x); } else { - return (long)PyLong_AsLong(x); +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (long) 0; + case -1: __PYX_VERIFY_RETURN_INT(long, sdigit, (sdigit) (-(sdigit)digits[0])) + case 1: __PYX_VERIFY_RETURN_INT(long, digit, +digits[0]) + case -2: + if (8 * sizeof(long) - 1 > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 2: + if (8 * sizeof(long) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -3: + if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 3: + if (8 * sizeof(long) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << 
PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -4: + if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 4: + if (8 * sizeof(long) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + } +#endif + if (sizeof(long) <= sizeof(long)) { + __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { +#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) + PyErr_SetString(PyExc_RuntimeError, + "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); +#else + long val; + PyObject *v = __Pyx_PyNumber_IntOrLong(x); + #if PY_MAJOR_VERSION < 3 + if (likely(v) && !PyLong_Check(v)) { + PyObject *tmp = v; + v = PyNumber_Long(tmp); + Py_DECREF(tmp); + } + #endif + if (likely(v)) { + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + int ret = _PyLong_AsByteArray((PyLongObject *)v, + bytes, sizeof(val), + is_little, !is_unsigned); + Py_DECREF(v); + if (likely(!ret)) + return val; + } +#endif + return (long) -1; } } else { long val; - PyObject *tmp = __Pyx_PyNumber_Int(x); - if (!tmp) return (long)-1; - val = __Pyx_PyInt_AsLong(tmp); + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (long) -1; + val = __Pyx_PyInt_As_long(tmp); Py_DECREF(tmp); return val; } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to long"); + return (long) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to long"); + return (long) -1; } -static CYTHON_INLINE PY_LONG_LONG __Pyx_PyInt_AsLongLong(PyObject* x) { - const PY_LONG_LONG neg_one = (PY_LONG_LONG)-1, const_zero = 0; +/* CIntFromPy */ +static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) { + const int neg_one = (int) ((int) 0 - (int) 1), const_zero = (int) 0; const int is_unsigned = neg_one > const_zero; -#if PY_VERSION_HEX < 0x03000000 +#if PY_MAJOR_VERSION < 3 if (likely(PyInt_Check(x))) { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to PY_LONG_LONG"); - return (PY_LONG_LONG)-1; + if (sizeof(int) < sizeof(long)) { + __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (int) val; } - return (PY_LONG_LONG)val; } else #endif if 
(likely(PyLong_Check(x))) { if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (int) 0; + case 1: __PYX_VERIFY_RETURN_INT(int, digit, digits[0]) + case 2: + if (8 * sizeof(int) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 2 * PyLong_SHIFT) { + return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 3: + if (8 * sizeof(int) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 3 * PyLong_SHIFT) { + return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 4: + if (8 * sizeof(int) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 4 * PyLong_SHIFT) { + return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON if (unlikely(Py_SIZE(x) < 0)) { - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to PY_LONG_LONG"); - return (PY_LONG_LONG)-1; + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (int) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if (sizeof(int) <= sizeof(unsigned long)) { + __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif } - return (PY_LONG_LONG)PyLong_AsUnsignedLongLong(x); } else { - return (PY_LONG_LONG)PyLong_AsLongLong(x); +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (int) 0; + case -1: __PYX_VERIFY_RETURN_INT(int, sdigit, (sdigit) (-(sdigit)digits[0])) + case 1: __PYX_VERIFY_RETURN_INT(int, digit, +digits[0]) + case -2: + if (8 * sizeof(int) - 1 > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 2: + if (8 * sizeof(int) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case -3: + if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + if (8 * 
sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 3: + if (8 * sizeof(int) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case -4: + if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 4: + if (8 * sizeof(int) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { + return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + } +#endif + if (sizeof(int) <= sizeof(long)) { + __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { +#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) + PyErr_SetString(PyExc_RuntimeError, + "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); +#else + int val; + PyObject *v = __Pyx_PyNumber_IntOrLong(x); + #if PY_MAJOR_VERSION < 3 + if (likely(v) && !PyLong_Check(v)) { + PyObject *tmp = v; + v = PyNumber_Long(tmp); + Py_DECREF(tmp); + } + #endif + if (likely(v)) { + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + int ret = _PyLong_AsByteArray((PyLongObject *)v, + bytes, sizeof(val), + is_little, !is_unsigned); + Py_DECREF(v); + if (likely(!ret)) + return val; + } +#endif + return (int) -1; } } else { - PY_LONG_LONG val; - PyObject *tmp = __Pyx_PyNumber_Int(x); - if (!tmp) return (PY_LONG_LONG)-1; - val = __Pyx_PyInt_AsLongLong(tmp); + int val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (int) -1; + val = __Pyx_PyInt_As_int(tmp); Py_DECREF(tmp); return val; } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to int"); + return (int) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to int"); + return (int) -1; } -static CYTHON_INLINE signed long __Pyx_PyInt_AsSignedLong(PyObject* x) { - 
const signed long neg_one = (signed long)-1, const_zero = 0; - const int is_unsigned = neg_one > const_zero; -#if PY_VERSION_HEX < 0x03000000 - if (likely(PyInt_Check(x))) { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to signed long"); - return (signed long)-1; +/* FastTypeChecks */ +#if CYTHON_COMPILING_IN_CPYTHON +static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) { + while (a) { + a = a->tp_base; + if (a == b) + return 1; + } + return b == &PyBaseObject_Type; +} +static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) { + PyObject *mro; + if (a == b) return 1; + mro = a->tp_mro; + if (likely(mro)) { + Py_ssize_t i, n; + n = PyTuple_GET_SIZE(mro); + for (i = 0; i < n; i++) { + if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b) + return 1; } - return (signed long)val; - } else + return 0; + } + return __Pyx_InBases(a, b); +} +#if PY_MAJOR_VERSION == 2 +static int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) { + PyObject *exception, *value, *tb; + int res; + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ErrFetch(&exception, &value, &tb); + res = exc_type1 ? PyObject_IsSubclass(err, exc_type1) : 0; + if (unlikely(res == -1)) { + PyErr_WriteUnraisable(err); + res = 0; + } + if (!res) { + res = PyObject_IsSubclass(err, exc_type2); + if (unlikely(res == -1)) { + PyErr_WriteUnraisable(err); + res = 0; + } + } + __Pyx_ErrRestore(exception, value, tb); + return res; +} +#else +static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) { + int res = exc_type1 ? __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type1) : 0; + if (!res) { + res = __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2); + } + return res; +} #endif - if (likely(PyLong_Check(x))) { - if (is_unsigned) { - if (unlikely(Py_SIZE(x) < 0)) { - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to signed long"); - return (signed long)-1; - } - return (signed long)PyLong_AsUnsignedLong(x); +static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { + Py_ssize_t i, n; + assert(PyExceptionClass_Check(exc_type)); + n = PyTuple_GET_SIZE(tuple); +#if PY_MAJOR_VERSION >= 3 + for (i=0; i const_zero; -#if PY_VERSION_HEX < 0x03000000 - if (likely(PyInt_Check(x))) { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to signed PY_LONG_LONG"); - return (signed PY_LONG_LONG)-1; - } - return (signed PY_LONG_LONG)val; - } else -#endif - if (likely(PyLong_Check(x))) { - if (is_unsigned) { - if (unlikely(Py_SIZE(x) < 0)) { - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to signed PY_LONG_LONG"); - return (signed PY_LONG_LONG)-1; - } - return (signed PY_LONG_LONG)PyLong_AsUnsignedLongLong(x); +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject* exc_type) { + if (likely(err == exc_type)) return 1; + if (likely(PyExceptionClass_Check(err))) { + if (likely(PyExceptionClass_Check(exc_type))) { + return __Pyx_inner_PyErr_GivenExceptionMatches2(err, NULL, exc_type); + } else if (likely(PyTuple_Check(exc_type))) { + return __Pyx_PyErr_GivenExceptionMatchesTuple(err, exc_type); } else { - return (signed PY_LONG_LONG)PyLong_AsLongLong(x); } - } else { - signed PY_LONG_LONG val; - PyObject *tmp = 
__Pyx_PyNumber_Int(x); - if (!tmp) return (signed PY_LONG_LONG)-1; - val = __Pyx_PyInt_AsSignedLongLong(tmp); - Py_DECREF(tmp); - return val; } + return PyErr_GivenExceptionMatches(err, exc_type); } +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *exc_type1, PyObject *exc_type2) { + assert(PyExceptionClass_Check(exc_type1)); + assert(PyExceptionClass_Check(exc_type2)); + if (likely(err == exc_type1 || err == exc_type2)) return 1; + if (likely(PyExceptionClass_Check(err))) { + return __Pyx_inner_PyErr_GivenExceptionMatches2(err, exc_type1, exc_type2); + } + return (PyErr_GivenExceptionMatches(err, exc_type1) || PyErr_GivenExceptionMatches(err, exc_type2)); +} +#endif +/* CheckBinaryVersion */ static int __Pyx_check_binary_version(void) { char ctversion[4], rtversion[4]; PyOS_snprintf(ctversion, 4, "%d.%d", PY_MAJOR_VERSION, PY_MINOR_VERSION); @@ -1014,86 +2439,12 @@ static int __Pyx_check_binary_version(void) { "compiletime version %s of module '%.100s' " "does not match runtime version %s", ctversion, __Pyx_MODULE_NAME, rtversion); - #if PY_VERSION_HEX < 0x02050000 - return PyErr_Warn(NULL, message); - #else return PyErr_WarnEx(NULL, message, 1); - #endif } return 0; } -#include "compile.h" -#include "frameobject.h" -#include "traceback.h" - -static void __Pyx_AddTraceback(const char *funcname, int __pyx_clineno, - int __pyx_lineno, const char *__pyx_filename) { - PyObject *py_srcfile = 0; - PyObject *py_funcname = 0; - PyObject *py_globals = 0; - PyCodeObject *py_code = 0; - PyFrameObject *py_frame = 0; - - #if PY_MAJOR_VERSION < 3 - py_srcfile = PyString_FromString(__pyx_filename); - #else - py_srcfile = PyUnicode_FromString(__pyx_filename); - #endif - if (!py_srcfile) goto bad; - if (__pyx_clineno) { - #if PY_MAJOR_VERSION < 3 - py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, __pyx_clineno); - #else - py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, __pyx_clineno); - #endif - } - else { - #if PY_MAJOR_VERSION < 3 - py_funcname = PyString_FromString(funcname); - #else - py_funcname = PyUnicode_FromString(funcname); - #endif - } - if (!py_funcname) goto bad; - py_globals = PyModule_GetDict(__pyx_m); - if (!py_globals) goto bad; - py_code = PyCode_New( - 0, /*int argcount,*/ - #if PY_MAJOR_VERSION >= 3 - 0, /*int kwonlyargcount,*/ - #endif - 0, /*int nlocals,*/ - 0, /*int stacksize,*/ - 0, /*int flags,*/ - __pyx_empty_bytes, /*PyObject *code,*/ - __pyx_empty_tuple, /*PyObject *consts,*/ - __pyx_empty_tuple, /*PyObject *names,*/ - __pyx_empty_tuple, /*PyObject *varnames,*/ - __pyx_empty_tuple, /*PyObject *freevars,*/ - __pyx_empty_tuple, /*PyObject *cellvars,*/ - py_srcfile, /*PyObject *filename,*/ - py_funcname, /*PyObject *name,*/ - __pyx_lineno, /*int firstlineno,*/ - __pyx_empty_bytes /*PyObject *lnotab*/ - ); - if (!py_code) goto bad; - py_frame = PyFrame_New( - PyThreadState_GET(), /*PyThreadState *tstate,*/ - py_code, /*PyCodeObject *code,*/ - py_globals, /*PyObject *globals,*/ - 0 /*PyObject *locals*/ - ); - if (!py_frame) goto bad; - py_frame->f_lineno = __pyx_lineno; - PyTraceBack_Here(py_frame); -bad: - Py_XDECREF(py_srcfile); - Py_XDECREF(py_funcname); - Py_XDECREF(py_code); - Py_XDECREF(py_frame); -} - +/* InitStrings */ static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) { while (t->p) { #if PY_MAJOR_VERSION < 3 @@ -1104,7 +2455,7 @@ static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) { } else { *t->p = PyString_FromStringAndSize(t->s, t->n - 1); } - #else /* Python 3+ has unicode 
identifiers */ + #else if (t->is_unicode | t->is_str) { if (t->intern) { *t->p = PyUnicode_InternFromString(t->s); @@ -1119,56 +2470,158 @@ static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) { #endif if (!*t->p) return -1; + if (PyObject_Hash(*t->p) == -1) + return -1; ++t; } return 0; } -/* Type Conversion Functions */ - +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) { + return __Pyx_PyUnicode_FromStringAndSize(c_str, (Py_ssize_t)strlen(c_str)); +} +static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) { + Py_ssize_t ignore; + return __Pyx_PyObject_AsStringAndSize(o, &ignore); +} +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT +#if !CYTHON_PEP393_ENABLED +static const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { + char* defenc_c; + PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL); + if (!defenc) return NULL; + defenc_c = PyBytes_AS_STRING(defenc); +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + { + char* end = defenc_c + PyBytes_GET_SIZE(defenc); + char* c; + for (c = defenc_c; c < end; c++) { + if ((unsigned char) (*c) >= 128) { + PyUnicode_AsASCIIString(o); + return NULL; + } + } + } +#endif + *length = PyBytes_GET_SIZE(defenc); + return defenc_c; +} +#else +static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { + if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL; +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + if (likely(PyUnicode_IS_ASCII(o))) { + *length = PyUnicode_GET_LENGTH(o); + return PyUnicode_AsUTF8(o); + } else { + PyUnicode_AsASCIIString(o); + return NULL; + } +#else + return PyUnicode_AsUTF8AndSize(o, length); +#endif +} +#endif +#endif +static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) { +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT + if ( +#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + __Pyx_sys_getdefaultencoding_not_ascii && +#endif + PyUnicode_Check(o)) { + return __Pyx_PyUnicode_AsStringAndSize(o, length); + } else +#endif +#if (!CYTHON_COMPILING_IN_PYPY) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE)) + if (PyByteArray_Check(o)) { + *length = PyByteArray_GET_SIZE(o); + return PyByteArray_AS_STRING(o); + } else +#endif + { + char* result; + int r = PyBytes_AsStringAndSize(o, &result, length); + if (unlikely(r < 0)) { + return NULL; + } else { + return result; + } + } +} static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { int is_true = x == Py_True; if (is_true | (x == Py_False) | (x == Py_None)) return is_true; else return PyObject_IsTrue(x); } - -static CYTHON_INLINE PyObject* __Pyx_PyNumber_Int(PyObject* x) { +static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject* x) { + int retval; + if (unlikely(!x)) return -1; + retval = __Pyx_PyObject_IsTrue(x); + Py_DECREF(x); + return retval; +} +static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) { +#if PY_MAJOR_VERSION >= 3 + if (PyLong_Check(result)) { + if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, + "__int__ returned non-int (type %.200s). 
" + "The ability to return an instance of a strict subclass of int " + "is deprecated, and may be removed in a future version of Python.", + Py_TYPE(result)->tp_name)) { + Py_DECREF(result); + return NULL; + } + return result; + } +#endif + PyErr_Format(PyExc_TypeError, + "__%.4s__ returned non-%.4s (type %.200s)", + type_name, type_name, Py_TYPE(result)->tp_name); + Py_DECREF(result); + return NULL; +} +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) { +#if CYTHON_USE_TYPE_SLOTS PyNumberMethods *m; +#endif const char *name = NULL; PyObject *res = NULL; -#if PY_VERSION_HEX < 0x03000000 - if (PyInt_Check(x) || PyLong_Check(x)) +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x) || PyLong_Check(x))) #else - if (PyLong_Check(x)) + if (likely(PyLong_Check(x))) #endif - return Py_INCREF(x), x; + return __Pyx_NewRef(x); +#if CYTHON_USE_TYPE_SLOTS m = Py_TYPE(x)->tp_as_number; -#if PY_VERSION_HEX < 0x03000000 + #if PY_MAJOR_VERSION < 3 if (m && m->nb_int) { name = "int"; - res = PyNumber_Int(x); + res = m->nb_int(x); } else if (m && m->nb_long) { name = "long"; - res = PyNumber_Long(x); + res = m->nb_long(x); } -#else - if (m && m->nb_int) { + #else + if (likely(m && m->nb_int)) { name = "int"; - res = PyNumber_Long(x); + res = m->nb_int(x); + } + #endif +#else + if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) { + res = PyNumber_Int(x); } #endif - if (res) { -#if PY_VERSION_HEX < 0x03000000 - if (!PyInt_Check(res) && !PyLong_Check(res)) { + if (likely(res)) { +#if PY_MAJOR_VERSION < 3 + if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) { #else - if (!PyLong_Check(res)) { + if (unlikely(!PyLong_CheckExact(res))) { #endif - PyErr_Format(PyExc_TypeError, - "__%s__ returned non-%s (type %.200s)", - name, name, Py_TYPE(res)->tp_name); - Py_DECREF(res); - return NULL; + return __Pyx_PyNumber_IntOrLongWrongResultType(res, name); } } else if (!PyErr_Occurred()) { @@ -1177,40 +2630,73 @@ static CYTHON_INLINE PyObject* __Pyx_PyNumber_Int(PyObject* x) { } return res; } - static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { Py_ssize_t ival; - PyObject* x = PyNumber_Index(b); + PyObject *x; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_CheckExact(b))) { + if (sizeof(Py_ssize_t) >= sizeof(long)) + return PyInt_AS_LONG(b); + else + return PyInt_AsSsize_t(b); + } +#endif + if (likely(PyLong_CheckExact(b))) { + #if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)b)->ob_digit; + const Py_ssize_t size = Py_SIZE(b); + if (likely(__Pyx_sst_abs(size) <= 1)) { + ival = likely(size) ? 
digits[0] : 0; + if (size == -1) ival = -ival; + return ival; + } else { + switch (size) { + case 2: + if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { + return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -2: + if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + } + } + #endif + return PyLong_AsSsize_t(b); + } + x = PyNumber_Index(b); if (!x) return -1; ival = PyInt_AsSsize_t(x); Py_DECREF(x); return ival; } - -static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { -#if PY_VERSION_HEX < 0x02050000 - if (ival <= LONG_MAX) - return PyInt_FromLong((long)ival); - else { - unsigned char *bytes = (unsigned char *) &ival; - int one = 1; int little = (int)*(unsigned char*)&one; - return _PyLong_FromByteArray(bytes, sizeof(size_t), little, 0); - } -#else - return PyInt_FromSize_t(ival); -#endif +static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) { + return b ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False); } - -static CYTHON_INLINE size_t __Pyx_PyInt_AsSize_t(PyObject* x) { - unsigned PY_LONG_LONG val = __Pyx_PyInt_AsUnsignedLongLong(x); - if (unlikely(val == (unsigned PY_LONG_LONG)-1 && PyErr_Occurred())) { - return (size_t)-1; - } else if (unlikely(val != (unsigned PY_LONG_LONG)(size_t)val)) { - PyErr_SetString(PyExc_OverflowError, - "value too large to convert to size_t"); - return (size_t)-1; - } - return (size_t)val; +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { + return PyInt_FromSize_t(ival); } diff --git a/playground/cython/cython_cache/src/cy_cxxtest.cc b/playground/cython/cython_cache/src/cy_cxxtest.cc index a37b1b1798..3259cd812a 100644 --- a/playground/cython/cython_cache/src/cy_cxxtest.cc +++ b/playground/cython/cython_cache/src/cy_cxxtest.cc @@ -1,16 +1,19 @@ -/* Generated by Cython 0.15.1 on Tue May 29 23:42:45 2012 */ +/* Generated by Cython 0.29 */ #define PY_SSIZE_T_CLEAN #include "Python.h" #ifndef Py_PYTHON_H #error Python headers needed to compile C extensions, please install development version of Python. +#elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000) + #error Cython requires Python 2.6+ or Python 3.3+. 
#else - -#include <stddef.h> /* For offsetof */ +#define CYTHON_ABI "0_29" +#define CYTHON_HEX_VERSION 0x001D00F0 +#define CYTHON_FUTURE_DIVISION 0 +#include <stddef.h> #ifndef offsetof -#define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) + #define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) #endif - #if !defined(WIN32) && !defined(MS_WINDOWS) #ifndef __stdcall #define __stdcall @@ -22,122 +25,508 @@ #define __fastcall #endif #endif - #ifndef DL_IMPORT #define DL_IMPORT(t) t #endif #ifndef DL_EXPORT #define DL_EXPORT(t) t #endif - +#define __PYX_COMMA , +#ifndef HAVE_LONG_LONG + #if PY_VERSION_HEX >= 0x02070000 + #define HAVE_LONG_LONG + #endif +#endif #ifndef PY_LONG_LONG #define PY_LONG_LONG LONG_LONG #endif - -#if PY_VERSION_HEX < 0x02040000 - #define METH_COEXIST 0 - #define PyDict_CheckExact(op) (Py_TYPE(op) == &PyDict_Type) - #define PyDict_Contains(d,o) PySequence_Contains(d,o) +#ifndef Py_HUGE_VAL + #define Py_HUGE_VAL HUGE_VAL #endif - -#if PY_VERSION_HEX < 0x02050000 - typedef int Py_ssize_t; - #define PY_SSIZE_T_MAX INT_MAX - #define PY_SSIZE_T_MIN INT_MIN - #define PY_FORMAT_SIZE_T "" - #define PyInt_FromSsize_t(z) PyInt_FromLong(z) - #define PyInt_AsSsize_t(o) __Pyx_PyInt_AsInt(o) - #define PyNumber_Index(o) PyNumber_Int(o) - #define PyIndex_Check(o) PyNumber_Check(o) - #define PyErr_WarnEx(category, message, stacklevel) PyErr_Warn(category, message) +#ifdef PYPY_VERSION + #define CYTHON_COMPILING_IN_PYPY 1 + #define CYTHON_COMPILING_IN_PYSTON 0 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #undef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 0 + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #if PY_VERSION_HEX < 0x03050000 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #elif !defined(CYTHON_USE_ASYNC_SLOTS) + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #undef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 0 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #undef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 1 + #undef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 0 + #undef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 0 + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 0 + #undef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 0 + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 +#elif defined(PYSTON_VERSION) + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_PYSTON 1 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #ifndef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 1 + #endif + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #ifndef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 1 + #endif + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + 
#endif + #ifndef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 1 + #endif + #ifndef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 1 + #endif + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 0 + #undef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 0 + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 +#else + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_PYSTON 0 + #define CYTHON_COMPILING_IN_CPYTHON 1 + #ifndef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 1 + #endif + #if PY_VERSION_HEX < 0x02070000 + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #elif !defined(CYTHON_USE_PYTYPE_LOOKUP) + #define CYTHON_USE_PYTYPE_LOOKUP 1 + #endif + #if PY_MAJOR_VERSION < 3 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #elif !defined(CYTHON_USE_ASYNC_SLOTS) + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #if PY_VERSION_HEX < 0x02070000 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #elif !defined(CYTHON_USE_PYLONG_INTERNALS) + #define CYTHON_USE_PYLONG_INTERNALS 1 + #endif + #ifndef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 1 + #endif + #ifndef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 1 + #endif + #if PY_VERSION_HEX < 0x030300F0 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #elif !defined(CYTHON_USE_UNICODE_WRITER) + #define CYTHON_USE_UNICODE_WRITER 1 + #endif + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #ifndef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 1 + #endif + #ifndef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 1 + #endif + #ifndef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 1 + #endif + #ifndef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 1 + #endif + #ifndef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT (PY_VERSION_HEX >= 0x03050000) + #endif + #ifndef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1) + #endif + #ifndef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS (PY_VERSION_HEX >= 0x030600B1) + #endif + #ifndef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK (PY_VERSION_HEX >= 0x030700A3) + #endif +#endif +#if !defined(CYTHON_FAST_PYCCALL) +#define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1) +#endif +#if CYTHON_USE_PYLONG_INTERNALS + #include "longintrepr.h" + #undef SHIFT + #undef BASE + #undef MASK +#endif +#ifndef __has_attribute + #define __has_attribute(x) 0 +#endif +#ifndef __has_cpp_attribute + #define __has_cpp_attribute(x) 0 +#endif +#ifndef CYTHON_RESTRICT + #if defined(__GNUC__) + #define CYTHON_RESTRICT __restrict__ + #elif defined(_MSC_VER) && _MSC_VER >= 1400 + #define CYTHON_RESTRICT __restrict + #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define CYTHON_RESTRICT restrict + #else + #define CYTHON_RESTRICT + #endif +#endif +#ifndef CYTHON_UNUSED +# if defined(__GNUC__) +# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +# elif defined(__ICC) || 
(defined(__INTEL_COMPILER) && !defined(_MSC_VER)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +#endif +#ifndef CYTHON_MAYBE_UNUSED_VAR +# if defined(__cplusplus) + template<class T> void CYTHON_MAYBE_UNUSED_VAR( const T& ) { } +# else +# define CYTHON_MAYBE_UNUSED_VAR(x) (void)(x) +# endif +#endif +#ifndef CYTHON_NCP_UNUSED +# if CYTHON_COMPILING_IN_CPYTHON +# define CYTHON_NCP_UNUSED +# else +# define CYTHON_NCP_UNUSED CYTHON_UNUSED +# endif +#endif +#define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None) +#ifdef _MSC_VER + #ifndef _MSC_STDINT_H_ + #if _MSC_VER < 1300 + typedef unsigned char uint8_t; + typedef unsigned int uint32_t; + #else + typedef unsigned __int8 uint8_t; + typedef unsigned __int32 uint32_t; + #endif + #endif +#else + #include <stdint.h> +#endif +#ifndef CYTHON_FALLTHROUGH + #if defined(__cplusplus) && __cplusplus >= 201103L + #if __has_cpp_attribute(fallthrough) + #define CYTHON_FALLTHROUGH [[fallthrough]] + #elif __has_cpp_attribute(clang::fallthrough) + #define CYTHON_FALLTHROUGH [[clang::fallthrough]] + #elif __has_cpp_attribute(gnu::fallthrough) + #define CYTHON_FALLTHROUGH [[gnu::fallthrough]] + #endif + #endif + #ifndef CYTHON_FALLTHROUGH + #if __has_attribute(fallthrough) + #define CYTHON_FALLTHROUGH __attribute__((fallthrough)) + #else + #define CYTHON_FALLTHROUGH + #endif + #endif + #if defined(__clang__ ) && defined(__apple_build_version__) + #if __apple_build_version__ < 7000000 + #undef CYTHON_FALLTHROUGH + #define CYTHON_FALLTHROUGH + #endif + #endif #endif -#if PY_VERSION_HEX < 0x02060000 - #define Py_REFCNT(ob) (((PyObject*)(ob))->ob_refcnt) - #define Py_TYPE(ob) (((PyObject*)(ob))->ob_type) - #define Py_SIZE(ob) (((PyVarObject*)(ob))->ob_size) - #define PyVarObject_HEAD_INIT(type, size) \ - PyObject_HEAD_INIT(type) size, - #define PyType_Modified(t) - - typedef struct { - void *buf; - PyObject *obj; - Py_ssize_t len; - Py_ssize_t itemsize; - int readonly; - int ndim; - char *format; - Py_ssize_t *shape; - Py_ssize_t *strides; - Py_ssize_t *suboffsets; - void *internal; - } Py_buffer; - - #define PyBUF_SIMPLE 0 - #define PyBUF_WRITABLE 0x0001 - #define PyBUF_FORMAT 0x0004 - #define PyBUF_ND 0x0008 - #define PyBUF_STRIDES (0x0010 | PyBUF_ND) - #define PyBUF_C_CONTIGUOUS (0x0020 | PyBUF_STRIDES) - #define PyBUF_F_CONTIGUOUS (0x0040 | PyBUF_STRIDES) - #define PyBUF_ANY_CONTIGUOUS (0x0080 | PyBUF_STRIDES) - #define PyBUF_INDIRECT (0x0100 | PyBUF_STRIDES) - +#ifndef __cplusplus + #error "Cython files generated with the C++ option must be compiled with a C++ compiler." 
+#endif +#ifndef CYTHON_INLINE + #if defined(__clang__) + #define CYTHON_INLINE __inline__ __attribute__ ((__unused__)) + #else + #define CYTHON_INLINE inline + #endif #endif +template<class T> +void __Pyx_call_destructor(T& x) { + x.~T(); +} +template<class T> +class __Pyx_FakeReference { + public: + __Pyx_FakeReference() : ptr(NULL) { } + __Pyx_FakeReference(const T& ref) : ptr(const_cast<T*>(&ref)) { } + T *operator->() { return ptr; } + T *operator&() { return ptr; } + operator T&() { return *ptr; } + template<typename U> bool operator ==(U other) { return *ptr == other; } + template<typename U> bool operator !=(U other) { return *ptr != other; } + private: + T *ptr; +}; +#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x02070600 && !defined(Py_OptimizeFlag) + #define Py_OptimizeFlag 0 +#endif +#define __PYX_BUILD_PY_SSIZE_T "n" +#define CYTHON_FORMAT_SSIZE_T "z" #if PY_MAJOR_VERSION < 3 #define __Pyx_BUILTIN_MODULE_NAME "__builtin__" + #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ + PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) + #define __Pyx_DefaultClassType PyClass_Type #else #define __Pyx_BUILTIN_MODULE_NAME "builtins" + #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ + PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) + #define __Pyx_DefaultClassType PyType_Type #endif - -#if PY_MAJOR_VERSION >= 3 +#ifndef Py_TPFLAGS_CHECKTYPES #define Py_TPFLAGS_CHECKTYPES 0 +#endif +#ifndef Py_TPFLAGS_HAVE_INDEX #define Py_TPFLAGS_HAVE_INDEX 0 #endif - -#if (PY_VERSION_HEX < 0x02060000) || (PY_MAJOR_VERSION >= 3) +#ifndef Py_TPFLAGS_HAVE_NEWBUFFER #define Py_TPFLAGS_HAVE_NEWBUFFER 0 #endif - +#ifndef Py_TPFLAGS_HAVE_FINALIZE + #define Py_TPFLAGS_HAVE_FINALIZE 0 +#endif +#ifndef METH_STACKLESS + #define METH_STACKLESS 0 +#endif +#if PY_VERSION_HEX <= 0x030700A3 || !defined(METH_FASTCALL) + #ifndef METH_FASTCALL + #define METH_FASTCALL 0x80 + #endif + typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject *const *args, Py_ssize_t nargs); + typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args, + Py_ssize_t nargs, PyObject *kwnames); +#else + #define __Pyx_PyCFunctionFast _PyCFunctionFast + #define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords +#endif +#if CYTHON_FAST_PYCCALL +#define __Pyx_PyFastCFunction_Check(func)\ + ((PyCFunction_Check(func) && (METH_FASTCALL == (PyCFunction_GET_FLAGS(func) & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS))))) +#else +#define __Pyx_PyFastCFunction_Check(func) 0 +#endif +#if CYTHON_USE_DICT_VERSIONS +#define __PYX_GET_DICT_VERSION(dict) (((PyDictObject*)(dict))->ma_version_tag) +#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)\ + (version_var) = __PYX_GET_DICT_VERSION(dict);\ + (cache_var) = (value); +#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) {\ + static PY_UINT64_T __pyx_dict_version = 0;\ + static PyObject *__pyx_dict_cached_value = NULL;\ + if (likely(__PYX_GET_DICT_VERSION(DICT) == __pyx_dict_version)) {\ + (VAR) = __pyx_dict_cached_value;\ + } else {\ + (VAR) = __pyx_dict_cached_value = (LOOKUP);\ + __pyx_dict_version = __PYX_GET_DICT_VERSION(DICT);\ + }\ + } +#else +#define __PYX_GET_DICT_VERSION(dict) (0) +#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var) +#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) (VAR) = (LOOKUP); +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc) + #define 
PyObject_Malloc(s) PyMem_Malloc(s) + #define PyObject_Free(p) PyMem_Free(p) + #define PyObject_Realloc(p) PyMem_Realloc(p) +#endif +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030400A1 + #define PyMem_RawMalloc(n) PyMem_Malloc(n) + #define PyMem_RawRealloc(p, n) PyMem_Realloc(p, n) + #define PyMem_RawFree(p) PyMem_Free(p) +#endif +#if CYTHON_COMPILING_IN_PYSTON + #define __Pyx_PyCode_HasFreeVars(co) PyCode_HasFreeVars(co) + #define __Pyx_PyFrame_SetLineNumber(frame, lineno) PyFrame_SetLineNumber(frame, lineno) +#else + #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) + #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno) +#endif +#if !CYTHON_FAST_THREAD_STATE || PY_VERSION_HEX < 0x02070000 + #define __Pyx_PyThreadState_Current PyThreadState_GET() +#elif PY_VERSION_HEX >= 0x03060000 + #define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet() +#elif PY_VERSION_HEX >= 0x03000000 + #define __Pyx_PyThreadState_Current PyThreadState_GET() +#else + #define __Pyx_PyThreadState_Current _PyThreadState_Current +#endif +#if PY_VERSION_HEX < 0x030700A2 && !defined(PyThread_tss_create) && !defined(Py_tss_NEEDS_INIT) +#include "pythread.h" +#define Py_tss_NEEDS_INIT 0 +typedef int Py_tss_t; +static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) { + *key = PyThread_create_key(); + return 0; // PyThread_create_key reports success always +} +static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) { + Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t)); + *key = Py_tss_NEEDS_INIT; + return key; +} +static CYTHON_INLINE void PyThread_tss_free(Py_tss_t *key) { + PyObject_Free(key); +} +static CYTHON_INLINE int PyThread_tss_is_created(Py_tss_t *key) { + return *key != Py_tss_NEEDS_INIT; +} +static CYTHON_INLINE void PyThread_tss_delete(Py_tss_t *key) { + PyThread_delete_key(*key); + *key = Py_tss_NEEDS_INIT; +} +static CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) { + return PyThread_set_key_value(*key, value); +} +static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { + return PyThread_get_key_value(*key); +} +#endif // TSS (Thread Specific Storage) API +#if CYTHON_COMPILING_IN_CPYTHON || defined(_PyDict_NewPresized) +#define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? 
PyDict_New() : _PyDict_NewPresized(n)) +#else +#define __Pyx_PyDict_NewPresized(n) PyDict_New() +#endif +#if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION + #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) +#else + #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y) +#endif +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 && CYTHON_USE_UNICODE_INTERNALS +#define __Pyx_PyDict_GetItemStr(dict, name) _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *) name)->hash) +#else +#define __Pyx_PyDict_GetItemStr(dict, name) PyDict_GetItem(dict, name) +#endif +#if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND) + #define CYTHON_PEP393_ENABLED 1 + #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\ + 0 : _PyUnicode_Ready((PyObject *)(op))) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u) + #define __Pyx_PyUnicode_KIND(u) PyUnicode_KIND(u) + #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u) + #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i) + #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, ch) + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u))) +#else + #define CYTHON_PEP393_ENABLED 0 + #define PyUnicode_1BYTE_KIND 1 + #define PyUnicode_2BYTE_KIND 2 + #define PyUnicode_4BYTE_KIND 4 + #define __Pyx_PyUnicode_READY(op) (0) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i])) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 65535 : 1114111) + #define __Pyx_PyUnicode_KIND(u) (sizeof(Py_UNICODE)) + #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u)) + #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i])) + #define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = ch) + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u)) +#endif +#if CYTHON_COMPILING_IN_PYPY + #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b) + #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b) +#else + #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b) + #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\ + PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b)) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyUnicode_Contains) + #define PyUnicode_Contains(u, s) PySequence_Contains(u, s) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyByteArray_Check) + #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Format) + #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt) +#endif +#define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyString_Check(b) && !PyString_CheckExact(b)))) ? PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b)) +#define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyUnicode_Check(b) && !PyUnicode_CheckExact(b)))) ? 
PyNumber_Remainder(a, b) : PyUnicode_Format(a, b)) +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b) +#else + #define __Pyx_PyString_Format(a, b) PyString_Format(a, b) +#endif +#if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII) + #define PyObject_ASCII(o) PyObject_Repr(o) +#endif #if PY_MAJOR_VERSION >= 3 #define PyBaseString_Type PyUnicode_Type #define PyStringObject PyUnicodeObject #define PyString_Type PyUnicode_Type #define PyString_Check PyUnicode_Check #define PyString_CheckExact PyUnicode_CheckExact + #define PyObject_Unicode PyObject_Str #endif - -#if PY_VERSION_HEX < 0x02060000 - #define PyBytesObject PyStringObject - #define PyBytes_Type PyString_Type - #define PyBytes_Check PyString_Check - #define PyBytes_CheckExact PyString_CheckExact - #define PyBytes_FromString PyString_FromString - #define PyBytes_FromStringAndSize PyString_FromStringAndSize - #define PyBytes_FromFormat PyString_FromFormat - #define PyBytes_DecodeEscape PyString_DecodeEscape - #define PyBytes_AsString PyString_AsString - #define PyBytes_AsStringAndSize PyString_AsStringAndSize - #define PyBytes_Size PyString_Size - #define PyBytes_AS_STRING PyString_AS_STRING - #define PyBytes_GET_SIZE PyString_GET_SIZE - #define PyBytes_Repr PyString_Repr - #define PyBytes_Concat PyString_Concat - #define PyBytes_ConcatAndDel PyString_ConcatAndDel -#endif - -#if PY_VERSION_HEX < 0x02060000 - #define PySet_Check(obj) PyObject_TypeCheck(obj, &PySet_Type) - #define PyFrozenSet_Check(obj) PyObject_TypeCheck(obj, &PyFrozenSet_Type) +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) + #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj) +#else + #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj)) + #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj)) #endif #ifndef PySet_CheckExact #define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type) #endif - -#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type) - +#if CYTHON_ASSUME_SAFE_MACROS + #define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq) +#else + #define __Pyx_PySequence_SIZE(seq) PySequence_Size(seq) +#endif #if PY_MAJOR_VERSION >= 3 #define PyIntObject PyLongObject #define PyInt_Type PyLong_Type @@ -153,13 +542,17 @@ #define PyInt_AsSsize_t PyLong_AsSsize_t #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask + #define PyNumber_Int PyNumber_Long #endif - #if PY_MAJOR_VERSION >= 3 #define PyBoolObject PyLongObject #endif - -#if PY_VERSION_HEX < 0x03020000 +#if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY + #ifndef PyUnicode_InternFromString + #define PyUnicode_InternFromString(s) PyUnicode_FromString(s) + #endif +#endif +#if PY_VERSION_HEX < 0x030200A4 typedef long Py_hash_t; #define __Pyx_PyInt_FromHash_t PyInt_FromLong #define __Pyx_PyInt_AsHash_t PyInt_AsLong @@ -167,57 +560,54 @@ #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t #define __Pyx_PyInt_AsHash_t PyInt_AsSsize_t #endif - - #if PY_MAJOR_VERSION >= 3 - #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) - #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) + #define __Pyx_PyMethod_New(func, self, klass) ((self) ? 
PyMethod_New(func, self) : (Py_INCREF(func), func)) #else - #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y) - #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y) + #define __Pyx_PyMethod_New(func, self, klass) PyMethod_New(func, self, klass) #endif - -#if (PY_MAJOR_VERSION < 3) || (PY_VERSION_HEX >= 0x03010300) - #define __Pyx_PySequence_GetSlice(obj, a, b) PySequence_GetSlice(obj, a, b) - #define __Pyx_PySequence_SetSlice(obj, a, b, value) PySequence_SetSlice(obj, a, b, value) - #define __Pyx_PySequence_DelSlice(obj, a, b) PySequence_DelSlice(obj, a, b) +#if CYTHON_USE_ASYNC_SLOTS + #if PY_VERSION_HEX >= 0x030500B1 + #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods + #define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async) + #else + #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved)) + #endif #else - #define __Pyx_PySequence_GetSlice(obj, a, b) (unlikely(!(obj)) ? \ - (PyErr_SetString(PyExc_SystemError, "null argument to internal routine"), (PyObject*)0) : \ - (likely((obj)->ob_type->tp_as_mapping) ? (PySequence_GetSlice(obj, a, b)) : \ - (PyErr_Format(PyExc_TypeError, "'%.200s' object is unsliceable", (obj)->ob_type->tp_name), (PyObject*)0))) - #define __Pyx_PySequence_SetSlice(obj, a, b, value) (unlikely(!(obj)) ? \ - (PyErr_SetString(PyExc_SystemError, "null argument to internal routine"), -1) : \ - (likely((obj)->ob_type->tp_as_mapping) ? (PySequence_SetSlice(obj, a, b, value)) : \ - (PyErr_Format(PyExc_TypeError, "'%.200s' object doesn't support slice assignment", (obj)->ob_type->tp_name), -1))) - #define __Pyx_PySequence_DelSlice(obj, a, b) (unlikely(!(obj)) ? \ - (PyErr_SetString(PyExc_SystemError, "null argument to internal routine"), -1) : \ - (likely((obj)->ob_type->tp_as_mapping) ? (PySequence_DelSlice(obj, a, b)) : \ - (PyErr_Format(PyExc_TypeError, "'%.200s' object doesn't support slice deletion", (obj)->ob_type->tp_name), -1))) + #define __Pyx_PyType_AsAsync(obj) NULL #endif - -#if PY_MAJOR_VERSION >= 3 - #define PyMethod_New(func, self, klass) ((self) ? 
PyMethod_New(func, self) : PyInstanceMethod_New(func)) +#ifndef __Pyx_PyAsyncMethodsStruct + typedef struct { + unaryfunc am_await; + unaryfunc am_aiter; + unaryfunc am_anext; + } __Pyx_PyAsyncMethodsStruct; #endif -#if PY_VERSION_HEX < 0x02050000 - #define __Pyx_GetAttrString(o,n) PyObject_GetAttrString((o),((char *)(n))) - #define __Pyx_SetAttrString(o,n,a) PyObject_SetAttrString((o),((char *)(n)),(a)) - #define __Pyx_DelAttrString(o,n) PyObject_DelAttrString((o),((char *)(n))) +#if defined(WIN32) || defined(MS_WINDOWS) + #define _USE_MATH_DEFINES +#endif +#include <math.h> +#ifdef NAN +#define __PYX_NAN() ((float) NAN) #else - #define __Pyx_GetAttrString(o,n) PyObject_GetAttrString((o),(n)) - #define __Pyx_SetAttrString(o,n,a) PyObject_SetAttrString((o),(n),(a)) - #define __Pyx_DelAttrString(o,n) PyObject_DelAttrString((o),(n)) +static CYTHON_INLINE float __PYX_NAN() { + float value; + memset(&value, 0xFF, sizeof(value)); + return value; +} #endif - -#if PY_VERSION_HEX < 0x02050000 - #define __Pyx_NAMESTR(n) ((char *)(n)) - #define __Pyx_DOCSTR(n) ((char *)(n)) +#if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL) +#define __Pyx_truncl trunc #else - #define __Pyx_NAMESTR(n) (n) - #define __Pyx_DOCSTR(n) (n) +#define __Pyx_truncl truncl #endif + +#define __PYX_ERR(f_index, lineno, Ln_error) \ { \ __pyx_filename = __pyx_f[f_index]; __pyx_lineno = lineno; __pyx_clineno = __LINE__; goto Ln_error; \ } + #ifndef __PYX_EXTERN_C #ifdef __cplusplus #define __PYX_EXTERN_C extern "C" @@ -226,88 +616,210 @@ #endif #endif -#if defined(WIN32) || defined(MS_WINDOWS) -#define _USE_MATH_DEFINES -#endif -#include <math.h> #define __PYX_HAVE__cy_cxxtest #define __PYX_HAVE_API__cy_cxxtest +/* Early includes */ #include "lib.h" #ifdef _OPENMP #include <omp.h> #endif /* _OPENMP */ -#ifdef PYREX_WITHOUT_ASSERTIONS +#if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS) #define CYTHON_WITHOUT_ASSERTIONS #endif - -/* inline attribute */ -#ifndef CYTHON_INLINE - #if defined(__GNUC__) - #define CYTHON_INLINE __inline__ - #elif defined(_MSC_VER) - #define CYTHON_INLINE __inline - #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L - #define CYTHON_INLINE inline - #else - #define CYTHON_INLINE - #endif +typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding; + const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; + +#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0 +#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT 0 +#define __PYX_DEFAULT_STRING_ENCODING "" +#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString +#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize +#define __Pyx_uchar_cast(c) ((unsigned char)c) +#define __Pyx_long_cast(x) ((long)x) +#define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\ + (sizeof(type) < sizeof(Py_ssize_t)) ||\ + (sizeof(type) > sizeof(Py_ssize_t) &&\ + likely(v < (type)PY_SSIZE_T_MAX ||\ + v == (type)PY_SSIZE_T_MAX) &&\ + (!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\ + v == (type)PY_SSIZE_T_MIN))) ||\ + (sizeof(type) == sizeof(Py_ssize_t) &&\ + (is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\ + v == (type)PY_SSIZE_T_MAX))) ) +static CYTHON_INLINE int __Pyx_is_valid_index(Py_ssize_t i, Py_ssize_t limit) { + return (size_t) i < (size_t) limit; +} +#if defined (__cplusplus) && __cplusplus >= 201103L + #include <cstdlib> + #define __Pyx_sst_abs(value) std::abs(value) +#elif SIZEOF_INT >= SIZEOF_SIZE_T + #define __Pyx_sst_abs(value) abs(value) +#elif SIZEOF_LONG >= SIZEOF_SIZE_T + #define 
__Pyx_sst_abs(value) labs(value) +#elif defined (_MSC_VER) + #define __Pyx_sst_abs(value) ((Py_ssize_t)_abs64(value)) +#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define __Pyx_sst_abs(value) llabs(value) +#elif defined (__GNUC__) + #define __Pyx_sst_abs(value) __builtin_llabs(value) +#else + #define __Pyx_sst_abs(value) ((value<0) ? -value : value) #endif - -/* unused attribute */ -#ifndef CYTHON_UNUSED -# if defined(__GNUC__) -# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) -# define CYTHON_UNUSED __attribute__ ((__unused__)) -# else -# define CYTHON_UNUSED -# endif -# elif defined(__ICC) || defined(__INTEL_COMPILER) -# define CYTHON_UNUSED __attribute__ ((__unused__)) -# else -# define CYTHON_UNUSED -# endif +static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*); +static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length); +#define __Pyx_PyByteArray_FromString(s) PyByteArray_FromStringAndSize((const char*)s, strlen((const char*)s)) +#define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l) +#define __Pyx_PyBytes_FromString PyBytes_FromString +#define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*); +#if PY_MAJOR_VERSION < 3 + #define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString + #define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize +#else + #define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString + #define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize #endif - -typedef struct {PyObject **p; char *s; const long n; const char* encoding; const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; /*proto*/ - - -/* Type Conversion Predeclarations */ - -#define __Pyx_PyBytes_FromUString(s) PyBytes_FromString((char*)s) -#define __Pyx_PyBytes_AsUString(s) ((unsigned char*) PyBytes_AsString(s)) - -#define __Pyx_Owned_Py_None(b) (Py_INCREF(Py_None), Py_None) -#define __Pyx_PyBool_FromLong(b) ((b) ? 
(Py_INCREF(Py_True), Py_True) : (Py_INCREF(Py_False), Py_False)) +#define __Pyx_PyBytes_AsWritableString(s) ((char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsWritableSString(s) ((signed char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsWritableUString(s) ((unsigned char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsString(s) ((const char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsSString(s) ((const signed char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsUString(s) ((const unsigned char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyObject_AsWritableString(s) ((char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsWritableSString(s) ((signed char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s) +#define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s) +#define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s) +#define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s) +#define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s) +static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) { + const Py_UNICODE *u_end = u; + while (*u_end++) ; + return (size_t)(u_end - u - 1); +} +#define __Pyx_PyUnicode_FromUnicode(u) PyUnicode_FromUnicode(u, __Pyx_Py_UNICODE_strlen(u)) +#define __Pyx_PyUnicode_FromUnicodeAndLength PyUnicode_FromUnicode +#define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode +#define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) +#define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) +static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b); static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); -static CYTHON_INLINE PyObject* __Pyx_PyNumber_Int(PyObject* x); - +static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject*); +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); +#define __Pyx_PySequence_Tuple(obj)\ + (likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj)) static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); -static CYTHON_INLINE size_t __Pyx_PyInt_AsSize_t(PyObject*); - +#if CYTHON_ASSUME_SAFE_MACROS #define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) +#else +#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x) +#endif +#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x)) +#if PY_MAJOR_VERSION >= 3 +#define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Long(x)) +#else +#define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x)) +#endif +#define __Pyx_PyNumber_Float(x) (PyFloat_CheckExact(x) ? 
__Pyx_NewRef(x) : PyNumber_Float(x)) +#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII +static int __Pyx_sys_getdefaultencoding_not_ascii; +static int __Pyx_init_sys_getdefaultencoding_params(void) { + PyObject* sys; + PyObject* default_encoding = NULL; + PyObject* ascii_chars_u = NULL; + PyObject* ascii_chars_b = NULL; + const char* default_encoding_c; + sys = PyImport_ImportModule("sys"); + if (!sys) goto bad; + default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL); + Py_DECREF(sys); + if (!default_encoding) goto bad; + default_encoding_c = PyBytes_AsString(default_encoding); + if (!default_encoding_c) goto bad; + if (strcmp(default_encoding_c, "ascii") == 0) { + __Pyx_sys_getdefaultencoding_not_ascii = 0; + } else { + char ascii_chars[128]; + int c; + for (c = 0; c < 128; c++) { + ascii_chars[c] = c; + } + __Pyx_sys_getdefaultencoding_not_ascii = 1; + ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL); + if (!ascii_chars_u) goto bad; + ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL); + if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) { + PyErr_Format( + PyExc_ValueError, + "This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.", + default_encoding_c); + goto bad; + } + Py_DECREF(ascii_chars_u); + Py_DECREF(ascii_chars_b); + } + Py_DECREF(default_encoding); + return 0; +bad: + Py_XDECREF(default_encoding); + Py_XDECREF(ascii_chars_u); + Py_XDECREF(ascii_chars_b); + return -1; +} +#endif +#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3 +#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL) +#else +#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL) +#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT +static char* __PYX_DEFAULT_STRING_ENCODING; +static int __Pyx_init_sys_getdefaultencoding_params(void) { + PyObject* sys; + PyObject* default_encoding = NULL; + char* default_encoding_c; + sys = PyImport_ImportModule("sys"); + if (!sys) goto bad; + default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL); + Py_DECREF(sys); + if (!default_encoding) goto bad; + default_encoding_c = PyBytes_AsString(default_encoding); + if (!default_encoding_c) goto bad; + __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c) + 1); + if (!__PYX_DEFAULT_STRING_ENCODING) goto bad; + strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c); + Py_DECREF(default_encoding); + return 0; +bad: + Py_XDECREF(default_encoding); + return -1; +} +#endif +#endif -#ifdef __GNUC__ - /* Test for GCC > 2.95 */ - #if __GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95)) - #define likely(x) __builtin_expect(!!(x), 1) - #define unlikely(x) __builtin_expect(!!(x), 0) - #else /* __GNUC__ > 2 ... */ - #define likely(x) (x) - #define unlikely(x) (x) - #endif /* __GNUC__ > 2 ... 
*/ -#else /* __GNUC__ */ +/* Test for GCC > 2.95 */ +#if defined(__GNUC__) && (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95))) + #define likely(x) __builtin_expect(!!(x), 1) + #define unlikely(x) __builtin_expect(!!(x), 0) +#else /* !__GNUC__ or GCC < 2.95 */ #define likely(x) (x) #define unlikely(x) (x) #endif /* __GNUC__ */ - -static PyObject *__pyx_m; +static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; } + +static PyObject *__pyx_m = NULL; +static PyObject *__pyx_d; static PyObject *__pyx_b; +static PyObject *__pyx_cython_runtime = NULL; static PyObject *__pyx_empty_tuple; static PyObject *__pyx_empty_bytes; +static PyObject *__pyx_empty_unicode; static int __pyx_lineno; static int __pyx_clineno = 0; static const char * __pyx_cfilenm= __FILE__; @@ -320,10 +832,11 @@ static const char *__pyx_f[] = { /*--- Type declarations ---*/ +/* --- Runtime support code (head) --- */ +/* Refnanny.proto */ #ifndef CYTHON_REFNANNY #define CYTHON_REFNANNY 0 #endif - #if CYTHON_REFNANNY typedef struct { void (*INCREF)(void*, PyObject*, int); @@ -334,10 +847,23 @@ static const char *__pyx_f[] = { void (*FinishContext)(void**); } __Pyx_RefNannyAPIStruct; static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL; - static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); /*proto*/ + static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL; - #define __Pyx_RefNannySetupContext(name) __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__) - #define __Pyx_RefNannyFinishContext() __Pyx_RefNanny->FinishContext(&__pyx_refnanny) +#ifdef WITH_THREAD + #define __Pyx_RefNannySetupContext(name, acquire_gil)\ + if (acquire_gil) {\ + PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ + PyGILState_Release(__pyx_gilstate_save);\ + } else {\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ + } +#else + #define __Pyx_RefNannySetupContext(name, acquire_gil)\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__) +#endif + #define __Pyx_RefNannyFinishContext()\ + __Pyx_RefNanny->FinishContext(&__pyx_refnanny) #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__) #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__) #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), __LINE__) @@ -348,7 +874,7 @@ static const char *__pyx_f[] = { #define __Pyx_XGIVEREF(r) do { if((r) != NULL) {__Pyx_GIVEREF(r);}} while(0) #else #define __Pyx_RefNannyDeclarations - #define __Pyx_RefNannySetupContext(name) + #define __Pyx_RefNannySetupContext(name, acquire_gil) #define __Pyx_RefNannyFinishContext() #define __Pyx_INCREF(r) Py_INCREF(r) #define __Pyx_DECREF(r) Py_DECREF(r) @@ -358,137 +884,275 @@ static const char *__pyx_f[] = { #define __Pyx_XDECREF(r) Py_XDECREF(r) #define __Pyx_XGOTREF(r) #define __Pyx_XGIVEREF(r) -#endif /* CYTHON_REFNANNY */ - -static int __Pyx_Print(PyObject*, PyObject *, int); /*proto*/ -#if PY_MAJOR_VERSION >= 3 -static PyObject* __pyx_print = 0; -static PyObject* __pyx_print_kwargs = 0; +#endif +#define __Pyx_XDECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; __Pyx_XDECREF(tmp);\ + } while (0) +#define __Pyx_DECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; __Pyx_DECREF(tmp);\ + } while (0) +#define 
__Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0) +#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0) + +/* PyThreadStateGet.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate; +#define __Pyx_PyThreadState_assign __pyx_tstate = __Pyx_PyThreadState_Current; +#define __Pyx_PyErr_Occurred() __pyx_tstate->curexc_type +#else +#define __Pyx_PyThreadState_declare +#define __Pyx_PyThreadState_assign +#define __Pyx_PyErr_Occurred() PyErr_Occurred() #endif -static int __Pyx_PrintOne(PyObject* stream, PyObject *o); /*proto*/ - -static CYTHON_INLINE unsigned char __Pyx_PyInt_AsUnsignedChar(PyObject *); - -static CYTHON_INLINE unsigned short __Pyx_PyInt_AsUnsignedShort(PyObject *); - -static CYTHON_INLINE unsigned int __Pyx_PyInt_AsUnsignedInt(PyObject *); - -static CYTHON_INLINE char __Pyx_PyInt_AsChar(PyObject *); - -static CYTHON_INLINE short __Pyx_PyInt_AsShort(PyObject *); - -static CYTHON_INLINE int __Pyx_PyInt_AsInt(PyObject *); - -static CYTHON_INLINE signed char __Pyx_PyInt_AsSignedChar(PyObject *); - -static CYTHON_INLINE signed short __Pyx_PyInt_AsSignedShort(PyObject *); - -static CYTHON_INLINE signed int __Pyx_PyInt_AsSignedInt(PyObject *); +/* PyErrFetchRestore.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyErr_Clear() __Pyx_ErrRestore(NULL, NULL, NULL) +#define __Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb) +#define __Pyx_ErrFetchWithState(type, value, tb) __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb) +#define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState(__pyx_tstate, type, value, tb) +#define __Pyx_ErrFetch(type, value, tb) __Pyx_ErrFetchInState(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); +static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#if CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_PyErr_SetNone(exc) (Py_INCREF(exc), __Pyx_ErrRestore((exc), NULL, NULL)) +#else +#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) +#endif +#else +#define __Pyx_PyErr_Clear() PyErr_Clear() +#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) +#define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, tb) +#define __Pyx_ErrRestoreInState(tstate, type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetchInState(tstate, type, value, tb) PyErr_Fetch(type, value, tb) +#define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetch(type, value, tb) PyErr_Fetch(type, value, tb) +#endif -static CYTHON_INLINE int __Pyx_PyInt_AsLongDouble(PyObject *); +/* WriteUnraisableException.proto */ +static void __Pyx_WriteUnraisable(const char *name, int clineno, + int lineno, const char *filename, + int full_traceback, int nogil); -static CYTHON_INLINE unsigned long __Pyx_PyInt_AsUnsignedLong(PyObject *); +/* PyObjectGetAttrStr.proto */ +#if CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name); +#else +#define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n) +#endif -static CYTHON_INLINE unsigned PY_LONG_LONG __Pyx_PyInt_AsUnsignedLongLong(PyObject *); +/* CLineInTraceback.proto */ 
+#ifdef CYTHON_CLINE_IN_TRACEBACK +#define __Pyx_CLineForTraceback(tstate, c_line) (((CYTHON_CLINE_IN_TRACEBACK)) ? c_line : 0) +#else +static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line); +#endif -static CYTHON_INLINE long __Pyx_PyInt_AsLong(PyObject *); +/* CodeObjectCache.proto */ +typedef struct { + PyCodeObject* code_object; + int code_line; +} __Pyx_CodeObjectCacheEntry; +struct __Pyx_CodeObjectCache { + int count; + int max_count; + __Pyx_CodeObjectCacheEntry* entries; +}; +static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL}; +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line); +static PyCodeObject *__pyx_find_code_object(int code_line); +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object); + +/* AddTraceback.proto */ +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename); + +/* Print.proto */ +static int __Pyx_Print(PyObject*, PyObject *, int); +#if CYTHON_COMPILING_IN_PYPY || PY_MAJOR_VERSION >= 3 +static PyObject* __pyx_print = 0; +static PyObject* __pyx_print_kwargs = 0; +#endif -static CYTHON_INLINE PY_LONG_LONG __Pyx_PyInt_AsLongLong(PyObject *); +/* PrintOne.proto */ +static int __Pyx_PrintOne(PyObject* stream, PyObject *o); -static CYTHON_INLINE signed long __Pyx_PyInt_AsSignedLong(PyObject *); +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value); -static CYTHON_INLINE signed PY_LONG_LONG __Pyx_PyInt_AsSignedLongLong(PyObject *); +/* CIntFromPy.proto */ +static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *); -static CYTHON_INLINE void __Pyx_ErrRestore(PyObject *type, PyObject *value, PyObject *tb); /*proto*/ -static CYTHON_INLINE void __Pyx_ErrFetch(PyObject **type, PyObject **value, PyObject **tb); /*proto*/ +/* CIntFromPy.proto */ +static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *); -static void __Pyx_WriteUnraisable(const char *name, int clineno, - int lineno, const char *filename); /*proto*/ +/* FastTypeChecks.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type) +static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b); +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type); +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2); +#else +#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type) +#define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type) +#define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || PyErr_GivenExceptionMatches(err, type2)) +#endif +#define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception) +/* CheckBinaryVersion.proto */ static int __Pyx_check_binary_version(void); -static int __Pyx_ExportFunction(const char *name, void (*f)(void), const char *sig); /*proto*/ +/* FunctionExport.proto */ +static int __Pyx_ExportFunction(const char *name, void (*f)(void), const char *sig); + +/* InitStrings.proto */ +static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); -static void __Pyx_AddTraceback(const char *funcname, int __pyx_clineno, - int __pyx_lineno, const char *__pyx_filename); /*proto*/ -static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); /*proto*/ +/* Module declarations from 'cpython.version' */ /* Module declarations from 'cy_cxxtest' */ 
-__PYX_EXTERN_C DL_EXPORT(void) cy_hello(void); /*proto*/ +__PYX_EXTERN_C void cy_hello(void); /*proto*/ #define __Pyx_MODULE_NAME "cy_cxxtest" +extern int __pyx_module_is_main_cy_cxxtest; int __pyx_module_is_main_cy_cxxtest = 0; /* Implementation of 'cy_cxxtest' */ -static char __pyx_k_1[] = "hello cython-world!"; -static char __pyx_k__pyhello[] = "pyhello"; -static char __pyx_k____main__[] = "__main__"; -static char __pyx_k____test__[] = "__test__"; -static char __pyx_k__cy_cxxtest[] = "cy_cxxtest"; -static PyObject *__pyx_kp_s_1; -static PyObject *__pyx_n_s____main__; -static PyObject *__pyx_n_s____test__; -static PyObject *__pyx_n_s__cy_cxxtest; -static PyObject *__pyx_n_s__pyhello; - -/* "cy_cxxtest.pyx":3 +static const char __pyx_k_end[] = "end"; +static const char __pyx_k_file[] = "file"; +static const char __pyx_k_main[] = "__main__"; +static const char __pyx_k_name[] = "__name__"; +static const char __pyx_k_test[] = "__test__"; +static const char __pyx_k_print[] = "print"; +static const char __pyx_k_pyhello[] = "pyhello"; +static const char __pyx_k_cy_cxxtest[] = "cy_cxxtest"; +static const char __pyx_k_cline_in_traceback[] = "cline_in_traceback"; +static const char __pyx_k_hello_cython_world[] = "hello cython-world!"; +static const char __pyx_k_Compiled_with_python_version_s[] = "Compiled with python version %s"; +static const char __pyx_k_home_romain_dev_waf_wrapper_waf[] = "/home/romain/dev/waf-wrapper/waf/playground/cython/src/cy_cxxtest.pyx"; +static PyObject *__pyx_kp_s_Compiled_with_python_version_s; +static PyObject *__pyx_n_s_cline_in_traceback; +static PyObject *__pyx_n_s_cy_cxxtest; +static PyObject *__pyx_n_s_end; +static PyObject *__pyx_n_s_file; +static PyObject *__pyx_kp_s_hello_cython_world; +static PyObject *__pyx_kp_s_home_romain_dev_waf_wrapper_waf; +static PyObject *__pyx_n_s_main; +static PyObject *__pyx_n_s_name; +static PyObject *__pyx_n_s_print; +static PyObject *__pyx_n_s_pyhello; +static PyObject *__pyx_n_s_test; +static PyObject *__pyx_pf_10cy_cxxtest_pyhello(CYTHON_UNUSED PyObject *__pyx_self); /* proto */ +static PyObject *__pyx_codeobj_; +/* Late includes */ + +/* "cy_cxxtest.pyx":4 * cimport cy_cxxtest * * def pyhello(): # <<<<<<<<<<<<<< * cy_cxxtest.hello() - * + * print("Compiled with python version %s" % PY_VERSION) */ -static PyObject *__pyx_pf_10cy_cxxtest_pyhello(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static PyMethodDef __pyx_mdef_10cy_cxxtest_pyhello = {__Pyx_NAMESTR("pyhello"), (PyCFunction)__pyx_pf_10cy_cxxtest_pyhello, METH_NOARGS, __Pyx_DOCSTR(0)}; -static PyObject *__pyx_pf_10cy_cxxtest_pyhello(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused) { - PyObject *__pyx_r = NULL; +/* Python wrapper */ +static PyObject *__pyx_pw_10cy_cxxtest_1pyhello(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyMethodDef __pyx_mdef_10cy_cxxtest_1pyhello = {"pyhello", (PyCFunction)__pyx_pw_10cy_cxxtest_1pyhello, METH_NOARGS, 0}; +static PyObject *__pyx_pw_10cy_cxxtest_1pyhello(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("pyhello"); - __pyx_self = __pyx_self; + __Pyx_RefNannySetupContext("pyhello (wrapper)", 0); + __pyx_r = __pyx_pf_10cy_cxxtest_pyhello(__pyx_self); - /* "cy_cxxtest.pyx":4 + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_10cy_cxxtest_pyhello(CYTHON_UNUSED PyObject *__pyx_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + 
PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + __Pyx_RefNannySetupContext("pyhello", 0); + + /* "cy_cxxtest.pyx":5 * * def pyhello(): * cy_cxxtest.hello() # <<<<<<<<<<<<<< + * print("Compiled with python version %s" % PY_VERSION) * - * cdef public api void cy_hello(): */ hello(); + /* "cy_cxxtest.pyx":6 + * def pyhello(): + * cy_cxxtest.hello() + * print("Compiled with python version %s" % PY_VERSION) # <<<<<<<<<<<<<< + * + * cdef public api void cy_hello(): + */ + __pyx_t_1 = __Pyx_PyObject_FromString(PY_VERSION); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyString_Format(__pyx_kp_s_Compiled_with_python_version_s, __pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__Pyx_PrintOne(0, __pyx_t_2) < 0) __PYX_ERR(0, 6, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "cy_cxxtest.pyx":4 + * cimport cy_cxxtest + * + * def pyhello(): # <<<<<<<<<<<<<< + * cy_cxxtest.hello() + * print("Compiled with python version %s" % PY_VERSION) + */ + + /* function exit code */ __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_AddTraceback("cy_cxxtest.pyhello", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } -/* "cy_cxxtest.pyx":6 - * cy_cxxtest.hello() +/* "cy_cxxtest.pyx":8 + * print("Compiled with python version %s" % PY_VERSION) * * cdef public api void cy_hello(): # <<<<<<<<<<<<<< * print("hello cython-world!") - * */ void cy_hello(void) { __Pyx_RefNannyDeclarations - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("cy_hello"); + __Pyx_RefNannySetupContext("cy_hello", 0); - /* "cy_cxxtest.pyx":7 + /* "cy_cxxtest.pyx":9 * * cdef public api void cy_hello(): * print("hello cython-world!") # <<<<<<<<<<<<<< + */ + if (__Pyx_PrintOne(0, __pyx_kp_s_hello_cython_world) < 0) __PYX_ERR(0, 9, __pyx_L1_error) + + /* "cy_cxxtest.pyx":8 + * print("Compiled with python version %s" % PY_VERSION) * + * cdef public api void cy_hello(): # <<<<<<<<<<<<<< + * print("hello cython-world!") */ - if (__Pyx_PrintOne(0, ((PyObject *)__pyx_kp_s_1)) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 7; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + /* function exit code */ goto __pyx_L0; __pyx_L1_error:; - __Pyx_WriteUnraisable("cy_cxxtest.cy_hello", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_WriteUnraisable("cy_cxxtest.cy_hello", __pyx_clineno, __pyx_lineno, __pyx_filename, 1, 0); __pyx_L0:; __Pyx_RefNannyFinishContext(); } @@ -498,70 +1162,294 @@ static PyMethodDef __pyx_methods[] = { }; #if PY_MAJOR_VERSION >= 3 +#if CYTHON_PEP489_MULTI_PHASE_INIT +static PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def); /*proto*/ +static int __pyx_pymod_exec_cy_cxxtest(PyObject* module); /*proto*/ +static PyModuleDef_Slot __pyx_moduledef_slots[] = { + {Py_mod_create, (void*)__pyx_pymod_create}, + {Py_mod_exec, (void*)__pyx_pymod_exec_cy_cxxtest}, + {0, NULL} +}; +#endif + static struct PyModuleDef __pyx_moduledef = { PyModuleDef_HEAD_INIT, - __Pyx_NAMESTR("cy_cxxtest"), + "cy_cxxtest", 0, /* m_doc */ + #if CYTHON_PEP489_MULTI_PHASE_INIT + 0, /* m_size */ + #else -1, /* m_size */ + #endif __pyx_methods /* m_methods */, + #if CYTHON_PEP489_MULTI_PHASE_INIT + __pyx_moduledef_slots, /* 
m_slots */ + #else NULL, /* m_reload */ + #endif NULL, /* m_traverse */ NULL, /* m_clear */ NULL /* m_free */ }; #endif +#ifndef CYTHON_SMALL_CODE +#if defined(__clang__) + #define CYTHON_SMALL_CODE +#elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3)) + #define CYTHON_SMALL_CODE __attribute__((cold)) +#else + #define CYTHON_SMALL_CODE +#endif +#endif static __Pyx_StringTabEntry __pyx_string_tab[] = { - {&__pyx_kp_s_1, __pyx_k_1, sizeof(__pyx_k_1), 0, 0, 1, 0}, - {&__pyx_n_s____main__, __pyx_k____main__, sizeof(__pyx_k____main__), 0, 0, 1, 1}, - {&__pyx_n_s____test__, __pyx_k____test__, sizeof(__pyx_k____test__), 0, 0, 1, 1}, - {&__pyx_n_s__cy_cxxtest, __pyx_k__cy_cxxtest, sizeof(__pyx_k__cy_cxxtest), 0, 0, 1, 1}, - {&__pyx_n_s__pyhello, __pyx_k__pyhello, sizeof(__pyx_k__pyhello), 0, 0, 1, 1}, + {&__pyx_kp_s_Compiled_with_python_version_s, __pyx_k_Compiled_with_python_version_s, sizeof(__pyx_k_Compiled_with_python_version_s), 0, 0, 1, 0}, + {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1}, + {&__pyx_n_s_cy_cxxtest, __pyx_k_cy_cxxtest, sizeof(__pyx_k_cy_cxxtest), 0, 0, 1, 1}, + {&__pyx_n_s_end, __pyx_k_end, sizeof(__pyx_k_end), 0, 0, 1, 1}, + {&__pyx_n_s_file, __pyx_k_file, sizeof(__pyx_k_file), 0, 0, 1, 1}, + {&__pyx_kp_s_hello_cython_world, __pyx_k_hello_cython_world, sizeof(__pyx_k_hello_cython_world), 0, 0, 1, 0}, + {&__pyx_kp_s_home_romain_dev_waf_wrapper_waf, __pyx_k_home_romain_dev_waf_wrapper_waf, sizeof(__pyx_k_home_romain_dev_waf_wrapper_waf), 0, 0, 1, 0}, + {&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1}, + {&__pyx_n_s_name, __pyx_k_name, sizeof(__pyx_k_name), 0, 0, 1, 1}, + {&__pyx_n_s_print, __pyx_k_print, sizeof(__pyx_k_print), 0, 0, 1, 1}, + {&__pyx_n_s_pyhello, __pyx_k_pyhello, sizeof(__pyx_k_pyhello), 0, 0, 1, 1}, + {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1}, {0, 0, 0, 0, 0, 0, 0} }; -static int __Pyx_InitCachedBuiltins(void) { +static CYTHON_SMALL_CODE int __Pyx_InitCachedBuiltins(void) { + return 0; +} + +static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); + + /* "cy_cxxtest.pyx":4 + * cimport cy_cxxtest + * + * def pyhello(): # <<<<<<<<<<<<<< + * cy_cxxtest.hello() + * print("Compiled with python version %s" % PY_VERSION) + */ + __pyx_codeobj_ = (PyObject*)__Pyx_PyCode_New(0, 0, 0, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_home_romain_dev_waf_wrapper_waf, __pyx_n_s_pyhello, 4, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj_)) __PYX_ERR(0, 4, __pyx_L1_error) + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_RefNannyFinishContext(); + return -1; +} + +static CYTHON_SMALL_CODE int __Pyx_InitGlobals(void) { + if (__Pyx_InitStrings(__pyx_string_tab) < 0) __PYX_ERR(0, 1, __pyx_L1_error); return 0; + __pyx_L1_error:; + return -1; } -static int __Pyx_InitCachedConstants(void) { +static CYTHON_SMALL_CODE int __Pyx_modinit_global_init_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_variable_export_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_function_export_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_type_init_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_type_import_code(void); /*proto*/ +static CYTHON_SMALL_CODE int 
__Pyx_modinit_variable_import_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_function_import_code(void); /*proto*/ + +static int __Pyx_modinit_global_init_code(void) { __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants"); + __Pyx_RefNannySetupContext("__Pyx_modinit_global_init_code", 0); + /*--- Global init code ---*/ __Pyx_RefNannyFinishContext(); return 0; } -static int __Pyx_InitGlobals(void) { - if (__Pyx_InitStrings(__pyx_string_tab) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}; +static int __Pyx_modinit_variable_export_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_variable_export_code", 0); + /*--- Variable export code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_function_export_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_function_export_code", 0); + /*--- Function export code ---*/ + if (__Pyx_ExportFunction("cy_hello", (void (*)(void))cy_hello, "void (void)") < 0) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_RefNannyFinishContext(); return 0; __pyx_L1_error:; + __Pyx_RefNannyFinishContext(); return -1; } +static int __Pyx_modinit_type_init_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0); + /*--- Type init code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_type_import_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_type_import_code", 0); + /*--- Type import code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_variable_import_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_variable_import_code", 0); + /*--- Variable import code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_function_import_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_function_import_code", 0); + /*--- Function import code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + + #if PY_MAJOR_VERSION < 3 -PyMODINIT_FUNC initcy_cxxtest(void); /*proto*/ -PyMODINIT_FUNC initcy_cxxtest(void) +#ifdef CYTHON_NO_PYINIT_EXPORT +#define __Pyx_PyMODINIT_FUNC void +#else +#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC +#endif +#else +#ifdef CYTHON_NO_PYINIT_EXPORT +#define __Pyx_PyMODINIT_FUNC PyObject * #else -PyMODINIT_FUNC PyInit_cy_cxxtest(void); /*proto*/ -PyMODINIT_FUNC PyInit_cy_cxxtest(void) +#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC +#endif +#endif + + +#if PY_MAJOR_VERSION < 3 +__Pyx_PyMODINIT_FUNC initcy_cxxtest(void) CYTHON_SMALL_CODE; /*proto*/ +__Pyx_PyMODINIT_FUNC initcy_cxxtest(void) +#else +__Pyx_PyMODINIT_FUNC PyInit_cy_cxxtest(void) CYTHON_SMALL_CODE; /*proto*/ +__Pyx_PyMODINIT_FUNC PyInit_cy_cxxtest(void) +#if CYTHON_PEP489_MULTI_PHASE_INIT +{ + return PyModuleDef_Init(&__pyx_moduledef); +} +static CYTHON_SMALL_CODE int __Pyx_check_single_interpreter(void) { + #if PY_VERSION_HEX >= 0x030700A1 + static PY_INT64_T main_interpreter_id = -1; + PY_INT64_T current_id = PyInterpreterState_GetID(PyThreadState_Get()->interp); + if (main_interpreter_id == -1) { + main_interpreter_id = current_id; + return (unlikely(current_id == -1)) ? 
-1 : 0; + } else if (unlikely(main_interpreter_id != current_id)) + #else + static PyInterpreterState *main_interpreter = NULL; + PyInterpreterState *current_interpreter = PyThreadState_Get()->interp; + if (!main_interpreter) { + main_interpreter = current_interpreter; + } else if (unlikely(main_interpreter != current_interpreter)) + #endif + { + PyErr_SetString( + PyExc_ImportError, + "Interpreter change detected - this module can only be loaded into one interpreter per process."); + return -1; + } + return 0; +} +static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *moddict, const char* from_name, const char* to_name) { + PyObject *value = PyObject_GetAttrString(spec, from_name); + int result = 0; + if (likely(value)) { + result = PyDict_SetItemString(moddict, to_name, value); + Py_DECREF(value); + } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) { + PyErr_Clear(); + } else { + result = -1; + } + return result; +} +static CYTHON_SMALL_CODE PyObject* __pyx_pymod_create(PyObject *spec, CYTHON_UNUSED PyModuleDef *def) { + PyObject *module = NULL, *moddict, *modname; + if (__Pyx_check_single_interpreter()) + return NULL; + if (__pyx_m) + return __Pyx_NewRef(__pyx_m); + modname = PyObject_GetAttrString(spec, "name"); + if (unlikely(!modname)) goto bad; + module = PyModule_NewObject(modname); + Py_DECREF(modname); + if (unlikely(!module)) goto bad; + moddict = PyModule_GetDict(module); + if (unlikely(!moddict)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "loader", "__loader__") < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "origin", "__file__") < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "parent", "__package__") < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "submodule_search_locations", "__path__") < 0)) goto bad; + return module; +bad: + Py_XDECREF(module); + return NULL; +} + + +static CYTHON_SMALL_CODE int __pyx_pymod_exec_cy_cxxtest(PyObject *__pyx_pyinit_module) +#endif #endif { PyObject *__pyx_t_1 = NULL; __Pyx_RefNannyDeclarations - #if CYTHON_REFNANNY - __Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); - if (!__Pyx_RefNanny) { - PyErr_Clear(); - __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny"); - if (!__Pyx_RefNanny) - Py_FatalError("failed to import 'refnanny' module"); + #if CYTHON_PEP489_MULTI_PHASE_INIT + if (__pyx_m) { + if (__pyx_m == __pyx_pyinit_module) return 0; + PyErr_SetString(PyExc_RuntimeError, "Module 'cy_cxxtest' has already been imported. 
Re-initialisation is not supported."); + return -1; } + #elif PY_MAJOR_VERSION >= 3 + if (__pyx_m) return __Pyx_NewRef(__pyx_m); + #endif + #if CYTHON_REFNANNY +__Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); +if (!__Pyx_RefNanny) { + PyErr_Clear(); + __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny"); + if (!__Pyx_RefNanny) + Py_FatalError("failed to import 'refnanny' module"); +} +#endif + __Pyx_RefNannySetupContext("__Pyx_PyMODINIT_FUNC PyInit_cy_cxxtest(void)", 0); + if (__Pyx_check_binary_version() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #ifdef __Pxy_PyFrame_Initialize_Offsets + __Pxy_PyFrame_Initialize_Offsets(); + #endif + __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(0, 1, __pyx_L1_error) + #ifdef __Pyx_CyFunction_USED + if (__pyx_CyFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) #endif - __Pyx_RefNannySetupContext("PyMODINIT_FUNC PyInit_cy_cxxtest(void)"); - if ( __Pyx_check_binary_version() < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - #ifdef __pyx_binding_PyCFunctionType_USED - if (__pyx_binding_PyCFunctionType_init() < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + #ifdef __Pyx_FusedFunction_USED + if (__pyx_FusedFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_Coroutine_USED + if (__pyx_Coroutine_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_Generator_USED + if (__pyx_Generator_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_AsyncGen_USED + if (__pyx_AsyncGen_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_StopAsyncIteration_USED + if (__pyx_StopAsyncIteration_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) #endif /*--- Library function declarations ---*/ /*--- Threads initialization code ---*/ @@ -571,85 +1459,113 @@ PyMODINIT_FUNC PyInit_cy_cxxtest(void) #endif #endif /*--- Module creation code ---*/ + #if CYTHON_PEP489_MULTI_PHASE_INIT + __pyx_m = __pyx_pyinit_module; + Py_INCREF(__pyx_m); + #else #if PY_MAJOR_VERSION < 3 - __pyx_m = Py_InitModule4(__Pyx_NAMESTR("cy_cxxtest"), __pyx_methods, 0, 0, PYTHON_API_VERSION); + __pyx_m = Py_InitModule4("cy_cxxtest", __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m); #else __pyx_m = PyModule_Create(&__pyx_moduledef); #endif - if (!__pyx_m) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}; - #if PY_MAJOR_VERSION < 3 - Py_INCREF(__pyx_m); + if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error) #endif - __pyx_b = PyImport_AddModule(__Pyx_NAMESTR(__Pyx_BUILTIN_MODULE_NAME)); - if (!__pyx_b) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}; - if (__Pyx_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto 
__pyx_L1_error;}; + __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error) + Py_INCREF(__pyx_d); + __pyx_b = PyImport_AddModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_cython_runtime = PyImport_AddModule((char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(0, 1, __pyx_L1_error) + #if CYTHON_COMPILING_IN_PYPY + Py_INCREF(__pyx_b); + #endif + if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error); /*--- Initialize various global constants etc. ---*/ - if (unlikely(__Pyx_InitGlobals() < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT) + if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif if (__pyx_module_is_main_cy_cxxtest) { - if (__Pyx_SetAttrString(__pyx_m, "__name__", __pyx_n_s____main__) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}; + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_name, __pyx_n_s_main) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + } + #if PY_MAJOR_VERSION >= 3 + { + PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(0, 1, __pyx_L1_error) + if (!PyDict_GetItemString(modules, "cy_cxxtest")) { + if (unlikely(PyDict_SetItemString(modules, "cy_cxxtest", __pyx_m) < 0)) __PYX_ERR(0, 1, __pyx_L1_error) + } } + #endif /*--- Builtin init code ---*/ - if (unlikely(__Pyx_InitCachedBuiltins() < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 1, __pyx_L1_error) /*--- Constants init code ---*/ - if (unlikely(__Pyx_InitCachedConstants() < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - /*--- Global init code ---*/ - /*--- Variable export code ---*/ - /*--- Function export code ---*/ - if (__Pyx_ExportFunction("cy_hello", (void (*)(void))cy_hello, "void (void)") < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - /*--- Type init code ---*/ - /*--- Type import code ---*/ - /*--- Variable import code ---*/ - /*--- Function import code ---*/ + if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + /*--- Global type/function init code ---*/ + (void)__Pyx_modinit_global_init_code(); + (void)__Pyx_modinit_variable_export_code(); + if (unlikely(__Pyx_modinit_function_export_code() != 0)) goto __pyx_L1_error; + (void)__Pyx_modinit_type_init_code(); + (void)__Pyx_modinit_type_import_code(); + (void)__Pyx_modinit_variable_import_code(); + (void)__Pyx_modinit_function_import_code(); /*--- Execution code ---*/ + #if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) + if (__Pyx_patch_abc() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif - /* "cy_cxxtest.pyx":3 + /* "cy_cxxtest.pyx":4 * cimport cy_cxxtest * * def pyhello(): # <<<<<<<<<<<<<< * cy_cxxtest.hello() - * + * print("Compiled with python version %s" % PY_VERSION) */ - __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_10cy_cxxtest_pyhello, NULL, __pyx_n_s__cy_cxxtest); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 3; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + __pyx_t_1 = 
PyCFunction_NewEx(&__pyx_mdef_10cy_cxxtest_1pyhello, NULL, __pyx_n_s_cy_cxxtest); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 4, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); - if (PyObject_SetAttr(__pyx_m, __pyx_n_s__pyhello, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 3; __pyx_clineno = __LINE__; goto __pyx_L1_error;} + if (PyDict_SetItem(__pyx_d, __pyx_n_s_pyhello, __pyx_t_1) < 0) __PYX_ERR(0, 4, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; /* "cy_cxxtest.pyx":1 - * cimport cy_cxxtest # <<<<<<<<<<<<<< + * from cpython.version cimport PY_VERSION # <<<<<<<<<<<<<< + * cimport cy_cxxtest * - * def pyhello(): */ - __pyx_t_1 = PyDict_New(); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_1)); - if (PyObject_SetAttr(__pyx_m, __pyx_n_s____test__, ((PyObject *)__pyx_t_1)) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(((PyObject *)__pyx_t_1)); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_1) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /*--- Wrapped vars code ---*/ + goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); if (__pyx_m) { - __Pyx_AddTraceback("init cy_cxxtest", __pyx_clineno, __pyx_lineno, __pyx_filename); - Py_DECREF(__pyx_m); __pyx_m = 0; + if (__pyx_d) { + __Pyx_AddTraceback("init cy_cxxtest", __pyx_clineno, __pyx_lineno, __pyx_filename); + } + Py_CLEAR(__pyx_m); } else if (!PyErr_Occurred()) { PyErr_SetString(PyExc_ImportError, "init cy_cxxtest"); } __pyx_L0:; __Pyx_RefNannyFinishContext(); - #if PY_MAJOR_VERSION < 3 - return; - #else + #if CYTHON_PEP489_MULTI_PHASE_INIT + return (__pyx_m != NULL) ? 
0 : -1; + #elif PY_MAJOR_VERSION >= 3 return __pyx_m; + #else + return; #endif } -/* Runtime support code */ - +/* --- Runtime support code --- */ +/* Refnanny */ #if CYTHON_REFNANNY static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) { PyObject *m = NULL, *p = NULL; void *r = NULL; - m = PyImport_ImportModule((char *)modname); + m = PyImport_ImportModule(modname); if (!m) goto end; - p = PyObject_GetAttrString(m, (char *)"RefNannyAPI"); + p = PyObject_GetAttrString(m, "RefNannyAPI"); if (!p) goto end; r = PyLong_AsVoidPtr(p); end: @@ -657,9 +1573,297 @@ static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) { Py_XDECREF(m); return (__Pyx_RefNannyAPIStruct *)r; } -#endif /* CYTHON_REFNANNY */ +#endif +/* PyErrFetchRestore */ +#if CYTHON_FAST_THREAD_STATE +static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { + PyObject *tmp_type, *tmp_value, *tmp_tb; + tmp_type = tstate->curexc_type; + tmp_value = tstate->curexc_value; + tmp_tb = tstate->curexc_traceback; + tstate->curexc_type = type; + tstate->curexc_value = value; + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +} +static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { + *type = tstate->curexc_type; + *value = tstate->curexc_value; + *tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; +} +#endif + +/* WriteUnraisableException */ +static void __Pyx_WriteUnraisable(const char *name, CYTHON_UNUSED int clineno, + CYTHON_UNUSED int lineno, CYTHON_UNUSED const char *filename, + int full_traceback, CYTHON_UNUSED int nogil) { + PyObject *old_exc, *old_val, *old_tb; + PyObject *ctx; + __Pyx_PyThreadState_declare +#ifdef WITH_THREAD + PyGILState_STATE state; + if (nogil) + state = PyGILState_Ensure(); +#ifdef _MSC_VER + else state = (PyGILState_STATE)-1; +#endif +#endif + __Pyx_PyThreadState_assign + __Pyx_ErrFetch(&old_exc, &old_val, &old_tb); + if (full_traceback) { + Py_XINCREF(old_exc); + Py_XINCREF(old_val); + Py_XINCREF(old_tb); + __Pyx_ErrRestore(old_exc, old_val, old_tb); + PyErr_PrintEx(1); + } + #if PY_MAJOR_VERSION < 3 + ctx = PyString_FromString(name); + #else + ctx = PyUnicode_FromString(name); + #endif + __Pyx_ErrRestore(old_exc, old_val, old_tb); + if (!ctx) { + PyErr_WriteUnraisable(Py_None); + } else { + PyErr_WriteUnraisable(ctx); + Py_DECREF(ctx); + } +#ifdef WITH_THREAD + if (nogil) + PyGILState_Release(state); +#endif +} + +/* PyObjectGetAttrStr */ +#if CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) { + PyTypeObject* tp = Py_TYPE(obj); + if (likely(tp->tp_getattro)) + return tp->tp_getattro(obj, attr_name); #if PY_MAJOR_VERSION < 3 + if (likely(tp->tp_getattr)) + return tp->tp_getattr(obj, PyString_AS_STRING(attr_name)); +#endif + return PyObject_GetAttr(obj, attr_name); +} +#endif + +/* CLineInTraceback */ +#ifndef CYTHON_CLINE_IN_TRACEBACK +static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line) { + PyObject *use_cline; + PyObject *ptype, *pvalue, *ptraceback; +#if CYTHON_COMPILING_IN_CPYTHON + PyObject **cython_runtime_dict; +#endif + if (unlikely(!__pyx_cython_runtime)) { + return c_line; + } + __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); +#if CYTHON_COMPILING_IN_CPYTHON + cython_runtime_dict = 
_PyObject_GetDictPtr(__pyx_cython_runtime); + if (likely(cython_runtime_dict)) { + __PYX_PY_DICT_LOOKUP_IF_MODIFIED( + use_cline, *cython_runtime_dict, + __Pyx_PyDict_GetItemStr(*cython_runtime_dict, __pyx_n_s_cline_in_traceback)) + } else +#endif + { + PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback); + if (use_cline_obj) { + use_cline = PyObject_Not(use_cline_obj) ? Py_False : Py_True; + Py_DECREF(use_cline_obj); + } else { + PyErr_Clear(); + use_cline = NULL; + } + } + if (!use_cline) { + c_line = 0; + PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False); + } + else if (use_cline == Py_False || (use_cline != Py_True && PyObject_Not(use_cline) != 0)) { + c_line = 0; + } + __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); + return c_line; +} +#endif + +/* CodeObjectCache */ +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { + int start = 0, mid = 0, end = count - 1; + if (end >= 0 && code_line > entries[end].code_line) { + return count; + } + while (start < end) { + mid = start + (end - start) / 2; + if (code_line < entries[mid].code_line) { + end = mid; + } else if (code_line > entries[mid].code_line) { + start = mid + 1; + } else { + return mid; + } + } + if (code_line <= entries[mid].code_line) { + return mid; + } else { + return mid + 1; + } +} +static PyCodeObject *__pyx_find_code_object(int code_line) { + PyCodeObject* code_object; + int pos; + if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) { + return NULL; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) { + return NULL; + } + code_object = __pyx_code_cache.entries[pos].code_object; + Py_INCREF(code_object); + return code_object; +} +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { + int pos, i; + __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; + if (unlikely(!code_line)) { + return; + } + if (unlikely(!entries)) { + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); + if (likely(entries)) { + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = 64; + __pyx_code_cache.count = 1; + entries[0].code_line = code_line; + entries[0].code_object = code_object; + Py_INCREF(code_object); + } + return; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) { + PyCodeObject* tmp = entries[pos].code_object; + entries[pos].code_object = code_object; + Py_DECREF(tmp); + return; + } + if (__pyx_code_cache.count == __pyx_code_cache.max_count) { + int new_max = __pyx_code_cache.max_count + 64; + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( + __pyx_code_cache.entries, (size_t)new_max*sizeof(__Pyx_CodeObjectCacheEntry)); + if (unlikely(!entries)) { + return; + } + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = new_max; + } + for (i=__pyx_code_cache.count; i>pos; i--) { + entries[i] = entries[i-1]; + } + entries[pos].code_line = code_line; + entries[pos].code_object = code_object; + __pyx_code_cache.count++; + Py_INCREF(code_object); +} + +/* AddTraceback */ +#include "compile.h" +#include "frameobject.h" +#include "traceback.h" +static PyCodeObject* 
__Pyx_CreateCodeObjectForTraceback( + const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyObject *py_srcfile = 0; + PyObject *py_funcname = 0; + #if PY_MAJOR_VERSION < 3 + py_srcfile = PyString_FromString(filename); + #else + py_srcfile = PyUnicode_FromString(filename); + #endif + if (!py_srcfile) goto bad; + if (c_line) { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + #else + py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + #endif + } + else { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromString(funcname); + #else + py_funcname = PyUnicode_FromString(funcname); + #endif + } + if (!py_funcname) goto bad; + py_code = __Pyx_PyCode_New( + 0, + 0, + 0, + 0, + 0, + __pyx_empty_bytes, /*PyObject *code,*/ + __pyx_empty_tuple, /*PyObject *consts,*/ + __pyx_empty_tuple, /*PyObject *names,*/ + __pyx_empty_tuple, /*PyObject *varnames,*/ + __pyx_empty_tuple, /*PyObject *freevars,*/ + __pyx_empty_tuple, /*PyObject *cellvars,*/ + py_srcfile, /*PyObject *filename,*/ + py_funcname, /*PyObject *name,*/ + py_line, + __pyx_empty_bytes /*PyObject *lnotab*/ + ); + Py_DECREF(py_srcfile); + Py_DECREF(py_funcname); + return py_code; +bad: + Py_XDECREF(py_srcfile); + Py_XDECREF(py_funcname); + return NULL; +} +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyFrameObject *py_frame = 0; + PyThreadState *tstate = __Pyx_PyThreadState_Current; + if (c_line) { + c_line = __Pyx_CLineForTraceback(tstate, c_line); + } + py_code = __pyx_find_code_object(c_line ? -c_line : py_line); + if (!py_code) { + py_code = __Pyx_CreateCodeObjectForTraceback( + funcname, c_line, py_line, filename); + if (!py_code) goto bad; + __pyx_insert_code_object(c_line ? 
-c_line : py_line, py_code); + } + py_frame = PyFrame_New( + tstate, /*PyThreadState *tstate,*/ + py_code, /*PyCodeObject *code,*/ + __pyx_d, /*PyObject *globals,*/ + 0 /*PyObject *locals*/ + ); + if (!py_frame) goto bad; + __Pyx_PyFrame_SetLineNumber(py_frame, py_line); + PyTraceBack_Here(py_frame); +bad: + Py_XDECREF(py_code); + Py_XDECREF(py_frame); +} + +/* Print */ +#if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION < 3 static PyObject *__Pyx_GetStdout(void) { PyObject *f = PySys_GetObject((char *)"stdout"); if (!f) { @@ -667,48 +1871,54 @@ static PyObject *__Pyx_GetStdout(void) { } return f; } - static int __Pyx_Print(PyObject* f, PyObject *arg_tuple, int newline) { - PyObject* v; int i; - if (!f) { if (!(f = __Pyx_GetStdout())) return -1; } + Py_INCREF(f); for (i=0; i < PyTuple_GET_SIZE(arg_tuple); i++) { + PyObject* v; if (PyFile_SoftSpace(f, 1)) { if (PyFile_WriteString(" ", f) < 0) - return -1; + goto error; } v = PyTuple_GET_ITEM(arg_tuple, i); if (PyFile_WriteObject(v, f, Py_PRINT_RAW) < 0) - return -1; + goto error; if (PyString_Check(v)) { char *s = PyString_AsString(v); Py_ssize_t len = PyString_Size(v); - if (len > 0 && - isspace(Py_CHARMASK(s[len-1])) && - s[len-1] != ' ') - PyFile_SoftSpace(f, 0); + if (len > 0) { + switch (s[len-1]) { + case ' ': break; + case '\f': case '\r': case '\n': case '\t': case '\v': + PyFile_SoftSpace(f, 0); + break; + default: break; + } + } } } if (newline) { if (PyFile_WriteString("\n", f) < 0) - return -1; + goto error; PyFile_SoftSpace(f, 0); } + Py_DECREF(f); return 0; +error: + Py_DECREF(f); + return -1; } - -#else /* Python 3 has a print function */ - +#else static int __Pyx_Print(PyObject* stream, PyObject *arg_tuple, int newline) { PyObject* kwargs = 0; PyObject* result = 0; PyObject* end_string; if (unlikely(!__pyx_print)) { - __pyx_print = __Pyx_GetAttrString(__pyx_b, "print"); + __pyx_print = PyObject_GetAttr(__pyx_b, __pyx_n_s_print); if (!__pyx_print) return -1; } @@ -716,13 +1926,13 @@ static int __Pyx_Print(PyObject* stream, PyObject *arg_tuple, int newline) { kwargs = PyDict_New(); if (unlikely(!kwargs)) return -1; - if (unlikely(PyDict_SetItemString(kwargs, "file", stream) < 0)) + if (unlikely(PyDict_SetItem(kwargs, __pyx_n_s_file, stream) < 0)) goto bad; if (!newline) { end_string = PyUnicode_FromStringAndSize(" ", 1); if (unlikely(!end_string)) goto bad; - if (PyDict_SetItemString(kwargs, "end", end_string) < 0) { + if (PyDict_SetItem(kwargs, __pyx_n_s_end, end_string) < 0) { Py_DECREF(end_string); goto bad; } @@ -736,7 +1946,7 @@ static int __Pyx_Print(PyObject* stream, PyObject *arg_tuple, int newline) { end_string = PyUnicode_FromStringAndSize(" ", 1); if (unlikely(!end_string)) return -1; - if (PyDict_SetItemString(__pyx_print_kwargs, "end", end_string) < 0) { + if (PyDict_SetItem(__pyx_print_kwargs, __pyx_n_s_end, end_string) < 0) { Py_DECREF(end_string); return -1; } @@ -756,492 +1966,577 @@ static int __Pyx_Print(PyObject* stream, PyObject *arg_tuple, int newline) { Py_XDECREF(kwargs); return -1; } - #endif -#if PY_MAJOR_VERSION < 3 - +/* PrintOne */ +#if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION < 3 static int __Pyx_PrintOne(PyObject* f, PyObject *o) { if (!f) { if (!(f = __Pyx_GetStdout())) return -1; } + Py_INCREF(f); if (PyFile_SoftSpace(f, 0)) { if (PyFile_WriteString(" ", f) < 0) - return -1; + goto error; } if (PyFile_WriteObject(o, f, Py_PRINT_RAW) < 0) - return -1; + goto error; if (PyFile_WriteString("\n", f) < 0) - return -1; + goto error; + Py_DECREF(f); return 0; - /* the line below is just to avoid 
compiler - * compiler warnings about unused functions */ +error: + Py_DECREF(f); + return -1; + /* the line below is just to avoid C compiler + * warnings about unused functions */ return __Pyx_Print(f, NULL, 0); } - -#else /* Python 3 has a print function */ - +#else static int __Pyx_PrintOne(PyObject* stream, PyObject *o) { int res; - PyObject* arg_tuple = PyTuple_New(1); + PyObject* arg_tuple = PyTuple_Pack(1, o); if (unlikely(!arg_tuple)) return -1; - Py_INCREF(o); - PyTuple_SET_ITEM(arg_tuple, 0, o); res = __Pyx_Print(stream, arg_tuple, 1); Py_DECREF(arg_tuple); return res; } +#endif -#endif - -static CYTHON_INLINE unsigned char __Pyx_PyInt_AsUnsignedChar(PyObject* x) { - const unsigned char neg_one = (unsigned char)-1, const_zero = 0; - const int is_unsigned = neg_one > const_zero; - if (sizeof(unsigned char) < sizeof(long)) { - long val = __Pyx_PyInt_AsLong(x); - if (unlikely(val != (long)(unsigned char)val)) { - if (!unlikely(val == -1 && PyErr_Occurred())) { - PyErr_SetString(PyExc_OverflowError, - (is_unsigned && unlikely(val < 0)) ? - "can't convert negative value to unsigned char" : - "value too large to convert to unsigned char"); - } - return (unsigned char)-1; - } - return (unsigned char)val; - } - return (unsigned char)__Pyx_PyInt_AsUnsignedLong(x); -} - -static CYTHON_INLINE unsigned short __Pyx_PyInt_AsUnsignedShort(PyObject* x) { - const unsigned short neg_one = (unsigned short)-1, const_zero = 0; - const int is_unsigned = neg_one > const_zero; - if (sizeof(unsigned short) < sizeof(long)) { - long val = __Pyx_PyInt_AsLong(x); - if (unlikely(val != (long)(unsigned short)val)) { - if (!unlikely(val == -1 && PyErr_Occurred())) { - PyErr_SetString(PyExc_OverflowError, - (is_unsigned && unlikely(val < 0)) ? - "can't convert negative value to unsigned short" : - "value too large to convert to unsigned short"); - } - return (unsigned short)-1; - } - return (unsigned short)val; - } - return (unsigned short)__Pyx_PyInt_AsUnsignedLong(x); -} - -static CYTHON_INLINE unsigned int __Pyx_PyInt_AsUnsignedInt(PyObject* x) { - const unsigned int neg_one = (unsigned int)-1, const_zero = 0; - const int is_unsigned = neg_one > const_zero; - if (sizeof(unsigned int) < sizeof(long)) { - long val = __Pyx_PyInt_AsLong(x); - if (unlikely(val != (long)(unsigned int)val)) { - if (!unlikely(val == -1 && PyErr_Occurred())) { - PyErr_SetString(PyExc_OverflowError, - (is_unsigned && unlikely(val < 0)) ? - "can't convert negative value to unsigned int" : - "value too large to convert to unsigned int"); - } - return (unsigned int)-1; - } - return (unsigned int)val; - } - return (unsigned int)__Pyx_PyInt_AsUnsignedLong(x); -} - -static CYTHON_INLINE char __Pyx_PyInt_AsChar(PyObject* x) { - const char neg_one = (char)-1, const_zero = 0; - const int is_unsigned = neg_one > const_zero; - if (sizeof(char) < sizeof(long)) { - long val = __Pyx_PyInt_AsLong(x); - if (unlikely(val != (long)(char)val)) { - if (!unlikely(val == -1 && PyErr_Occurred())) { - PyErr_SetString(PyExc_OverflowError, - (is_unsigned && unlikely(val < 0)) ? 
- "can't convert negative value to char" : - "value too large to convert to char"); - } - return (char)-1; - } - return (char)val; - } - return (char)__Pyx_PyInt_AsLong(x); -} - -static CYTHON_INLINE short __Pyx_PyInt_AsShort(PyObject* x) { - const short neg_one = (short)-1, const_zero = 0; - const int is_unsigned = neg_one > const_zero; - if (sizeof(short) < sizeof(long)) { - long val = __Pyx_PyInt_AsLong(x); - if (unlikely(val != (long)(short)val)) { - if (!unlikely(val == -1 && PyErr_Occurred())) { - PyErr_SetString(PyExc_OverflowError, - (is_unsigned && unlikely(val < 0)) ? - "can't convert negative value to short" : - "value too large to convert to short"); - } - return (short)-1; - } - return (short)val; - } - return (short)__Pyx_PyInt_AsLong(x); -} - -static CYTHON_INLINE int __Pyx_PyInt_AsInt(PyObject* x) { - const int neg_one = (int)-1, const_zero = 0; - const int is_unsigned = neg_one > const_zero; - if (sizeof(int) < sizeof(long)) { - long val = __Pyx_PyInt_AsLong(x); - if (unlikely(val != (long)(int)val)) { - if (!unlikely(val == -1 && PyErr_Occurred())) { - PyErr_SetString(PyExc_OverflowError, - (is_unsigned && unlikely(val < 0)) ? - "can't convert negative value to int" : - "value too large to convert to int"); - } - return (int)-1; - } - return (int)val; - } - return (int)__Pyx_PyInt_AsLong(x); -} - -static CYTHON_INLINE signed char __Pyx_PyInt_AsSignedChar(PyObject* x) { - const signed char neg_one = (signed char)-1, const_zero = 0; - const int is_unsigned = neg_one > const_zero; - if (sizeof(signed char) < sizeof(long)) { - long val = __Pyx_PyInt_AsLong(x); - if (unlikely(val != (long)(signed char)val)) { - if (!unlikely(val == -1 && PyErr_Occurred())) { - PyErr_SetString(PyExc_OverflowError, - (is_unsigned && unlikely(val < 0)) ? - "can't convert negative value to signed char" : - "value too large to convert to signed char"); - } - return (signed char)-1; - } - return (signed char)val; - } - return (signed char)__Pyx_PyInt_AsSignedLong(x); -} - -static CYTHON_INLINE signed short __Pyx_PyInt_AsSignedShort(PyObject* x) { - const signed short neg_one = (signed short)-1, const_zero = 0; - const int is_unsigned = neg_one > const_zero; - if (sizeof(signed short) < sizeof(long)) { - long val = __Pyx_PyInt_AsLong(x); - if (unlikely(val != (long)(signed short)val)) { - if (!unlikely(val == -1 && PyErr_Occurred())) { - PyErr_SetString(PyExc_OverflowError, - (is_unsigned && unlikely(val < 0)) ? - "can't convert negative value to signed short" : - "value too large to convert to signed short"); - } - return (signed short)-1; - } - return (signed short)val; - } - return (signed short)__Pyx_PyInt_AsSignedLong(x); -} - -static CYTHON_INLINE signed int __Pyx_PyInt_AsSignedInt(PyObject* x) { - const signed int neg_one = (signed int)-1, const_zero = 0; - const int is_unsigned = neg_one > const_zero; - if (sizeof(signed int) < sizeof(long)) { - long val = __Pyx_PyInt_AsLong(x); - if (unlikely(val != (long)(signed int)val)) { - if (!unlikely(val == -1 && PyErr_Occurred())) { - PyErr_SetString(PyExc_OverflowError, - (is_unsigned && unlikely(val < 0)) ? 
- "can't convert negative value to signed int" : - "value too large to convert to signed int"); - } - return (signed int)-1; - } - return (signed int)val; - } - return (signed int)__Pyx_PyInt_AsSignedLong(x); -} - -static CYTHON_INLINE int __Pyx_PyInt_AsLongDouble(PyObject* x) { - const int neg_one = (int)-1, const_zero = 0; - const int is_unsigned = neg_one > const_zero; - if (sizeof(int) < sizeof(long)) { - long val = __Pyx_PyInt_AsLong(x); - if (unlikely(val != (long)(int)val)) { - if (!unlikely(val == -1 && PyErr_Occurred())) { - PyErr_SetString(PyExc_OverflowError, - (is_unsigned && unlikely(val < 0)) ? - "can't convert negative value to int" : - "value too large to convert to int"); - } - return (int)-1; - } - return (int)val; - } - return (int)__Pyx_PyInt_AsLong(x); -} - -static CYTHON_INLINE unsigned long __Pyx_PyInt_AsUnsignedLong(PyObject* x) { - const unsigned long neg_one = (unsigned long)-1, const_zero = 0; +/* CIntToPy */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) { + const long neg_one = (long) ((long) 0 - (long) 1), const_zero = (long) 0; const int is_unsigned = neg_one > const_zero; -#if PY_VERSION_HEX < 0x03000000 - if (likely(PyInt_Check(x))) { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to unsigned long"); - return (unsigned long)-1; - } - return (unsigned long)val; - } else + if (is_unsigned) { + if (sizeof(long) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(long) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); #endif - if (likely(PyLong_Check(x))) { - if (is_unsigned) { - if (unlikely(Py_SIZE(x) < 0)) { - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to unsigned long"); - return (unsigned long)-1; - } - return (unsigned long)PyLong_AsUnsignedLong(x); - } else { - return (unsigned long)PyLong_AsLong(x); } } else { - unsigned long val; - PyObject *tmp = __Pyx_PyNumber_Int(x); - if (!tmp) return (unsigned long)-1; - val = __Pyx_PyInt_AsUnsignedLong(tmp); - Py_DECREF(tmp); - return val; - } -} - -static CYTHON_INLINE unsigned PY_LONG_LONG __Pyx_PyInt_AsUnsignedLongLong(PyObject* x) { - const unsigned PY_LONG_LONG neg_one = (unsigned PY_LONG_LONG)-1, const_zero = 0; - const int is_unsigned = neg_one > const_zero; -#if PY_VERSION_HEX < 0x03000000 - if (likely(PyInt_Check(x))) { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to unsigned PY_LONG_LONG"); - return (unsigned PY_LONG_LONG)-1; - } - return (unsigned PY_LONG_LONG)val; - } else + if (sizeof(long) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); #endif - if (likely(PyLong_Check(x))) { - if (is_unsigned) { - if (unlikely(Py_SIZE(x) < 0)) { - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to unsigned PY_LONG_LONG"); - return (unsigned PY_LONG_LONG)-1; - } - return (unsigned PY_LONG_LONG)PyLong_AsUnsignedLongLong(x); - } else { - return (unsigned PY_LONG_LONG)PyLong_AsLongLong(x); } - } else { - unsigned PY_LONG_LONG val; - PyObject *tmp = __Pyx_PyNumber_Int(x); - if (!tmp) return (unsigned PY_LONG_LONG)-1; 
- val = __Pyx_PyInt_AsUnsignedLongLong(tmp); - Py_DECREF(tmp); - return val; + } + { + int one = 1; int little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&value; + return _PyLong_FromByteArray(bytes, sizeof(long), + little, !is_unsigned); } } -static CYTHON_INLINE long __Pyx_PyInt_AsLong(PyObject* x) { - const long neg_one = (long)-1, const_zero = 0; +/* CIntFromPyVerify */ +#define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0) +#define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1) +#define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\ + {\ + func_type value = func_value;\ + if (sizeof(target_type) < sizeof(func_type)) {\ + if (unlikely(value != (func_type) (target_type) value)) {\ + func_type zero = 0;\ + if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\ + return (target_type) -1;\ + if (is_unsigned && unlikely(value < zero))\ + goto raise_neg_overflow;\ + else\ + goto raise_overflow;\ + }\ + }\ + return (target_type) value;\ + } + +/* CIntFromPy */ +static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) { + const long neg_one = (long) ((long) 0 - (long) 1), const_zero = (long) 0; const int is_unsigned = neg_one > const_zero; -#if PY_VERSION_HEX < 0x03000000 +#if PY_MAJOR_VERSION < 3 if (likely(PyInt_Check(x))) { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to long"); - return (long)-1; + if (sizeof(long) < sizeof(long)) { + __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (long) val; } - return (long)val; } else #endif if (likely(PyLong_Check(x))) { if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (long) 0; + case 1: __PYX_VERIFY_RETURN_INT(long, digit, digits[0]) + case 2: + if (8 * sizeof(long) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 2 * PyLong_SHIFT) { + return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 3: + if (8 * sizeof(long) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 3 * PyLong_SHIFT) { + return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 4: + if (8 * sizeof(long) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 4 * PyLong_SHIFT) { + return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + } 
+#endif +#if CYTHON_COMPILING_IN_CPYTHON if (unlikely(Py_SIZE(x) < 0)) { - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to long"); - return (long)-1; + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (long) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if (sizeof(long) <= sizeof(unsigned long)) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif } - return (long)PyLong_AsUnsignedLong(x); } else { - return (long)PyLong_AsLong(x); +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (long) 0; + case -1: __PYX_VERIFY_RETURN_INT(long, sdigit, (sdigit) (-(sdigit)digits[0])) + case 1: __PYX_VERIFY_RETURN_INT(long, digit, +digits[0]) + case -2: + if (8 * sizeof(long) - 1 > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 2: + if (8 * sizeof(long) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -3: + if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 3: + if (8 * sizeof(long) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -4: + if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 4: + if (8 * sizeof(long) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << 
PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + } +#endif + if (sizeof(long) <= sizeof(long)) { + __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { +#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) + PyErr_SetString(PyExc_RuntimeError, + "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); +#else + long val; + PyObject *v = __Pyx_PyNumber_IntOrLong(x); + #if PY_MAJOR_VERSION < 3 + if (likely(v) && !PyLong_Check(v)) { + PyObject *tmp = v; + v = PyNumber_Long(tmp); + Py_DECREF(tmp); + } + #endif + if (likely(v)) { + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + int ret = _PyLong_AsByteArray((PyLongObject *)v, + bytes, sizeof(val), + is_little, !is_unsigned); + Py_DECREF(v); + if (likely(!ret)) + return val; + } +#endif + return (long) -1; } } else { long val; - PyObject *tmp = __Pyx_PyNumber_Int(x); - if (!tmp) return (long)-1; - val = __Pyx_PyInt_AsLong(tmp); + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (long) -1; + val = __Pyx_PyInt_As_long(tmp); Py_DECREF(tmp); return val; } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to long"); + return (long) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to long"); + return (long) -1; } -static CYTHON_INLINE PY_LONG_LONG __Pyx_PyInt_AsLongLong(PyObject* x) { - const PY_LONG_LONG neg_one = (PY_LONG_LONG)-1, const_zero = 0; +/* CIntFromPy */ +static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) { + const int neg_one = (int) ((int) 0 - (int) 1), const_zero = (int) 0; const int is_unsigned = neg_one > const_zero; -#if PY_VERSION_HEX < 0x03000000 +#if PY_MAJOR_VERSION < 3 if (likely(PyInt_Check(x))) { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to PY_LONG_LONG"); - return (PY_LONG_LONG)-1; + if (sizeof(int) < sizeof(long)) { + __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (int) val; } - return (PY_LONG_LONG)val; } else #endif if (likely(PyLong_Check(x))) { if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (int) 0; + case 1: __PYX_VERIFY_RETURN_INT(int, digit, digits[0]) + case 2: + if (8 * sizeof(int) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 2 * PyLong_SHIFT) { + return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 3: + if (8 * sizeof(int) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | 
(unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 3 * PyLong_SHIFT) { + return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 4: + if (8 * sizeof(int) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 4 * PyLong_SHIFT) { + return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON if (unlikely(Py_SIZE(x) < 0)) { - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to PY_LONG_LONG"); - return (PY_LONG_LONG)-1; + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (int) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if (sizeof(int) <= sizeof(unsigned long)) { + __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif } - return (PY_LONG_LONG)PyLong_AsUnsignedLongLong(x); } else { - return (PY_LONG_LONG)PyLong_AsLongLong(x); +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (int) 0; + case -1: __PYX_VERIFY_RETURN_INT(int, sdigit, (sdigit) (-(sdigit)digits[0])) + case 1: __PYX_VERIFY_RETURN_INT(int, digit, +digits[0]) + case -2: + if (8 * sizeof(int) - 1 > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 2: + if (8 * sizeof(int) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case -3: + if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 3: + if (8 * sizeof(int) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) 
| (int)digits[0]))); + } + } + break; + case -4: + if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 4: + if (8 * sizeof(int) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { + return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + } +#endif + if (sizeof(int) <= sizeof(long)) { + __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { +#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) + PyErr_SetString(PyExc_RuntimeError, + "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); +#else + int val; + PyObject *v = __Pyx_PyNumber_IntOrLong(x); + #if PY_MAJOR_VERSION < 3 + if (likely(v) && !PyLong_Check(v)) { + PyObject *tmp = v; + v = PyNumber_Long(tmp); + Py_DECREF(tmp); + } + #endif + if (likely(v)) { + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + int ret = _PyLong_AsByteArray((PyLongObject *)v, + bytes, sizeof(val), + is_little, !is_unsigned); + Py_DECREF(v); + if (likely(!ret)) + return val; + } +#endif + return (int) -1; } } else { - PY_LONG_LONG val; - PyObject *tmp = __Pyx_PyNumber_Int(x); - if (!tmp) return (PY_LONG_LONG)-1; - val = __Pyx_PyInt_AsLongLong(tmp); + int val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (int) -1; + val = __Pyx_PyInt_As_int(tmp); Py_DECREF(tmp); return val; } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to int"); + return (int) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to int"); + return (int) -1; } -static CYTHON_INLINE signed long __Pyx_PyInt_AsSignedLong(PyObject* x) { - const signed long neg_one = (signed long)-1, const_zero = 0; - const int is_unsigned = neg_one > const_zero; -#if PY_VERSION_HEX < 0x03000000 - if (likely(PyInt_Check(x))) { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to signed long"); - return (signed long)-1; - } - return (signed long)val; - } else -#endif - if (likely(PyLong_Check(x))) { - if (is_unsigned) { - if (unlikely(Py_SIZE(x) < 0)) { - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to signed long"); - return (signed long)-1; - } - return (signed long)PyLong_AsUnsignedLong(x); - } else { - return (signed long)PyLong_AsLong(x); +/* FastTypeChecks */ +#if CYTHON_COMPILING_IN_CPYTHON +static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) 
{ + while (a) { + a = a->tp_base; + if (a == b) + return 1; + } + return b == &PyBaseObject_Type; +} +static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) { + PyObject *mro; + if (a == b) return 1; + mro = a->tp_mro; + if (likely(mro)) { + Py_ssize_t i, n; + n = PyTuple_GET_SIZE(mro); + for (i = 0; i < n; i++) { + if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b) + return 1; } - } else { - signed long val; - PyObject *tmp = __Pyx_PyNumber_Int(x); - if (!tmp) return (signed long)-1; - val = __Pyx_PyInt_AsSignedLong(tmp); - Py_DECREF(tmp); - return val; + return 0; } + return __Pyx_InBases(a, b); } - -static CYTHON_INLINE signed PY_LONG_LONG __Pyx_PyInt_AsSignedLongLong(PyObject* x) { - const signed PY_LONG_LONG neg_one = (signed PY_LONG_LONG)-1, const_zero = 0; - const int is_unsigned = neg_one > const_zero; -#if PY_VERSION_HEX < 0x03000000 - if (likely(PyInt_Check(x))) { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to signed PY_LONG_LONG"); - return (signed PY_LONG_LONG)-1; +#if PY_MAJOR_VERSION == 2 +static int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) { + PyObject *exception, *value, *tb; + int res; + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ErrFetch(&exception, &value, &tb); + res = exc_type1 ? PyObject_IsSubclass(err, exc_type1) : 0; + if (unlikely(res == -1)) { + PyErr_WriteUnraisable(err); + res = 0; + } + if (!res) { + res = PyObject_IsSubclass(err, exc_type2); + if (unlikely(res == -1)) { + PyErr_WriteUnraisable(err); + res = 0; } - return (signed PY_LONG_LONG)val; - } else + } + __Pyx_ErrRestore(exception, value, tb); + return res; +} +#else +static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) { + int res = exc_type1 ? 
__Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type1) : 0; + if (!res) { + res = __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2); + } + return res; +} #endif - if (likely(PyLong_Check(x))) { - if (is_unsigned) { - if (unlikely(Py_SIZE(x) < 0)) { - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to signed PY_LONG_LONG"); - return (signed PY_LONG_LONG)-1; - } - return (signed PY_LONG_LONG)PyLong_AsUnsignedLongLong(x); +static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { + Py_ssize_t i, n; + assert(PyExceptionClass_Check(exc_type)); + n = PyTuple_GET_SIZE(tuple); +#if PY_MAJOR_VERSION >= 3 + for (i=0; icurexc_type; - tmp_value = tstate->curexc_value; - tmp_tb = tstate->curexc_traceback; - tstate->curexc_type = type; - tstate->curexc_value = value; - tstate->curexc_traceback = tb; - Py_XDECREF(tmp_type); - Py_XDECREF(tmp_value); - Py_XDECREF(tmp_tb); -} - -static CYTHON_INLINE void __Pyx_ErrFetch(PyObject **type, PyObject **value, PyObject **tb) { - PyThreadState *tstate = PyThreadState_GET(); - *type = tstate->curexc_type; - *value = tstate->curexc_value; - *tb = tstate->curexc_traceback; - - tstate->curexc_type = 0; - tstate->curexc_value = 0; - tstate->curexc_traceback = 0; +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject* exc_type) { + if (likely(err == exc_type)) return 1; + if (likely(PyExceptionClass_Check(err))) { + if (likely(PyExceptionClass_Check(exc_type))) { + return __Pyx_inner_PyErr_GivenExceptionMatches2(err, NULL, exc_type); + } else if (likely(PyTuple_Check(exc_type))) { + return __Pyx_PyErr_GivenExceptionMatchesTuple(err, exc_type); + } else { + } + } + return PyErr_GivenExceptionMatches(err, exc_type); } - - -static void __Pyx_WriteUnraisable(const char *name, int clineno, - int lineno, const char *filename) { - PyObject *old_exc, *old_val, *old_tb; - PyObject *ctx; - __Pyx_ErrFetch(&old_exc, &old_val, &old_tb); - #if PY_MAJOR_VERSION < 3 - ctx = PyString_FromString(name); - #else - ctx = PyUnicode_FromString(name); - #endif - __Pyx_ErrRestore(old_exc, old_val, old_tb); - if (!ctx) { - PyErr_WriteUnraisable(Py_None); - } else { - PyErr_WriteUnraisable(ctx); - Py_DECREF(ctx); +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *exc_type1, PyObject *exc_type2) { + assert(PyExceptionClass_Check(exc_type1)); + assert(PyExceptionClass_Check(exc_type2)); + if (likely(err == exc_type1 || err == exc_type2)) return 1; + if (likely(PyExceptionClass_Check(err))) { + return __Pyx_inner_PyErr_GivenExceptionMatches2(err, exc_type1, exc_type2); } + return (PyErr_GivenExceptionMatches(err, exc_type1) || PyErr_GivenExceptionMatches(err, exc_type2)); } +#endif +/* CheckBinaryVersion */ static int __Pyx_check_binary_version(void) { char ctversion[4], rtversion[4]; PyOS_snprintf(ctversion, 4, "%d.%d", PY_MAJOR_VERSION, PY_MINOR_VERSION); @@ -1252,15 +2547,12 @@ static int __Pyx_check_binary_version(void) { "compiletime version %s of module '%.100s' " "does not match runtime version %s", ctversion, __Pyx_MODULE_NAME, rtversion); - #if PY_VERSION_HEX < 0x02050000 - return PyErr_Warn(NULL, message); - #else return PyErr_WarnEx(NULL, message, 1); - #endif } return 0; } +/* FunctionExport */ static int __Pyx_ExportFunction(const char *name, void (*f)(void), const char *sig) { PyObject *d = 0; PyObject *cobj = 0; @@ -1268,7 +2560,6 @@ static int __Pyx_ExportFunction(const char *name, void (*f)(void), const char *s void (*fp)(void); void *p; } tmp; - d = 
PyObject_GetAttrString(__pyx_m, (char *)"__pyx_capi__"); if (!d) { PyErr_Clear(); @@ -1280,7 +2571,7 @@ static int __Pyx_ExportFunction(const char *name, void (*f)(void), const char *s goto bad; } tmp.fp = f; -#if PY_VERSION_HEX >= 0x02070000 && !(PY_MAJOR_VERSION==3&&PY_MINOR_VERSION==0) +#if PY_VERSION_HEX >= 0x02070000 cobj = PyCapsule_New(tmp.p, sig, 0); #else cobj = PyCObject_FromVoidPtrAndDesc(tmp.p, (void *)sig, 0); @@ -1298,77 +2589,7 @@ static int __Pyx_ExportFunction(const char *name, void (*f)(void), const char *s return -1; } -#include "compile.h" -#include "frameobject.h" -#include "traceback.h" - -static void __Pyx_AddTraceback(const char *funcname, int __pyx_clineno, - int __pyx_lineno, const char *__pyx_filename) { - PyObject *py_srcfile = 0; - PyObject *py_funcname = 0; - PyObject *py_globals = 0; - PyCodeObject *py_code = 0; - PyFrameObject *py_frame = 0; - - #if PY_MAJOR_VERSION < 3 - py_srcfile = PyString_FromString(__pyx_filename); - #else - py_srcfile = PyUnicode_FromString(__pyx_filename); - #endif - if (!py_srcfile) goto bad; - if (__pyx_clineno) { - #if PY_MAJOR_VERSION < 3 - py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, __pyx_clineno); - #else - py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, __pyx_clineno); - #endif - } - else { - #if PY_MAJOR_VERSION < 3 - py_funcname = PyString_FromString(funcname); - #else - py_funcname = PyUnicode_FromString(funcname); - #endif - } - if (!py_funcname) goto bad; - py_globals = PyModule_GetDict(__pyx_m); - if (!py_globals) goto bad; - py_code = PyCode_New( - 0, /*int argcount,*/ - #if PY_MAJOR_VERSION >= 3 - 0, /*int kwonlyargcount,*/ - #endif - 0, /*int nlocals,*/ - 0, /*int stacksize,*/ - 0, /*int flags,*/ - __pyx_empty_bytes, /*PyObject *code,*/ - __pyx_empty_tuple, /*PyObject *consts,*/ - __pyx_empty_tuple, /*PyObject *names,*/ - __pyx_empty_tuple, /*PyObject *varnames,*/ - __pyx_empty_tuple, /*PyObject *freevars,*/ - __pyx_empty_tuple, /*PyObject *cellvars,*/ - py_srcfile, /*PyObject *filename,*/ - py_funcname, /*PyObject *name,*/ - __pyx_lineno, /*int firstlineno,*/ - __pyx_empty_bytes /*PyObject *lnotab*/ - ); - if (!py_code) goto bad; - py_frame = PyFrame_New( - PyThreadState_GET(), /*PyThreadState *tstate,*/ - py_code, /*PyCodeObject *code,*/ - py_globals, /*PyObject *globals,*/ - 0 /*PyObject *locals*/ - ); - if (!py_frame) goto bad; - py_frame->f_lineno = __pyx_lineno; - PyTraceBack_Here(py_frame); -bad: - Py_XDECREF(py_srcfile); - Py_XDECREF(py_funcname); - Py_XDECREF(py_code); - Py_XDECREF(py_frame); -} - +/* InitStrings */ static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) { while (t->p) { #if PY_MAJOR_VERSION < 3 @@ -1379,7 +2600,7 @@ static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) { } else { *t->p = PyString_FromStringAndSize(t->s, t->n - 1); } - #else /* Python 3+ has unicode identifiers */ + #else if (t->is_unicode | t->is_str) { if (t->intern) { *t->p = PyUnicode_InternFromString(t->s); @@ -1394,56 +2615,158 @@ static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) { #endif if (!*t->p) return -1; + if (PyObject_Hash(*t->p) == -1) + return -1; ++t; } return 0; } -/* Type Conversion Functions */ - +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) { + return __Pyx_PyUnicode_FromStringAndSize(c_str, (Py_ssize_t)strlen(c_str)); +} +static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) { + Py_ssize_t ignore; + return __Pyx_PyObject_AsStringAndSize(o, &ignore); +} +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || 
__PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT +#if !CYTHON_PEP393_ENABLED +static const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { + char* defenc_c; + PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL); + if (!defenc) return NULL; + defenc_c = PyBytes_AS_STRING(defenc); +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + { + char* end = defenc_c + PyBytes_GET_SIZE(defenc); + char* c; + for (c = defenc_c; c < end; c++) { + if ((unsigned char) (*c) >= 128) { + PyUnicode_AsASCIIString(o); + return NULL; + } + } + } +#endif + *length = PyBytes_GET_SIZE(defenc); + return defenc_c; +} +#else +static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { + if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL; +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + if (likely(PyUnicode_IS_ASCII(o))) { + *length = PyUnicode_GET_LENGTH(o); + return PyUnicode_AsUTF8(o); + } else { + PyUnicode_AsASCIIString(o); + return NULL; + } +#else + return PyUnicode_AsUTF8AndSize(o, length); +#endif +} +#endif +#endif +static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) { +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT + if ( +#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + __Pyx_sys_getdefaultencoding_not_ascii && +#endif + PyUnicode_Check(o)) { + return __Pyx_PyUnicode_AsStringAndSize(o, length); + } else +#endif +#if (!CYTHON_COMPILING_IN_PYPY) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE)) + if (PyByteArray_Check(o)) { + *length = PyByteArray_GET_SIZE(o); + return PyByteArray_AS_STRING(o); + } else +#endif + { + char* result; + int r = PyBytes_AsStringAndSize(o, &result, length); + if (unlikely(r < 0)) { + return NULL; + } else { + return result; + } + } +} static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { int is_true = x == Py_True; if (is_true | (x == Py_False) | (x == Py_None)) return is_true; else return PyObject_IsTrue(x); } - -static CYTHON_INLINE PyObject* __Pyx_PyNumber_Int(PyObject* x) { +static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject* x) { + int retval; + if (unlikely(!x)) return -1; + retval = __Pyx_PyObject_IsTrue(x); + Py_DECREF(x); + return retval; +} +static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) { +#if PY_MAJOR_VERSION >= 3 + if (PyLong_Check(result)) { + if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, + "__int__ returned non-int (type %.200s). 
" + "The ability to return an instance of a strict subclass of int " + "is deprecated, and may be removed in a future version of Python.", + Py_TYPE(result)->tp_name)) { + Py_DECREF(result); + return NULL; + } + return result; + } +#endif + PyErr_Format(PyExc_TypeError, + "__%.4s__ returned non-%.4s (type %.200s)", + type_name, type_name, Py_TYPE(result)->tp_name); + Py_DECREF(result); + return NULL; +} +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) { +#if CYTHON_USE_TYPE_SLOTS PyNumberMethods *m; +#endif const char *name = NULL; PyObject *res = NULL; -#if PY_VERSION_HEX < 0x03000000 - if (PyInt_Check(x) || PyLong_Check(x)) +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x) || PyLong_Check(x))) #else - if (PyLong_Check(x)) + if (likely(PyLong_Check(x))) #endif - return Py_INCREF(x), x; + return __Pyx_NewRef(x); +#if CYTHON_USE_TYPE_SLOTS m = Py_TYPE(x)->tp_as_number; -#if PY_VERSION_HEX < 0x03000000 + #if PY_MAJOR_VERSION < 3 if (m && m->nb_int) { name = "int"; - res = PyNumber_Int(x); + res = m->nb_int(x); } else if (m && m->nb_long) { name = "long"; - res = PyNumber_Long(x); + res = m->nb_long(x); } -#else - if (m && m->nb_int) { + #else + if (likely(m && m->nb_int)) { name = "int"; - res = PyNumber_Long(x); + res = m->nb_int(x); + } + #endif +#else + if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) { + res = PyNumber_Int(x); } #endif - if (res) { -#if PY_VERSION_HEX < 0x03000000 - if (!PyInt_Check(res) && !PyLong_Check(res)) { + if (likely(res)) { +#if PY_MAJOR_VERSION < 3 + if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) { #else - if (!PyLong_Check(res)) { + if (unlikely(!PyLong_CheckExact(res))) { #endif - PyErr_Format(PyExc_TypeError, - "__%s__ returned non-%s (type %.200s)", - name, name, Py_TYPE(res)->tp_name); - Py_DECREF(res); - return NULL; + return __Pyx_PyNumber_IntOrLongWrongResultType(res, name); } } else if (!PyErr_Occurred()) { @@ -1452,40 +2775,73 @@ static CYTHON_INLINE PyObject* __Pyx_PyNumber_Int(PyObject* x) { } return res; } - static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { Py_ssize_t ival; - PyObject* x = PyNumber_Index(b); + PyObject *x; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_CheckExact(b))) { + if (sizeof(Py_ssize_t) >= sizeof(long)) + return PyInt_AS_LONG(b); + else + return PyInt_AsSsize_t(b); + } +#endif + if (likely(PyLong_CheckExact(b))) { + #if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)b)->ob_digit; + const Py_ssize_t size = Py_SIZE(b); + if (likely(__Pyx_sst_abs(size) <= 1)) { + ival = likely(size) ? 
digits[0] : 0; + if (size == -1) ival = -ival; + return ival; + } else { + switch (size) { + case 2: + if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { + return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -2: + if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + } + } + #endif + return PyLong_AsSsize_t(b); + } + x = PyNumber_Index(b); if (!x) return -1; ival = PyInt_AsSsize_t(x); Py_DECREF(x); return ival; } - -static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { -#if PY_VERSION_HEX < 0x02050000 - if (ival <= LONG_MAX) - return PyInt_FromLong((long)ival); - else { - unsigned char *bytes = (unsigned char *) &ival; - int one = 1; int little = (int)*(unsigned char*)&one; - return _PyLong_FromByteArray(bytes, sizeof(size_t), little, 0); - } -#else - return PyInt_FromSize_t(ival); -#endif +static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) { + return b ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False); } - -static CYTHON_INLINE size_t __Pyx_PyInt_AsSize_t(PyObject* x) { - unsigned PY_LONG_LONG val = __Pyx_PyInt_AsUnsignedLongLong(x); - if (unlikely(val == (unsigned PY_LONG_LONG)-1 && PyErr_Occurred())) { - return (size_t)-1; - } else if (unlikely(val != (unsigned PY_LONG_LONG)(size_t)val)) { - PyErr_SetString(PyExc_OverflowError, - "value too large to convert to size_t"); - return (size_t)-1; - } - return (size_t)val; +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { + return PyInt_FromSize_t(ival); } diff --git a/playground/cython/cython_cache/src/cy_cxxtest.h b/playground/cython/cython_cache/src/cy_cxxtest.h index 981a6c7d42..af40fd5905 100644 --- a/playground/cython/cython_cache/src/cy_cxxtest.h +++ b/playground/cython/cython_cache/src/cy_cxxtest.h @@ -1,3 +1,5 @@ +/* Generated by Cython 0.29 */ + #ifndef __PYX_HAVE__cy_cxxtest #define __PYX_HAVE__cy_cxxtest @@ -12,10 +14,17 @@ #endif #endif -__PYX_EXTERN_C DL_IMPORT(void) cy_hello(void); +#ifndef DL_IMPORT + #define DL_IMPORT(_T) _T +#endif + +__PYX_EXTERN_C void cy_hello(void); #endif /* !__PYX_HAVE_API__cy_cxxtest */ +/* WARNING: the interface of the module init function changed in CPython 3.5. */ +/* It now returns a PyModuleDef instance instead of a PyModule instance. 
*/ + #if PY_MAJOR_VERSION < 3 PyMODINIT_FUNC initcy_cxxtest(void); #else diff --git a/playground/cython/cython_cache/src/cy_cxxtest_api.h b/playground/cython/cython_cache/src/cy_cxxtest_api.h index 3f7f2c5bde..871d67313f 100644 --- a/playground/cython/cython_cache/src/cy_cxxtest_api.h +++ b/playground/cython/cython_cache/src/cy_cxxtest_api.h @@ -1,31 +1,18 @@ +/* Generated by Cython 0.29 */ + #ifndef __PYX_HAVE_API__cy_cxxtest #define __PYX_HAVE_API__cy_cxxtest #include "Python.h" #include "cy_cxxtest.h" -static void (*__pyx_f_10cy_cxxtest_cy_hello)(void) = 0; -#define cy_hello __pyx_f_10cy_cxxtest_cy_hello - -#ifndef __PYX_HAVE_RT_ImportModule -#define __PYX_HAVE_RT_ImportModule -static PyObject *__Pyx_ImportModule(const char *name) { - PyObject *py_name = 0; - PyObject *py_module = 0; - - #if PY_MAJOR_VERSION < 3 - py_name = PyString_FromString(name); - #else - py_name = PyUnicode_FromString(name); - #endif - if (!py_name) - goto bad; - py_module = PyImport_Import(py_name); - Py_DECREF(py_name); - return py_module; -bad: - Py_XDECREF(py_name); - return 0; -} +static void (*__pyx_api_f_10cy_cxxtest_cy_hello)(void) = 0; +#define cy_hello __pyx_api_f_10cy_cxxtest_cy_hello +#if !defined(__Pyx_PyIdentifier_FromString) +#if PY_MAJOR_VERSION < 3 + #define __Pyx_PyIdentifier_FromString(s) PyString_FromString(s) +#else + #define __Pyx_PyIdentifier_FromString(s) PyUnicode_FromString(s) +#endif #endif #ifndef __PYX_HAVE_RT_ImportFunction @@ -37,21 +24,20 @@ static int __Pyx_ImportFunction(PyObject *module, const char *funcname, void (** void (*fp)(void); void *p; } tmp; - d = PyObject_GetAttrString(module, (char *)"__pyx_capi__"); if (!d) goto bad; cobj = PyDict_GetItemString(d, funcname); if (!cobj) { PyErr_Format(PyExc_ImportError, - "%s does not export expected C function %s", + "%.200s does not export expected C function %.200s", PyModule_GetName(module), funcname); goto bad; } -#if PY_VERSION_HEX >= 0x02070000 && !(PY_MAJOR_VERSION==3&&PY_MINOR_VERSION==0) +#if PY_VERSION_HEX >= 0x02070000 if (!PyCapsule_IsValid(cobj, sig)) { PyErr_Format(PyExc_TypeError, - "C function %s.%s has wrong signature (expected %s, got %s)", + "C function %.200s.%.200s has wrong signature (expected %.500s, got %.500s)", PyModule_GetName(module), funcname, sig, PyCapsule_GetName(cobj)); goto bad; } @@ -65,7 +51,7 @@ static int __Pyx_ImportFunction(PyObject *module, const char *funcname, void (** while (*s1 != '\0' && *s1 == *s2) { s1++; s2++; } if (*s1 != *s2) { PyErr_Format(PyExc_TypeError, - "C function %s.%s has wrong signature (expected %s, got %s)", + "C function %.200s.%.200s has wrong signature (expected %.500s, got %.500s)", PyModule_GetName(module), funcname, sig, desc); goto bad; } @@ -82,11 +68,12 @@ static int __Pyx_ImportFunction(PyObject *module, const char *funcname, void (** } #endif + static int import_cy_cxxtest(void) { PyObject *module = 0; - module = __Pyx_ImportModule("cy_cxxtest"); + module = PyImport_ImportModule("cy_cxxtest"); if (!module) goto bad; - if (__Pyx_ImportFunction(module, "cy_hello", (void (**)(void))&__pyx_f_10cy_cxxtest_cy_hello, "void (void)") < 0) goto bad; + if (__Pyx_ImportFunction(module, "cy_hello", (void (**)(void))&__pyx_api_f_10cy_cxxtest_cy_hello, "void (void)") < 0) goto bad; Py_DECREF(module); module = 0; return 0; bad: diff --git a/playground/cython/src/cy_ctest.pyx b/playground/cython/src/cy_ctest.pyx index ddf4b0be98..2fc1f96cc8 100644 --- a/playground/cython/src/cy_ctest.pyx +++ b/playground/cython/src/cy_ctest.pyx @@ -1,4 +1,7 @@ +from cpython.version cimport 
PY_VERSION cimport cy_ctest +#cimport commented_import def pyhello(): cy_ctest.hello() + print("Compiled with python version %s" % PY_VERSION) diff --git a/playground/cython/src/cy_cxxtest.pyx b/playground/cython/src/cy_cxxtest.pyx index 1102b97b24..85edefc884 100644 --- a/playground/cython/src/cy_cxxtest.pyx +++ b/playground/cython/src/cy_cxxtest.pyx @@ -1,8 +1,9 @@ +from cpython.version cimport PY_VERSION cimport cy_cxxtest def pyhello(): cy_cxxtest.hello() + print("Compiled with python version %s" % PY_VERSION) cdef public api void cy_hello(): print("hello cython-world!") - diff --git a/playground/cython/wscript b/playground/cython/wscript index 9b344726e4..eb2a5f7dc8 100644 --- a/playground/cython/wscript +++ b/playground/cython/wscript @@ -1,5 +1,5 @@ #!/usr/bin/env python -# encoding: ISO8859-1 +# encoding: utf-8 # Thomas Nagy, 2010 from waflib import Logs @@ -40,13 +40,13 @@ def build(ctx): target = 'cxx_lib', includes = 'cxx_lib') - # first try to build a C-based cython extension + # build a C-based cython extension ctx( features = 'c cshlib pyext', source = 'src/cy_ctest.pyx', target = 'cy_ctest', includes = 'c_lib', - use = 'c_lib') + use = 'c_lib') # then a C++-based one ctx( @@ -54,7 +54,7 @@ def build(ctx): source = 'src/cy_cxxtest.pyx', target = 'cy_cxxtest', includes = 'cxx_lib', - use = 'cxx_lib') + use = 'cxx_lib') # a C++ application which uses a C function from a cython module ctx( @@ -62,5 +62,25 @@ def build(ctx): source = 'cxx_lib/app.cxx', target = 'cy-app', includes = 'cxx_lib src', - use = 'cxx_lib' - ) + use = 'cxx_lib') + + # --------------------------------------------------------------- + # Testcase for #2244 below + + ctx.get_tgen_by_name('cy_ctest').features += ' subst_header_order' + + # a generated header for cy_ctest + ctx( + features = 'subst', + source = 'c_lib/extra_dep.h.in', + target = 'c_lib/extra_dep.h', + ) + + from waflib import TaskGen + @TaskGen.feature('subst_header_order') + @TaskGen.after('process_source') + def set_subst_before_cython_tasks(self): + tg = self.bld.get_tgen_by_name('c_lib/extra_dep.h') + tg.post() + for tsk in self.tasks: + tsk.run_after.add(tg.tasks[-1]) diff --git a/playground/daemon/daemon.py b/playground/daemon/daemon.py index 7938122c67..95e17e67ad 100644 --- a/playground/daemon/daemon.py +++ b/playground/daemon/daemon.py @@ -107,7 +107,6 @@ def enumerate(self, node): yield k except AttributeError: pass - raise StopIteration def wait_pyinotify(self, bld): diff --git a/playground/descriptions/wscript b/playground/descriptions/wscript new file mode 100644 index 0000000000..9e0b80dce6 --- /dev/null +++ b/playground/descriptions/wscript @@ -0,0 +1,34 @@ +#! /usr/bin/env python +# encoding: utf-8 +# Justin Israel, 2017 + +""" +Allow the "waf list" command to display descriptions for each target +""" + +top = '.' +out = 'build' + +def configure(ctx): + pass + +def build(bld): + bld( + rule="touch ${TGT}", + target='file.in', + description='Create the input file', + ) + + bld( + rule='cp ${SRC} ${TGT}', + source='file.in', + target='file.out', + description='Generate output file', + ) + + bld.install_files( + 'dist', + ['file.out'], + name='install', + description='Deploy files', + ) diff --git a/playground/display/wscript b/playground/display/wscript index 117afee9ce..57775d33af 100644 --- a/playground/display/wscript +++ b/playground/display/wscript @@ -1,6 +1,6 @@ #! 
/usr/bin/env python # encoding: utf-8 -# Thomas Nagy, 2011 (ita) +# Thomas Nagy, 2016 (ita) def options(opt): opt.load('compiler_c') @@ -9,43 +9,6 @@ def configure(conf): conf.load('compiler_c') def build(bld): + bld.load('print_commands') bld.program(source='main.c', target='app') - customize_ze_outputs() - -def customize_ze_outputs(): - # first, display strings, people like them - from waflib import Utils, Logs - from waflib.Context import Context - def exec_command(self, cmd, **kw): - subprocess = Utils.subprocess - kw['shell'] = isinstance(cmd, str) - if isinstance(cmd, str): - Logs.info('%s' % cmd) - else: - Logs.info('%s' % ' '.join(cmd)) # here is the change - Logs.debug('runner_env: kw=%s' % kw) - try: - if self.logger: - self.logger.info(cmd) - kw['stdout'] = kw['stderr'] = subprocess.PIPE - p = subprocess.Popen(cmd, **kw) - (out, err) = p.communicate() - if out: - self.logger.debug('out: %s' % out.decode(sys.stdout.encoding or 'iso8859-1')) - if err: - self.logger.error('err: %s' % err.decode(sys.stdout.encoding or 'iso8859-1')) - return p.returncode - else: - p = subprocess.Popen(cmd, **kw) - return p.wait() - except OSError: - return -1 - Context.exec_command = exec_command - - # and change the outputs for tasks too - from waflib.Task import Task - def display(self): - return '' # no output on empty strings - Task.__str__ = display - diff --git a/playground/distnet/server/cgi-bin/download.py b/playground/distnet/server/cgi-bin/download.py index a6074b6c49..6f73bc9890 100755 --- a/playground/distnet/server/cgi-bin/download.py +++ b/playground/distnet/server/cgi-bin/download.py @@ -26,10 +26,10 @@ def getvalue(x): length = os.stat(filename).st_size -print "Content-Type: application/octet-stream" -print "Content-Disposition: attachment; filename=f.bin" -print "Content-length: %s" % length -print "" +print("Content-Type: application/octet-stream") +print("Content-Disposition: attachment; filename=f.bin") +print("Content-length: %s" % length) +print("") with open(filename, 'rb') as f: while True: diff --git a/playground/distnet/server/cgi-bin/resolve.py b/playground/distnet/server/cgi-bin/resolve.py index 23e4801123..24b04a31d9 100755 --- a/playground/distnet/server/cgi-bin/resolve.py +++ b/playground/distnet/server/cgi-bin/resolve.py @@ -21,8 +21,8 @@ distnet.packages.constraints = distnet.packages.local_resolve(text) results = distnet.packages.get_results() -print "Content-Type: text/plain" -print "" -print "" -print results +print("Content-Type: text/plain") +print( "") +print("") +print(results) diff --git a/playground/dynamic_build/wscript b/playground/dynamic_build/wscript index 607f8c5453..296a1af41f 100644 --- a/playground/dynamic_build/wscript +++ b/playground/dynamic_build/wscript @@ -44,39 +44,16 @@ def configure(conf): def build(bld): - """ - groups will be processed one by one during the build - the progress bar display will be inaccurate - """ - - bld.post_mode = Build.POST_LAZY - import random rnd = random.randint(0, 25) bld( - rule = "sleep 2 && (echo 'int num%d = %d;' > ${TGT})" % (rnd, rnd), - target = 'foo_%d.c' % rnd, - ) + rule = "sleep 2 && (echo 'int num%d = %d;' > ${TGT})" % (rnd, rnd), + target = 'foo_%d.c' % rnd, + ) bld.add_group() - bld.program(source='main.c', target='app', dynamic_source='*.c') - -# support for the "dynamic_source" attribute follows - -from waflib import Build, Utils, TaskGen -@TaskGen.feature('c') -@TaskGen.before('process_source', 'process_rule') -def dynamic_post(self): - """ - bld(dynamic_source='*.c', ..) 
will search for source files to add to the attribute 'source' - we could also call "eval" or whatever expression - """ - if not getattr(self, 'dynamic_source', None): - return - self.source = Utils.to_list(self.source) - self.source.extend(self.path.get_bld().ant_glob(self.dynamic_source, remove=False)) - - # if headers are created dynamically, assign signatures manually: - # for x in self.path.get_bld().ant_glob('**/*.h', remove=False): x.sig = Utils.h_file(x.abspath()) + it = bld.path.get_bld().ant_glob('*.c', remove=False, quiet=True, generator=True) + src = ['main.c', it] + bld(features='c cprogram', source=src, target='app') diff --git a/playground/dynamic_build2/wscript b/playground/dynamic_build2/wscript index ebd0298daa..84bceb6019 100644 --- a/playground/dynamic_build2/wscript +++ b/playground/dynamic_build2/wscript @@ -21,14 +21,16 @@ def configure(conf): def read_files(task): # the real build files must be excluded, else they will get rebuilt for x in task.generator.bld.bldnode.ant_glob('**', excl='**/*.o app', remove=False): - x.sig = Utils.h_file(x.abspath()) + pass def build(bld): bld.post_mode = Build.POST_LAZY bld(rule='tar xvf ${SRC[0].abspath()}', source='foo.tar') - bld.add_group() + bld.add_group('foo') bld(rule=read_files, always=True, name='read_files') - bld.add_group() + bld.add_group('bar') bld.program(source='aa/main.c', target='app') + + print bld.get_group('bar') diff --git a/playground/dynamic_build3/wscript b/playground/dynamic_build3/wscript index d19684942e..6864ea9a21 100644 --- a/playground/dynamic_build3/wscript +++ b/playground/dynamic_build3/wscript @@ -11,8 +11,8 @@ An advanced dynamic build simulating a call to an external system. That external build system produces a library which is then used in the current build. """ -import os, shutil, sys, subprocess -from waflib import Utils, Build, Logs +import os, shutil, sys +from waflib import Build, Errors, Logs top = '.' 
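The dynamic_build change above drops the custom 'dynamic_source' feature and instead hands a lazy ant_glob generator straight to 'source', so the build-directory scan is deferred until the consuming task generator actually resolves its sources. A minimal wscript sketch of that pattern, assuming compiler_c is loaded and using illustrative file and target names rather than the playground ones:

    def build(bld):
        # first group: a rule that drops a generated .c file into the build directory
        bld(rule="echo 'int generated_value = 42;' > ${TGT}", target='generated.c')
        bld.add_group()
        # second group: generator=True defers the glob, so the freshly generated
        # sources are picked up when the program's source list is resolved
        lazy_sources = bld.path.get_bld().ant_glob('*.c', remove=False, quiet=True, generator=True)
        bld(features='c cprogram', source=['main.c', lazy_sources], target='app')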
out = 'build' @@ -31,8 +31,7 @@ def build(bld): tmp_dir = bld.bldnode.make_node('external_lib') # build the external library through an external process - # the "update_outputs" is unnecessary unless an external folder is given, for example tmp_dir = bld.root.make_node('/tmp/aaa') - bld(rule=some_fun, target=tmp_dir.make_node('flag.lock'), update_outputs=True) + bld(rule=some_fun, target=tmp_dir.make_node('flag.lock')) # once it is done create a second build group bld.add_group() @@ -80,7 +79,7 @@ def some_fun(task): try: task.generator.bld.cmd_and_log(cmd, cwd=cwd, quiet=0, output=0) - except Exception as e: + except Errors.WafError as e: try: print(e.stderr) except AttributeError: diff --git a/playground/dynamic_headers/wscript b/playground/dynamic_headers/wscript index 39fe189de3..82b909a77a 100644 --- a/playground/dynamic_headers/wscript +++ b/playground/dynamic_headers/wscript @@ -56,7 +56,7 @@ def runnable_status(self): tsk = mock_tasks[m_node] except KeyError: tsk = mock_tasks[m_node] = self.generator.create_task('mock', [h_node], [m_node]) - bld.producer.outstanding.insert(0, tsk) + bld.producer.outstanding.append(tsk) bld.producer.total += 1 # preprocessor cache :-/ @@ -73,7 +73,7 @@ def runnable_status(self): if add: # recompute the task signature delattr(self, 'cache_sig') - del bld.task_sigs[(self.uid(), 'imp')] + del bld.imp_sigs[self.uid()] return self.runnable_status() for x in bld.node_deps[self.uid()]: @@ -86,7 +86,7 @@ def runnable_status(self): tsk = mock_tasks[x] except KeyError: tsk = mock_tasks[x] = self.generator.create_task('mock', [h_node], [x]) - bld.producer.outstanding.insert(0, tsk) + bld.producer.outstanding.append(tsk) bld.producer.total += 1 add = True diff --git a/playground/eclipse/c/exLibC/src/exLibC.cpp b/playground/eclipse/c/exLibC/src/exLibC.cpp new file mode 100644 index 0000000000..79877a3ddc --- /dev/null +++ b/playground/eclipse/c/exLibC/src/exLibC.cpp @@ -0,0 +1,17 @@ +#include +#include +#include + +#include + +int check_smaller(int value) { + const char* foo = u8"bar"; // u8 is C++17 only + std::cout << __cplusplus << std::endl; // Check version of C++ standard + + if (value < HELLO_LIMIT) { + return 0; + } else { + return -1; + } +} + diff --git a/playground/eclipse/c/exLibC/src/include/pkg1/exLibC/exLibC.hpp b/playground/eclipse/c/exLibC/src/include/pkg1/exLibC/exLibC.hpp new file mode 100644 index 0000000000..81a1256d3b --- /dev/null +++ b/playground/eclipse/c/exLibC/src/include/pkg1/exLibC/exLibC.hpp @@ -0,0 +1,4 @@ + +#define HELLO_LIMIT 5 + +int check_smaller(int value); diff --git a/playground/eclipse/c/exProgLinkedC/src/exProgLinkedC.cpp b/playground/eclipse/c/exProgLinkedC/src/exProgLinkedC.cpp new file mode 100644 index 0000000000..86dae58c97 --- /dev/null +++ b/playground/eclipse/c/exProgLinkedC/src/exProgLinkedC.cpp @@ -0,0 +1,16 @@ +#include +#include + +#include + +int main(int argc, char *argv[]) { + printf("Hello world!\n"); + if (argc < 2) { + printf("Too few parameters passed!\n"); + } else { + int val=atoi(argv[1]); + printf("Result is: %d\n",check_smaller(val)); + } + + return 0; +} diff --git a/playground/eclipse/c/wscript b/playground/eclipse/c/wscript new file mode 100644 index 0000000000..7093bcd87e --- /dev/null +++ b/playground/eclipse/c/wscript @@ -0,0 +1,19 @@ +#! 
/usr/bin/env python +# encoding: utf-8 + +def options(opt): + # We are using C and C++ + opt.load('compiler_c compiler_cxx') + +def configure(conf): + # We are using C and C++ + conf.load('compiler_c compiler_cxx') + # Force some standards to see that IDE will follow them + conf.env.CXXFLAGS=['-std=c++17'] + conf.env.CFLAGS=['-std=c17'] + +def build(bld): + bld.shlib(source='exLibC/src/exLibC.cpp', includes='exLibC/src/include', target='exampleLibC', export_includes='exLibC/src/include/') + bld.program(source=bld.path.ant_glob('exProgLinkedC/src/*.cpp'), target='exampleProgLinkedC', use='exampleLibC') + + diff --git a/playground/eclipse/java/animals/junit/org/example/AnimalTest.java b/playground/eclipse/java/animals/junit/org/example/AnimalTest.java new file mode 100644 index 0000000000..64c7a85a26 --- /dev/null +++ b/playground/eclipse/java/animals/junit/org/example/AnimalTest.java @@ -0,0 +1,13 @@ +package org.example; + +import junit.framework.TestCase; + +public class AnimalTest extends TestCase { + public AnimalTest() { + } + + public void testAnimal() { + System.out.println("Test run successfully!"); + } +} + diff --git a/playground/eclipse/java/animals/manifest b/playground/eclipse/java/animals/manifest new file mode 100644 index 0000000000..c6488124d8 --- /dev/null +++ b/playground/eclipse/java/animals/manifest @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Created-By: Waf 1.6.2 (rev >= 10780) + diff --git a/playground/eclipse/java/animals/src/org/example/Animal.java b/playground/eclipse/java/animals/src/org/example/Animal.java new file mode 100644 index 0000000000..c6e7b92c94 --- /dev/null +++ b/playground/eclipse/java/animals/src/org/example/Animal.java @@ -0,0 +1,9 @@ +package org.example; + +class Animal { + + public String sound() { + return null; + } + +} diff --git a/playground/eclipse/java/animals/wscript b/playground/eclipse/java/animals/wscript new file mode 100644 index 0000000000..159f38746c --- /dev/null +++ b/playground/eclipse/java/animals/wscript @@ -0,0 +1,28 @@ +#! /usr/bin/env python + +def build(bld): + + t = bld( + features = 'javac jar', + name = 'animals', + + # javac + srcdir = 'src', + compat = '1.7', + + # jar + basedir = '.', + destfile = '../animals.jar', + manifest = 'manifest', + use = 'NNN', + ) + t.env.JAVACFLAGS = ['-Xlint:unchecked'] + + if bld.env.DO_JUNIT: + t.features += ' junit' + t.srcdir = 'src junit' + t.junitsrc = 'junit' + t.junitclasspath = '.' + t.use += ' JUNIT' + t.env.JUNIT_EXEC_FLAGS = ['-ea'] + diff --git a/playground/eclipse/java/cats/src/org/example/Cat.java b/playground/eclipse/java/cats/src/org/example/Cat.java new file mode 100644 index 0000000000..bf937dc9ad --- /dev/null +++ b/playground/eclipse/java/cats/src/org/example/Cat.java @@ -0,0 +1,14 @@ + +package org.example; + +import org.example.Animal; + +class Cat extends Animal { + + public String sound() { + return "Meow!"; + } + + +} + diff --git a/playground/eclipse/java/cats/wscript b/playground/eclipse/java/cats/wscript new file mode 100644 index 0000000000..5a9bdbfc0d --- /dev/null +++ b/playground/eclipse/java/cats/wscript @@ -0,0 +1,18 @@ +#! 
/usr/bin/env python + +def build(bld): + + bld(features = 'javac', + srcdir = 'src', + compat = '1.7', + use = 'animals', + name = 'cats-src', + ) + + bld(features = 'jar', + basedir = '.', + destfile = '../cats.jar', + name = 'cats', + use = 'cats-src' + ) + diff --git a/playground/eclipse/java/junit.py b/playground/eclipse/java/junit.py new file mode 100644 index 0000000000..4a271e5183 --- /dev/null +++ b/playground/eclipse/java/junit.py @@ -0,0 +1,85 @@ +#! /usr/bin/env python +# encoding: utf-8 + +""" +JUnit test system + + - executes all junit tests in the specified subtree (junitsrc) + - only if --junit is given on the commandline + - method: + - add task to compile junitsrc after compiling srcdir + - additional junit_classpath specifiable + - defaults to classpath + destdir + - add task to run junit tests after they're compiled. +""" + +import os +from waflib import Task, TaskGen, Utils, Options +from waflib.TaskGen import feature, before, after +from waflib.Configure import conf + +JUNIT_RUNNER = 'org.junit.runner.JUnitCore' + +def options(opt): + opt.add_option('--junit', action='store_true', default=False, + help='Run all junit tests', dest='junit') + opt.add_option('--junitpath', action='store', default='', + help='Give a path to the junit jar') + +def configure(ctx): + cp = ctx.options.junitpath + val = ctx.env.JUNIT_RUNNER = ctx.env.JUNIT_RUNNER or JUNIT_RUNNER + if ctx.check_java_class(val, with_classpath=cp): + ctx.fatal('Could not run junit from %r' % val) + ctx.env.CLASSPATH_JUNIT = cp + +#@feature('junit') +#@after('apply_java', 'use_javac_files') +def make_test(self): + """make the unit test task""" + if not getattr(self, 'junitsrc', None): + return + junit_task = self.create_task('junit_test') + try: + junit_task.set_run_after(self.javac_task) + except AttributeError: + pass +feature('junit')(make_test) +after('apply_java', 'use_javac_files')(make_test) + +class junit_test(Task.Task): + color = 'YELLOW' + vars = ['JUNIT_EXEC_FLAGS', 'JUNIT_RUNNER'] + + def runnable_status(self): + """ + Only run if --junit was set as an option + """ + for t in self.run_after: + if not t.hasrun: + return Task.ASK_LATER + + n = self.generator.path.find_dir(self.generator.junitsrc) + if not n: + self.generator.bld.fatal('no such junit directory %r' % self.generator.junitsrc) + self.base = n + + # make sure the tests are executed whenever the .class files change + self.inputs = n.ant_glob('**/*.java') + + ret = super(junit_test, self).runnable_status() + if ret == Task.SKIP_ME: + if getattr(Options.options, 'junit', False): + ret = Task.RUN_ME + return ret + + def run(self): + cmd = [] + cmd.extend(self.env.JAVA) + cmd.append('-classpath') + cmd.append(self.generator.javac_task.env.CLASSPATH + os.pathsep + self.generator.javac_task.env.OUTDIR) + cmd.extend(self.env.JUNIT_EXEC_FLAGS) + cmd.append(self.env.JUNIT_RUNNER) + cmd.extend([x.path_from(self.base).replace('.java', '').replace(os.sep, '.') for x in self.inputs]) + return self.exec_command(cmd) + diff --git a/playground/eclipse/java/protoc/message.proto b/playground/eclipse/java/protoc/message.proto new file mode 100644 index 0000000000..2c73cee5c8 --- /dev/null +++ b/playground/eclipse/java/protoc/message.proto @@ -0,0 +1,12 @@ +package udp.tc.tests; + +option java_package ="com.udp.tc.tests"; +option java_outer_classname= "MessageProtos"; +option cc_generic_services = false; +option java_generic_services = false; +option py_generic_services = false; + +message Message { + required int32 test = 1; + optional uint32 blah = 2; +} diff 
--git a/playground/eclipse/java/src/com/meow/Hello.java b/playground/eclipse/java/src/com/meow/Hello.java new file mode 100644 index 0000000000..f685ac5885 --- /dev/null +++ b/playground/eclipse/java/src/com/meow/Hello.java @@ -0,0 +1,34 @@ +package com.meow; // obligatory + +public class Hello +{ + int m_var = 0; + public Hello() + { + this.m_var = 2; + } + + class MyHelperClass + { + MyHelperClass() { } + int someHelperMethod(int z, int q) { return 2; } + } + + public Object makeObj(String name) + { + final String objName = "My name is " + name; + + return new Object() { + public String toString() + { + return objName; + } + }; + } + + public static void main(String args[]) + { + System.out.println("Hello, world"); + } +} + diff --git a/playground/eclipse/java/src/com/meow/package-info.java b/playground/eclipse/java/src/com/meow/package-info.java new file mode 100644 index 0000000000..116cfb495c --- /dev/null +++ b/playground/eclipse/java/src/com/meow/package-info.java @@ -0,0 +1,2 @@ +package com.meow; + diff --git a/playground/eclipse/java/src/com/meow/truc/bar/Hello.java b/playground/eclipse/java/src/com/meow/truc/bar/Hello.java new file mode 100644 index 0000000000..61af11cff3 --- /dev/null +++ b/playground/eclipse/java/src/com/meow/truc/bar/Hello.java @@ -0,0 +1,34 @@ +package com.meow.truc.bar; // obligatory + +public class Hello +{ + int m_var = 0; + public Hello() + { + this.m_var = 2; + } + + class MyHelperClass + { + MyHelperClass() { } + int someHelperMethod(int z, int q) { return 2; } + } + + public Object makeObj(String name) + { + final String objName = "My name is " + name; + + return new Object() { + public String toString() + { + return objName; + } + }; + } + + public static void main(String args[]) + { + System.out.println("Hello, world"); + } +} + diff --git a/playground/eclipse/java/src/com/meow/truc/foo/Hello.java b/playground/eclipse/java/src/com/meow/truc/foo/Hello.java new file mode 100644 index 0000000000..ab45d8c723 --- /dev/null +++ b/playground/eclipse/java/src/com/meow/truc/foo/Hello.java @@ -0,0 +1,34 @@ +package com.meow.truc.foo; // obligatory + +public class Hello +{ + int m_var = 0; + public Hello() + { + this.m_var = 2; + } + + class MyHelperClass + { + MyHelperClass() { } + int someHelperMethod(int z, int q) { return 2; } + } + + public Object makeObj(String name) + { + final String objName = "My name is " + name; + + return new Object() { + public String toString() + { + return objName; + } + }; + } + + public static void main(String args[]) + { + System.out.println("Hello, world"); + } +} + diff --git a/playground/eclipse/java/sup/org/test/Hella.java b/playground/eclipse/java/sup/org/test/Hella.java new file mode 100644 index 0000000000..08dcd1c698 --- /dev/null +++ b/playground/eclipse/java/sup/org/test/Hella.java @@ -0,0 +1,10 @@ +package org.test; // obligatory + +public class Hella +{ + public static void main(String args[]) + { + System.out.println("Hella, world"); + } +} + diff --git a/playground/eclipse/java/wscript b/playground/eclipse/java/wscript new file mode 100644 index 0000000000..d517174e11 --- /dev/null +++ b/playground/eclipse/java/wscript @@ -0,0 +1,66 @@ +#! /usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2006-2010 (ita) + +""" +java example + +The gcj compiler has a very different command-line - see playground/gcj +""" + +VERSION = '0.0.4' +APPNAME = 'java_test' + +top = '.' 
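For reference, the junit tool added above exposes --junitpath at configure time (stored as CLASSPATH_JUNIT) and only runs the generated junit_test task when --junit is passed to the build. A target opts in roughly the way the animals example does; the attribute names come from junit.py above, while the jar name and directory layout here are illustrative:

    def build(bld):
        t = bld(features='javac jar junit',
                srcdir='src junit',      # compile the tests together with the sources
                junitsrc='junit',        # subtree holding the JUnit test cases
                basedir='.',
                destfile='demo.jar',
                use='JUNIT')             # pulls in CLASSPATH_JUNIT from configure
        t.env.JUNIT_EXEC_FLAGS = ['-ea']

    # typical invocation: ./waf configure --junitpath=/path/to/junit.jar build --junit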
+out = 'build' + +def options(opt): + try: + opt.load('junit', tooldir='.') + except: + pass + +def configure(conf): + conf.load('java protoc') + + try: + ret = conf.load('junit', tooldir='.') + conf.env.DO_JUNIT = True + except: + pass + + conf.check_java_class('java.io.FileOutputStream') + conf.check_java_class('FakeClass') + + conf.env.CLASSPATH_NNN = ['aaaa.jar', 'bbbb.jar'] + conf.env.CLASSPATH_PROTOBUF = ['/tmp/cp/protobuf-java-2.5.0.jar'] + +def build(bld): + + bld(features = 'javac jar javadoc', + srcdir = 'src/', # folder containing the sources to compile + outdir = 'src', # folder where to output the classes (in the build directory) + compat = '1.6', # java compatibility version number + sourcepath = ['src', 'sup'], + classpath = ['.', '..'], + #jaropts = '-C default/src/ .', # can be used to give files + basedir = 'src', # folder containing the classes and other files to package (must match outdir) + destfile = 'foo.jar', # do not put the destfile in the folder of the java classes! + use = 'NNN', + + # javadoc + javadoc_package = ['com.meow' , 'com.meow.truc.bar', 'com.meow.truc.foo'], + javadoc_output = 'javadoc', + ) + + bld.recurse('animals cats') + + + bld( + features = 'javac protoc', + name = 'pbjava', + srcdir = 'protoc/', + source = ['protoc/message.proto'], + use = 'PROTOBUF', + protoc_includes = ['protoc']) + diff --git a/playground/eclipse/python/mod1/src/mod1/Mod1ObjOri.py b/playground/eclipse/python/mod1/src/mod1/Mod1ObjOri.py new file mode 100644 index 0000000000..9c4ffdf9c4 --- /dev/null +++ b/playground/eclipse/python/mod1/src/mod1/Mod1ObjOri.py @@ -0,0 +1,30 @@ + +""" +Class mod1 for tests + +Doctest examples: + +>>> a = Mod1Class("pippo") +>>> a.getMyName() +'pippo from obj1 _init_' + +>>> a = Mod1Class("pLuTo") +>>> a.getMyName() +'pLuTo from obj1 _init_' + +""" + +class Mod1Class(object): + + def __init__(self, pName): + """ + Constructor stores pName in myName and appends the class string marker + """ + self.val = 1 + self.myName = pName + " from obj1 _init_" + + def getMyName(self): + """ + getMyName in Mod1Class returns the name as is + """ + return self.myName diff --git a/playground/eclipse/python/mod1/src/mod1/__init__.py b/playground/eclipse/python/mod1/src/mod1/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/playground/eclipse/python/mod2/src/mod2/Mod2ObjOri.py b/playground/eclipse/python/mod2/src/mod2/Mod2ObjOri.py new file mode 100644 index 0000000000..aeb2ad780c --- /dev/null +++ b/playground/eclipse/python/mod2/src/mod2/Mod2ObjOri.py @@ -0,0 +1,13 @@ + + +class Mod2Class(object): + def __init__(self, pName): + self.myName = pName + " from obj2 _init_" + self.testVal = 2 + + def getMyName(self): + """ + getMyName in Mod2Class returns the name all uppercase + """ + + return self.myName.upper() diff --git a/playground/eclipse/python/mod2/src/mod2/__init__.py b/playground/eclipse/python/mod2/src/mod2/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/playground/eclipse/python/mod3/src/mod3/Mod3ObjOri.py b/playground/eclipse/python/mod3/src/mod3/Mod3ObjOri.py new file mode 100644 index 0000000000..ea213550a0 --- /dev/null +++ b/playground/eclipse/python/mod3/src/mod3/Mod3ObjOri.py @@ -0,0 +1,11 @@ + + +import mod2.Mod2ObjOri + +class Mod3Class(mod2.Mod2ObjOri.Mod2Class): + + def getMyName(self): + """ + getMyName in Mod3Class returns the name all lowercase + """ + return self.myName.lower() diff --git a/playground/eclipse/python/mod3/src/mod3/__init__.py 
b/playground/eclipse/python/mod3/src/mod3/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/playground/eclipse/python/prg/src/prg1.py b/playground/eclipse/python/prg/src/prg1.py new file mode 100644 index 0000000000..65798c5c2e --- /dev/null +++ b/playground/eclipse/python/prg/src/prg1.py @@ -0,0 +1,24 @@ +#! /usr/bin/env python +# encoding: utf-8 + +import mod1.Mod1ObjOri +import mod2.Mod2ObjOri +import mod3.Mod3ObjOri + + +if __name__ == '__main__': + print("Creating obj1 with string pippo") + obj1 = mod1.Mod1ObjOri.Mod1Class("pippo") + + a = 1 + + print("Creating obj1 with string pLUto") + obj2 = mod2.Mod2ObjOri.Mod2Class("pLUto") + + print("Creating obj3 with string pLUto") + obj3 = mod3.Mod3ObjOri.Mod3Class("pLUto") + + print("Hello World, this are my results:") + print(obj1.getMyName()) + print(obj2.getMyName()) + print(obj3.getMyName()) diff --git a/playground/eclipse/python/withqt5/src/firstgui.ui b/playground/eclipse/python/withqt5/src/firstgui.ui new file mode 100644 index 0000000000..cb7f9d30b2 --- /dev/null +++ b/playground/eclipse/python/withqt5/src/firstgui.ui @@ -0,0 +1,130 @@ + + + myfirstgui + + + + 0 + 0 + 411 + 247 + + + + My First Gui! + + + + + 20 + 210 + 381 + 32 + + + + Qt::Horizontal + + + QDialogButtonBox::Close + + + + + + 10 + 10 + 101 + 21 + + + + + + + 120 + 10 + 281 + 192 + + + + + + + 10 + 180 + 101 + 23 + + + + clear + + + + + + 10 + 40 + 101 + 23 + + + + add + + + + + + + buttonBox + accepted() + myfirstgui + accept() + + + 258 + 274 + + + 157 + 274 + + + + + buttonBox + rejected() + myfirstgui + reject() + + + 316 + 260 + + + 286 + 274 + + + + + clearBtn + clicked() + listWidget + clear() + + + 177 + 253 + + + 177 + 174 + + + + + diff --git a/playground/eclipse/python/withqt5/src/sample.py b/playground/eclipse/python/withqt5/src/sample.py new file mode 100644 index 0000000000..8f5f2f5b15 --- /dev/null +++ b/playground/eclipse/python/withqt5/src/sample.py @@ -0,0 +1,24 @@ +import sys +from PySide2 import QtCore, QtGui, QtWidgets +from firstgui import Ui_myfirstgui + +class MyFirstGuiProgram(Ui_myfirstgui): + def __init__(self, dialog): + Ui_myfirstgui.__init__(self) + self.setupUi(dialog) + + # Connect "add" button with a custom function (addInputTextToListbox) + self.addBtn.clicked.connect(self.addInputTextToListbox) + + def addInputTextToListbox(self): + txt = self.myTextInput.text() + self.listWidget.addItem(txt) + +if __name__ == '__main__': + app = QtWidgets.QApplication(sys.argv) + dialog = QtWidgets.QDialog() + + prog = MyFirstGuiProgram(dialog) + + dialog.show() + sys.exit(app.exec_()) diff --git a/playground/eclipse/python/wscript b/playground/eclipse/python/wscript new file mode 100644 index 0000000000..7576137f4b --- /dev/null +++ b/playground/eclipse/python/wscript @@ -0,0 +1,21 @@ +#! 
/usr/bin/env python +# encoding: utf-8 + + +def options(opt): + opt.load('python pyqt5') + +def configure(conf): + conf.load('python pyqt5') + conf.check_python_version(minver=(2, 7, 0)) + + +def build(bld): + bld(name='mod1', features='py', source=bld.path.ant_glob('mod1/src/**/*.py'), install_from='mod1/src') + bld(name='mod2', features='py', source=bld.path.ant_glob('mod2/src/**/*.py'), install_from='mod2/src') + bld(name='mod3', features='py', source=bld.path.ant_glob('mod3/src/**/*.py'), install_from='mod3/src') + + bld(name='withqt5', features='py pyqt5', source=bld.path.ant_glob('withqt5/src/**/*'), install_from='withqt5/src') + + # Example program with module dependencies + bld(name='prg', features='py', source=bld.path.ant_glob('prg/src/**/*.py'), install_from='prg/src', use='mod1 mod2 mod3') diff --git a/playground/eclipse/wscript b/playground/eclipse/wscript new file mode 100644 index 0000000000..f7601a5ab2 --- /dev/null +++ b/playground/eclipse/wscript @@ -0,0 +1,65 @@ +#! /usr/bin/env python +# encoding: utf-8 +# Federico Pellegrin 2017 (fedepell) + +""" +Example source tree to be used with eclipse extra. + +First of all load the extra: + +... +def options(opt): + opt.load('eclipse') + +def configure(conf): + conf.load('eclipse') +... + +Then after configuring the project you can anytime run: + +waf eclipse + +This will generate the needed configuration files for Eclipse: +-) .project is generic Eclipse project file +-) .cproject for C/C++ CDT +-) .classpath for Java JDT +-) .pydevproject for Pydev + +If CDT is in the project (so at least one C/C++ build) then CDT builder +will be used to call waf as it is more versatile. If just JDT/PYydev are +used then an Eclipse external builder is defined that calls waf. This is +created in the file .externalToolBuilders/Waf_Builder.launch. + +The example contains three directories with different supported languages +to demonstrate the features working for each of them, most importantly the +automatic addition of search paths for each language so referencing objects +or files in the IDE is done correctly. This is equivalent to configure +Eclipse by hand using Project->Properties and then each language menu. + +Also the generic invocation for building and cleaning are redefined so +waf is called correctly when the respective actions are requested. + +To test just the external builder definition just remove "c" from the +module_list below. +""" + + +module_list = 'c java python' +out = 'build' + +def options(opt): + opt.load('eclipse') + # We recurse options in our submodules + opt.recurse(module_list) + + +def configure(conf): + conf.env.ECLIPSE_EXTRA_TARGETS = ['test', 'lint', 'foo --bar'] + conf.load('eclipse') + # We recurse configurations in our submodules + conf.recurse(module_list) + + +def build(bld): + bld.recurse(module_list) + diff --git a/playground/embedded_resources/wscript b/playground/embedded_resources/wscript index 3a872cbd13..b21e9f7b7a 100644 --- a/playground/embedded_resources/wscript +++ b/playground/embedded_resources/wscript @@ -24,6 +24,7 @@ def build(bld): name='example', source='main.c', features='file_to_object', + includes_nodes=[], ) bld( target = 'app', diff --git a/playground/erlang/hello.erl b/playground/erlang/hello.erl index ae71681f09..d58a3731ab 100644 --- a/playground/erlang/hello.erl +++ b/playground/erlang/hello.erl @@ -1,5 +1,36 @@ % what a weird language ... 
:-) +%%% @author Przemyslaw Rzepecki +%%% @version 0.01 + +%%% @doc == Hello World, Example Module == +%%% This module contains some Erlang code for WAF build system support for +%%% Erlang language. +%%% @end -module(hello). --export([hello_world/0]). -hello_world() -> io:fwrite("hello, world\n"). +-export([say_hello/1, hello_world/0]). +-include("hello.hrl"). + +%%% ########################################################################### +%% @doc Returns a greetings string +%% +%% Some more specific description of the function should be written here... +%% +%% See http://erlang.org/doc/apps/edoc/users_guide.html for the complete Edoc +%% guide. +%% +%% @end +%%% ---------------------------------------------------------- +say_hello(waf) -> "Hello WAF, cool to see you!"; +say_hello(make) -> "Oh Make, you again..."; +say_hello(Other) -> "Hi " ++ Other. + + +%%% ########################################################################### +%% @doc Print a 'Hello World' string to stdout of the program.. +%% +%% This is an Erlang Version of the famous hello_world function. +%% +%% @end +%%% ---------------------------------------------------------- +hello_world() -> io:fwrite("~p~n", [?HELLO_WORLD]). diff --git a/playground/erlang/hello_eunit.erl b/playground/erlang/hello_eunit.erl new file mode 100644 index 0000000000..024b76d27b --- /dev/null +++ b/playground/erlang/hello_eunit.erl @@ -0,0 +1,10 @@ +-module(hello_eunit). +-include_lib("eunit/include/eunit.hrl"). +-include("hello.hrl"). + +example_test_() -> + [ + ?_assert(hello:say_hello(waf) =:= "Hello WAF, cool to see you!"), + ?_assert(hello:say_hello(make) =:= "Oh Make, you again..."), + ?_assert(hello:say_hello("Mike") =:= "Hi Mike") + ]. diff --git a/playground/erlang/inc/hello.hrl b/playground/erlang/inc/hello.hrl new file mode 100644 index 0000000000..c0dd4dc179 --- /dev/null +++ b/playground/erlang/inc/hello.hrl @@ -0,0 +1 @@ +-define(HELLO_WORLD, "hello, world"). diff --git a/playground/erlang/wscript b/playground/erlang/wscript index c594f71138..2cf077d981 100644 --- a/playground/erlang/wscript +++ b/playground/erlang/wscript @@ -4,5 +4,8 @@ def configure(conf): conf.load('erlang') def build(bld): - bld(source='hello.erl') + bld(source='hello.erl', includes=['inc']) + # This requires EUnit. The Erlangs EUnit header files are available erlang-dev package. + bld(source=['hello_eunit.erl', 'hello.beam'], includes=['inc'], features="eunit") + bld(source=['hello.erl'], includes=['inc'], features="edoc") diff --git a/playground/exclusive_link/excl.py b/playground/exclusive_link/excl.py index 732534b61c..351ddb298d 100644 --- a/playground/exclusive_link/excl.py +++ b/playground/exclusive_link/excl.py @@ -1,6 +1,6 @@ #! /usr/bin/env python # encoding: utf-8 -# Thomas Nagy, 2011 (ita) +# Thomas Nagy, 2011-2015 (ita) """ Prevents link tasks from executing in parallel. 
This can be used to @@ -31,8 +31,12 @@ def runnable_status(self): ret = Task.ASK_LATER if count >= MAX: return ret + + self.m1_excl = getattr(self, 'm1_excl', 0) + 1 ret = old_runnable_status(self) - if ret == Task.RUN_ME: + self.m1_excl -= 1 + + if ret == Task.RUN_ME and not self.m1_excl: lock.acquire() count += 1 lock.release() @@ -43,11 +47,14 @@ def runnable_status(self): def run(self): global count, lock try: + self.m2_excl = getattr(self, 'm2_excl', 0) + 1 ret = old_run(self) finally: - lock.acquire() - count -= 1 - lock.release() + self.m2_excl -= 1 + if not self.m2_excl: + lock.acquire() + count -= 1 + lock.release() return ret cls.run = run diff --git a/playground/folder_hashing/wscript b/playground/folder_hashing/wscript index f66101fbcb..cf1d73f4fd 100644 --- a/playground/folder_hashing/wscript +++ b/playground/folder_hashing/wscript @@ -8,5 +8,5 @@ def build(bld): node = bld.path.get_bld().make_node('test/bar/stuff') bld(features='mkdir', target=node) - bld(rule='du ${SRC}', source=node) + bld(rule='du ${SRC}', source=[node]) diff --git a/docs/book/examples/advbuild_variant_env/main.c b/playground/gdbus/main.c similarity index 100% rename from docs/book/examples/advbuild_variant_env/main.c rename to playground/gdbus/main.c diff --git a/playground/gdbus/test.xml b/playground/gdbus/test.xml new file mode 100644 index 0000000000..21f4c8ebd0 --- /dev/null +++ b/playground/gdbus/test.xml @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/playground/gdbus/wscript b/playground/gdbus/wscript new file mode 100644 index 0000000000..b5a8de3c4d --- /dev/null +++ b/playground/gdbus/wscript @@ -0,0 +1,24 @@ +#! /usr/bin/env python +# encoding: utf-8 +# Copyright Garmin International or its subsidiaries, 2018 + +VERSION = '1.0' + +def options(opt): + opt.load('compiler_c') + +def configure(conf): + conf.load('compiler_c gdbus') + conf.check_cfg(package='gio-2.0', args='--cflags --libs') + conf.check_cfg(package='gio-unix-2.0', args='--cflags --libs') + conf.check_cfg(package='glib-2.0', args='--cflags --libs') + +def build(bld): + tg = bld.program( + includes = '.', + source = bld.path.ant_glob('*.c'), + target = 'gnome-hello', + use = 'GIO-2.0 GIO-UNIX-2.0 GLIB-2.0') + + tg.add_gdbus_file('test.xml', 'test_prefix', 'glib_server') + diff --git a/playground/genpybind/example.cpp b/playground/genpybind/example.cpp new file mode 100644 index 0000000000..a237e56f8f --- /dev/null +++ b/playground/genpybind/example.cpp @@ -0,0 +1,9 @@ +#include "example.h" + +constexpr int Example::not_exposed; + +int Example::calculate(int some_argument) const { return _value + some_argument; } + +int Example::getSomething() const { return _value; } + +void Example::setSomething(int value) { _value = value; } diff --git a/playground/genpybind/example.h b/playground/genpybind/example.h new file mode 100644 index 0000000000..9563637722 --- /dev/null +++ b/playground/genpybind/example.h @@ -0,0 +1,20 @@ +#pragma once + +#include "genpybind.h" + +class GENPYBIND(visible) Example { +public: + static constexpr int GENPYBIND(hidden) not_exposed = 10; + + /// \brief Do a complicated calculation. 
+ int calculate(int some_argument = 5) const; + + GENPYBIND(getter_for(something)) + int getSomething() const; + + GENPYBIND(setter_for(something)) + void setSomething(int value); + +private: + int _value = 0; +}; diff --git a/playground/genpybind/example_test.py b/playground/genpybind/example_test.py new file mode 100644 index 0000000000..1384390945 --- /dev/null +++ b/playground/genpybind/example_test.py @@ -0,0 +1,9 @@ +import pyexample as m + + +def test_example(): + obj = m.Example() + obj.something = 42 + assert obj.something == 42 + assert obj.calculate() == 47 # with default argument + assert obj.calculate(2) == 44 diff --git a/playground/genpybind/wscript b/playground/genpybind/wscript new file mode 100644 index 0000000000..1732ec83d1 --- /dev/null +++ b/playground/genpybind/wscript @@ -0,0 +1,37 @@ +#!/usr/bin/env python + + +def options(opt): + opt.load('python') + opt.load('compiler_cxx') + opt.load('genpybind') + + +def configure(cfg): + cfg.load('python') + cfg.load('compiler_cxx') + cfg.check_python_version((2, 7)) + cfg.check_python_headers() + cfg.load('genpybind') + + cfg.check(compiler='cxx', + features='cxx pyext', + uselib_store='PYBIND11GENPYBIND_EXAMPLE', + mandatory=True, + header_name='pybind11/pybind11.h') + + +def build(bld): + bld(target='example_inc', + export_includes='.') + + bld.shlib(target='example', + source='example.cpp', + features='use', + use='example_inc') + + bld(target='pyexample', + source='example.h', + genpybind_tags='genpybind_example', + features='use genpybind cxx cxxshlib pyext', + use=['PYBIND11GENPYBIND_EXAMPLE', 'example']) diff --git a/playground/go/LICENSE b/playground/go/LICENSE deleted file mode 100644 index 6a66aea5ea..0000000000 --- a/playground/go/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2009 The Go Authors. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/playground/go/gmp.go b/playground/go/gmp.go deleted file mode 100644 index c138fa13fb..0000000000 --- a/playground/go/gmp.go +++ /dev/null @@ -1,278 +0,0 @@ -// Copyright 2009 The Go Authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package gmp - -// #include -// #include -// #cgo LDFLAGS: -lgmp -import "C" - -import ( - "os" - "unsafe" -) - -/* - * one of a kind - */ - -// An Int represents a signed multi-precision integer. -// The zero value for an Int represents the value 0. -type Int struct { - i C.mpz_t - init bool -} - -// NewInt returns a new Int initialized to x. -func NewInt(x int64) *Int { return new(Int).SetInt64(x) } - -// Int promises that the zero value is a 0, but in gmp -// the zero value is a crash. To bridge the gap, the -// init bool says whether this is a valid gmp value. -// doinit initializes z.i if it needs it. This is not inherent -// to FFI, just a mismatch between Go's convention of -// making zero values useful and gmp's decision not to. -func (z *Int) doinit() { - if z.init { - return - } - z.init = true - C.mpz_init(&z.i[0]) -} - -// Bytes returns z's representation as a big-endian byte array. -func (z *Int) Bytes() []byte { - b := make([]byte, (z.Len()+7)/8) - n := C.size_t(len(b)) - C.mpz_export(unsafe.Pointer(&b[0]), &n, 1, 1, 1, 0, &z.i[0]) - return b[0:n] -} - -// Len returns the length of z in bits. 0 is considered to have length 1. -func (z *Int) Len() int { - z.doinit() - return int(C.mpz_sizeinbase(&z.i[0], 2)) -} - -// Set sets z = x and returns z. -func (z *Int) Set(x *Int) *Int { - z.doinit() - C.mpz_set(&z.i[0], &x.i[0]) - return z -} - -// SetBytes interprets b as the bytes of a big-endian integer -// and sets z to that value. -func (z *Int) SetBytes(b []byte) *Int { - z.doinit() - if len(b) == 0 { - z.SetInt64(0) - } else { - C.mpz_import(&z.i[0], C.size_t(len(b)), 1, 1, 1, 0, unsafe.Pointer(&b[0])) - } - return z -} - -// SetInt64 sets z = x and returns z. -func (z *Int) SetInt64(x int64) *Int { - z.doinit() - // TODO(rsc): more work on 32-bit platforms - C.mpz_set_si(&z.i[0], C.long(x)) - return z -} - -// SetString interprets s as a number in the given base -// and sets z to that value. The base must be in the range [2,36]. -// SetString returns an error if s cannot be parsed or the base is invalid. -func (z *Int) SetString(s string, base int) os.Error { - z.doinit() - if base < 2 || base > 36 { - return os.EINVAL - } - p := C.CString(s) - defer C.free(unsafe.Pointer(p)) - if C.mpz_set_str(&z.i[0], p, C.int(base)) < 0 { - return os.EINVAL - } - return nil -} - -// String returns the decimal representation of z. -func (z *Int) String() string { - if z == nil { - return "nil" - } - z.doinit() - p := C.mpz_get_str(nil, 10, &z.i[0]) - s := C.GoString(p) - C.free(unsafe.Pointer(p)) - return s -} - -func (z *Int) destroy() { - if z.init { - C.mpz_clear(&z.i[0]) - } - z.init = false -} - - -/* - * arithmetic - */ - -// Add sets z = x + y and returns z. -func (z *Int) Add(x, y *Int) *Int { - x.doinit() - y.doinit() - z.doinit() - C.mpz_add(&z.i[0], &x.i[0], &y.i[0]) - return z -} - -// Sub sets z = x - y and returns z. -func (z *Int) Sub(x, y *Int) *Int { - x.doinit() - y.doinit() - z.doinit() - C.mpz_sub(&z.i[0], &x.i[0], &y.i[0]) - return z -} - -// Mul sets z = x * y and returns z. -func (z *Int) Mul(x, y *Int) *Int { - x.doinit() - y.doinit() - z.doinit() - C.mpz_mul(&z.i[0], &x.i[0], &y.i[0]) - return z -} - -// Div sets z = x / y, rounding toward zero, and returns z. -func (z *Int) Div(x, y *Int) *Int { - x.doinit() - y.doinit() - z.doinit() - C.mpz_tdiv_q(&z.i[0], &x.i[0], &y.i[0]) - return z -} - -// Mod sets z = x % y and returns z. 
-// Like the result of the Go % operator, z has the same sign as x. -func (z *Int) Mod(x, y *Int) *Int { - x.doinit() - y.doinit() - z.doinit() - C.mpz_tdiv_r(&z.i[0], &x.i[0], &y.i[0]) - return z -} - -// Lsh sets z = x << s and returns z. -func (z *Int) Lsh(x *Int, s uint) *Int { - x.doinit() - z.doinit() - C.mpz_mul_2exp(&z.i[0], &x.i[0], C.mp_bitcnt_t(s)) - return z -} - -// Rsh sets z = x >> s and returns z. -func (z *Int) Rsh(x *Int, s uint) *Int { - x.doinit() - z.doinit() - C.mpz_div_2exp(&z.i[0], &x.i[0], C.mp_bitcnt_t(s)) - return z -} - -// Exp sets z = x^y % m and returns z. -// If m == nil, Exp sets z = x^y. -func (z *Int) Exp(x, y, m *Int) *Int { - m.doinit() - x.doinit() - y.doinit() - z.doinit() - if m == nil { - C.mpz_pow_ui(&z.i[0], &x.i[0], C.mpz_get_ui(&y.i[0])) - } else { - C.mpz_powm(&z.i[0], &x.i[0], &y.i[0], &m.i[0]) - } - return z -} - -func (z *Int) Int64() int64 { - if !z.init { - return 0 - } - return int64(C.mpz_get_si(&z.i[0])) -} - - -// Neg sets z = -x and returns z. -func (z *Int) Neg(x *Int) *Int { - x.doinit() - z.doinit() - C.mpz_neg(&z.i[0], &x.i[0]) - return z -} - -// Abs sets z to the absolute value of x and returns z. -func (z *Int) Abs(x *Int) *Int { - x.doinit() - z.doinit() - C.mpz_abs(&z.i[0], &x.i[0]) - return z -} - - -/* - * functions without a clear receiver - */ - -// CmpInt compares x and y. The result is -// -// -1 if x < y -// 0 if x == y -// +1 if x > y -// -func CmpInt(x, y *Int) int { - x.doinit() - y.doinit() - switch cmp := C.mpz_cmp(&x.i[0], &y.i[0]); { - case cmp < 0: - return -1 - case cmp == 0: - return 0 - } - return +1 -} - -// DivModInt sets q = x / y and r = x % y. -func DivModInt(q, r, x, y *Int) { - q.doinit() - r.doinit() - x.doinit() - y.doinit() - C.mpz_tdiv_qr(&q.i[0], &r.i[0], &x.i[0], &y.i[0]) -} - -// GcdInt sets d to the greatest common divisor of a and b, -// which must be positive numbers. -// If x and y are not nil, GcdInt sets x and y such that d = a*x + b*y. -// If either a or b is not positive, GcdInt sets d = x = y = 0. -func GcdInt(d, x, y, a, b *Int) { - d.doinit() - x.doinit() - y.doinit() - a.doinit() - b.doinit() - C.mpz_gcdext(&d.i[0], &x.i[0], &y.i[0], &a.i[0], &b.i[0]) -} - -// ProbablyPrime performs n Miller-Rabin tests to check whether z is prime. -// If it returns true, z is prime with probability 1 - 1/4^n. -// If it returns false, z is not prime. 
-func (z *Int) ProbablyPrime(n int) bool { - z.doinit() - return int(C.mpz_probab_prime_p(&z.i[0], C.int(n))) > 0 -} diff --git a/playground/go/gmp/impl.go b/playground/go/gmp/impl.go deleted file mode 100644 index 07559f2f98..0000000000 --- a/playground/go/gmp/impl.go +++ /dev/null @@ -1,7 +0,0 @@ -package gmp - -/* - #include -*/ -import "C" -// EOF diff --git a/playground/go/main.go b/playground/go/main.go deleted file mode 100644 index 76a8fd7a54..0000000000 --- a/playground/go/main.go +++ /dev/null @@ -1,10 +0,0 @@ -// By: Tom Wambold -package main - -import "other" - -func main() { - a := other.Vector3 {1, 2, 3}; - a.Size(); - return; -} diff --git a/playground/go/my-c-lib-2.go b/playground/go/my-c-lib-2.go deleted file mode 100644 index 627356551a..0000000000 --- a/playground/go/my-c-lib-2.go +++ /dev/null @@ -1,16 +0,0 @@ -package foo - -/* - #cgo LDFLAGS: -lmy-c-lib - - #include "my-c-lib.h" - #include -*/ -import "C" -import "unsafe" - -func MyBye(msg string) { - c_msg := C.CString(msg) - defer C.free(unsafe.Pointer(c_msg)) - C.my_c_bye(c_msg) -} diff --git a/playground/go/my-c-lib.go b/playground/go/my-c-lib.go deleted file mode 100644 index 31ad7e8ac7..0000000000 --- a/playground/go/my-c-lib.go +++ /dev/null @@ -1,16 +0,0 @@ -package foo - -/* - #cgo LDFLAGS: -lmy-c-lib - - #include "my-c-lib.h" - #include -*/ -import "C" -import "unsafe" - -func MyHello(msg string) { - c_msg := C.CString(msg) - defer C.free(unsafe.Pointer(c_msg)) - C.my_c_hello(c_msg) -} diff --git a/playground/go/my-c-lib/includes/my-c-lib.h b/playground/go/my-c-lib/includes/my-c-lib.h deleted file mode 100644 index 8623b51696..0000000000 --- a/playground/go/my-c-lib/includes/my-c-lib.h +++ /dev/null @@ -1,7 +0,0 @@ -#ifndef MY_C_LIB_H -#define MY_C_LIB_H 1 - -void my_c_hello(const char *msg); -void my_c_bye(const char *msg); - -#endif diff --git a/playground/go/my-c-lib/src/foo.c b/playground/go/my-c-lib/src/foo.c deleted file mode 100644 index de0c4e0046..0000000000 --- a/playground/go/my-c-lib/src/foo.c +++ /dev/null @@ -1,15 +0,0 @@ -/* simple C library to exercize CGO */ - -#include -#include "my-c-lib.h" - -void my_c_hello(const char *msg) -{ - fprintf(stdout, msg); -} - -void my_c_bye(const char *msg) -{ - fprintf(stdout, msg); -} - diff --git a/playground/go/my-cgo-test.go b/playground/go/my-cgo-test.go deleted file mode 100644 index 556b4247e8..0000000000 --- a/playground/go/my-cgo-test.go +++ /dev/null @@ -1,10 +0,0 @@ -package main - -import "foo" - -func main() { - foo.MyHello("hello from my-c-lib\n") - foo.MyBye("bye from my-c-lib\n") -} - -// EOF diff --git a/playground/go/other/a.go b/playground/go/other/a.go deleted file mode 100644 index 1af17d5e41..0000000000 --- a/playground/go/other/a.go +++ /dev/null @@ -1,11 +0,0 @@ -// By: Tom Wambold -package other - -import "math" - -// A three-value vector (i, j, k) -type Vector3 [3]float64 - -func (a *Vector3) Size() float64 { - return math.Sqrt(float64(a[0] * a[0] + a[1] * a[1] + a[2] * a[2])); -} diff --git a/playground/go/other/b.go b/playground/go/other/b.go deleted file mode 100644 index 6c2eb82093..0000000000 --- a/playground/go/other/b.go +++ /dev/null @@ -1,6 +0,0 @@ -package other - -type Foo interface { - Get(i int, j int) float64; - Set(i int, j int, v float64); -} diff --git a/playground/go/pi.go b/playground/go/pi.go deleted file mode 100644 index 45f61abbd6..0000000000 --- a/playground/go/pi.go +++ /dev/null @@ -1,104 +0,0 @@ -/* -Redistribution and use in source and binary forms, with or without -modification, are permitted provided 
that the following conditions are met: - - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - - * Neither the name of "The Computer Language Benchmarks Game" nor the - name of "The Computer Language Shootout Benchmarks" nor the names of - its contributors may be used to endorse or promote products derived - from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE -LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -POSSIBILITY OF SUCH DAMAGE. -*/ - -/* The Computer Language Benchmarks Game - * http://shootout.alioth.debian.org/ - * - * contributed by The Go Authors. - * based on pidigits.c (by Paolo Bonzini & Sean Bartlett, - * modified by Michael Mellor) - */ - -package main - -import ( - big "gmp" - "fmt" - "runtime" -) - -var ( - tmp1 = big.NewInt(0) - tmp2 = big.NewInt(0) - numer = big.NewInt(1) - accum = big.NewInt(0) - denom = big.NewInt(1) - ten = big.NewInt(10) -) - -func extractDigit() int64 { - if big.CmpInt(numer, accum) > 0 { - return -1 - } - tmp1.Lsh(numer, 1).Add(tmp1, numer).Add(tmp1, accum) - big.DivModInt(tmp1, tmp2, tmp1, denom) - tmp2.Add(tmp2, numer) - if big.CmpInt(tmp2, denom) >= 0 { - return -1 - } - return tmp1.Int64() -} - -func nextTerm(k int64) { - y2 := k*2 + 1 - accum.Add(accum, tmp1.Lsh(numer, 1)) - accum.Mul(accum, tmp1.SetInt64(y2)) - numer.Mul(numer, tmp1.SetInt64(k)) - denom.Mul(denom, tmp1.SetInt64(y2)) -} - -func eliminateDigit(d int64) { - accum.Sub(accum, tmp1.Mul(denom, tmp1.SetInt64(d))) - accum.Mul(accum, ten) - numer.Mul(numer, ten) -} - -func main() { - i := 0 - k := int64(0) - for { - d := int64(-1) - for d < 0 { - k++ - nextTerm(k) - d = extractDigit() - } - eliminateDigit(d) - fmt.Printf("%c", d+'0') - - if i++; i%50 == 0 { - fmt.Printf("\n") - if i >= 1000 { - break - } - } - } - - fmt.Printf("\n%d calls; bit sizes: %d %d %d\n", runtime.Cgocalls(), numer.Len(), accum.Len(), denom.Len()) -} diff --git a/playground/go/stdio/chain.go b/playground/go/stdio/chain.go deleted file mode 100644 index 5561cb1c61..0000000000 --- a/playground/go/stdio/chain.go +++ /dev/null @@ -1,43 +0,0 @@ -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Pass numbers along a chain of threads. - -package main - -import ( - "runtime" - "cgo/stdio" - "strconv" -) - -const N = 10 -const R = 5 - -func link(left chan<- int, right <-chan int) { - // Keep the links in dedicated operating system - // threads, so that this program tests coordination - // between pthreads and not just goroutines. 
- runtime.LockOSThread() - for { - v := <-right - stdio.Stdout.WriteString(strconv.Itoa(v) + "\n") - left <- 1 + v - } -} - -func main() { - leftmost := make(chan int) - var left chan int - right := leftmost - for i := 0; i < N; i++ { - left, right = right, make(chan int) - go link(left, right) - } - for i := 0; i < R; i++ { - right <- 0 - x := <-leftmost - stdio.Stdout.WriteString(strconv.Itoa(x) + "\n") - } -} diff --git a/playground/go/stdio/fib.go b/playground/go/stdio/fib.go deleted file mode 100644 index d1692c8616..0000000000 --- a/playground/go/stdio/fib.go +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Compute Fibonacci numbers with two goroutines -// that pass integers back and forth. No actual -// concurrency, just threads and synchronization -// and foreign code on multiple pthreads. - -package main - -import ( - "runtime" - "cgo/stdio" - "strconv" -) - -func fibber(c, out chan int64, i int64) { - // Keep the fibbers in dedicated operating system - // threads, so that this program tests coordination - // between pthreads and not just goroutines. - runtime.LockOSThread() - - if i == 0 { - c <- i - } - for { - j := <-c - stdio.Stdout.WriteString(strconv.Itoa64(j) + "\n") - out <- j - <-out - i += j - c <- i - } -} - -func main() { - c := make(chan int64) - out := make(chan int64) - go fibber(c, out, 0) - go fibber(c, out, 1) - <-out - for i := 0; i < 90; i++ { - out <- 1 - <-out - } -} diff --git a/playground/go/stdio/file.go b/playground/go/stdio/file.go deleted file mode 100644 index 021cbf909c..0000000000 --- a/playground/go/stdio/file.go +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -/* -A trivial example of wrapping a C library in Go. -For a more complex example and explanation, -see ../gmp/gmp.go. -*/ - -package stdio - -/* -#include -#include -#include -#include - -char* greeting = "hello, world"; -*/ -import "C" -import "unsafe" - -type File C.FILE - -var Stdout = (*File)(C.stdout) -var Stderr = (*File)(C.stderr) - -// Test reference to library symbol. -// Stdout and stderr are too special to be a reliable test. -var myerr = C.sys_errlist - -func (f *File) WriteString(s string) { - p := C.CString(s) - C.fputs(p, (*C.FILE)(f)) - C.free(unsafe.Pointer(p)) - f.Flush() -} - -func (f *File) Flush() { - C.fflush((*C.FILE)(f)) -} - -var Greeting = C.GoString(C.greeting) diff --git a/playground/go/stdio/hello.go b/playground/go/stdio/hello.go deleted file mode 100644 index 75244e8bdc..0000000000 --- a/playground/go/stdio/hello.go +++ /dev/null @@ -1,11 +0,0 @@ -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package main - -import "cgo/stdio" - -func main() { - stdio.Stdout.WriteString(stdio.Greeting + "\n") -} diff --git a/playground/go/wscript b/playground/go/wscript deleted file mode 100644 index 842531f905..0000000000 --- a/playground/go/wscript +++ /dev/null @@ -1,147 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Tom Wambold tom5760 gmail -# Thomas Nagy, 2010 (ita) - -""" -if libgmp is present, try building with 'waf --exe' -""" - -top = '.' 
-out = 'build' - -def options(opt): - opt.add_option('--exe', action='store_true', default=False, help='Execute the program after it is compiled') - -def configure(ctx): - ctx.load('go') - - # the compiler keeps changing, think twice before trying it for a serious project - ctx.env.TRY_CGO = False - - return - try: - ctx.load('gcc') - ctx.check_cc(fragment='#include \nint main() {return 0;}\n', uselib_store='GMP', lib='gmp') - except ctx.errors.ConfigurationError: - ctx.env.TRY_CGO = False - else: - ctx.env.TRY_CGO = True - -def build(ctx): - - ctx( - features = 'go gopackage', - target = 'other', - source = [ - 'other/a.go', - 'other/b.go', # gopack sux - ], - ) - - ctx( - features = 'go goprogram', - target = 'test', - use = 'other', - source = 'main.go', - includes = '.', - ) - - # NOTE: if you use ant_glob, use it like this: bld.path.ant_glob('*.go', excl='*_test.go') - - if ctx.env.TRY_CGO: - ctx.read_shlib('gmp') - ctx( - features = 'c cshlib', - source = 'my-c-lib/src/foo.c', - target = 'my-c-lib', - includes = 'my-c-lib/includes', - export_includes=['my-c-lib/includes'], - ) - ctx( - features = 'cgopackage', - name = 'go-gmp', - target = 'gmp', - source = 'gmp.go gmp/impl.go', - use = 'gmp', - ) - - # testing multiple cgopackage targets... - ctx( - features = 'cgopackage', - name = 'go-stdio', - target = 'cgo/stdio', - source = 'stdio/file.go', - ) - - ctx( - features = 'cgopackage', - name = 'go-my-c-lib', - target = 'foo', - source = 'my-c-lib.go my-c-lib-2.go', - use = ['my-c-lib',], - ) - - #ctx.add_group() - ctx(features='go goprogram uselib', - source='pi.go', - target='pi', - use='go-gmp', - #gocflags=['-I.', '-I..'], - ) - - ctx( - features='go goprogram uselib', - source='stdio/hello.go', - target='go-stdio-hello', - use='go-stdio', - ) - - ctx( - features='go goprogram uselib', - source='stdio/fib.go', - target='go-stdio-fib', - use='go-stdio', - ) - - ctx( - features='go goprogram uselib', - source='stdio/chain.go', - target='go-stdio-chain', - use='go-stdio', - ) - - ctx( - features='go goprogram uselib', - source='my-cgo-test.go', - target='my-cgo-test', - use='go-my-c-lib', - ) - - from waflib import Options, Utils - if ctx.env.TRY_CGO and Options.options.exe: - def exe_pi(bld): - p = Utils.subprocess.Popen('LD_LIBRARY_PATH=build ./build/pi', shell=True) - p.wait() - ctx.add_post_fun(exe_pi) - - def exe_hello(bld): - p = Utils.subprocess.Popen('LD_LIBRARY_PATH=build ./build/go-stdio-hello', shell=True) - p.wait() - ctx.add_post_fun(exe_hello) - - def exe_fib(bld): - p = Utils.subprocess.Popen('LD_LIBRARY_PATH=build ./build/go-stdio-fib', shell=True) - p.wait() - ctx.add_post_fun(exe_fib) - - def exe_chain(bld): - p = Utils.subprocess.Popen('LD_LIBRARY_PATH=build ./build/go-stdio-chain', shell=True) - p.wait() - ctx.add_post_fun(exe_chain) - - def exe_mycgolib(bld): - p = Utils.subprocess.Popen('LD_LIBRARY_PATH=build ./build/my-cgo-test', shell=True) - p.wait() - ctx.add_post_fun(exe_mycgolib) - diff --git a/playground/gtest/tests/test1/wscript_build b/playground/gtest/tests/test1/wscript_build index c969fbaba0..2af2f0639e 100644 --- a/playground/gtest/tests/test1/wscript_build +++ b/playground/gtest/tests/test1/wscript_build @@ -1,18 +1,11 @@ #! 
/usr/bin/env python # encoding: utf-8 - -def fun(task): - pass - # print task.generator.bld.name_to_obj('somelib').link_task.outputs[0].abspath(task.env) - # task.ut_exec.append('--help') - bld.program( features = 'test', source = 'AccumulatorTest.cpp', target = 'unit_test_program', use = 'unittestmain useless GTEST', ut_cwd = bld.path.abspath(), - ut_fun = fun ) diff --git a/playground/gtest/wscript b/playground/gtest/wscript index 5919f3cacd..df8e9b6d99 100644 --- a/playground/gtest/wscript +++ b/playground/gtest/wscript @@ -42,24 +42,24 @@ def gtest_results(bld): if ' OK ]' in output[i]: continue while not '[ ' in output[i]: - Logs.warn('%s' % output[i]) + Logs.warn(output[i]) i += 1 elif ' FAILED ]' in line and code: - Logs.error('%s' % line) + Logs.error(line) elif ' PASSED ]' in line: - Logs.info('%s' % line) + Logs.info(line) def build(bld): bld.recurse('src tests') - # unittestw.summary is a pretty ugly function for displaying a report (feel free to improve!) + # waf_unit_test.summary is a pretty ugly function for displaying a report (feel free to improve!) # results -> bld.utest_results [(filename, returncode, stdout, stderr), (..., ), ...] #bld.add_post_fun(waf_unit_test.summary) bld.add_post_fun(gtest_results) # to execute all tests: # $ waf --alltests - # to set this behaviour permanenly: + # to set this behaviour permanently: bld.options.all_tests = True # debugging zone: diff --git a/playground/haxe/bytecode/src/Main.hx b/playground/haxe/bytecode/src/Main.hx new file mode 100644 index 0000000000..331e5bf830 --- /dev/null +++ b/playground/haxe/bytecode/src/Main.hx @@ -0,0 +1,5 @@ +class Main { + static function main() { + trace('hello'); + } +} diff --git a/playground/haxe/bytecode/src/wscript b/playground/haxe/bytecode/src/wscript new file mode 100644 index 0000000000..25348d659e --- /dev/null +++ b/playground/haxe/bytecode/src/wscript @@ -0,0 +1,15 @@ +def configure(ctx): + ctx.load('haxe') + ctx.check_haxe(mini='4.0.0', maxi='4.2.5') + ctx.check_haxe_pkg( + libs=['hashlink'], + uselib_store='HLR', + fetch=False) + +def build(ctx): + ctx.env.HAXE_FLAGS = ['-dce', 'full', '-main', 'Main'] + ctx( + compiler = 'HL', + source = 'Main.hx', + target = 'out.hl', + use = ['HLR']) diff --git a/playground/haxe/bytecode/wscript b/playground/haxe/bytecode/wscript new file mode 100644 index 0000000000..6edc0a0bec --- /dev/null +++ b/playground/haxe/bytecode/wscript @@ -0,0 +1,8 @@ +top = '.' +out = 'build' + +def configure(ctx): + ctx.recurse('src') + +def build(ctx): + ctx.recurse('src') diff --git a/playground/haxe/executable/README.md b/playground/haxe/executable/README.md new file mode 100644 index 0000000000..fcc7af2ae0 --- /dev/null +++ b/playground/haxe/executable/README.md @@ -0,0 +1,19 @@ +# Using `HLC` source generation with `clang` + +## Environment +In this particular case, you need to have a distribution of `hashlink` installed on your system. After installing, you need to perform additional steps to pass the required files for binary generation (in this case, to `clang`): + +- either add hashlink's `lib` folder to `ctx.env.LIBPATH_HL` +- or replace the `lib` folder with a symlink to hashlink's `lib` folder +- either add hashlink's `include` folder to `ctx.env.INCLUDES_HL` +- or replace the `include` folder with a symlink to hashlink's `include` folder + +## Targets +In this particular case, the generated `.c` files are placed in a separate `bin` subdirectory.
This makes the build more transparent and allows you to add checks or perform additional operations on the generated `.c` sources if needed, while keeping things in parallel. Keep this in mind if you're planning to extend your build layout with additional Haxe targets. + +## Running assembled binaries +Assuming that you have a `hashlink` distribution and all relevant system paths are adjusted, you can run your binary and see the resulting output of `Main.hx:3: hello`. Keep in mind that if you're using an official `hashlink` distribution, it doesn't come with static libs for linking - this means that your produced binary requires paths to `libhl.dll` (or `.so`/`.dylib`, depending on your system). Of course, you may be building `hashlink` from sources or using it as a portable distribution - in these cases, you can run your binary by adding the correct paths to your dynamic libraries (`$PWD/lib/`, for example) to: + +- `PATH` on Windows +- `LD_LIBRARY_PATH` on Linux +- `DYLD_LIBRARY_PATH` on macOS \ No newline at end of file diff --git a/playground/haxe/executable/bin/wscript b/playground/haxe/executable/bin/wscript new file mode 100644 index 0000000000..8757fdbb93 --- /dev/null +++ b/playground/haxe/executable/bin/wscript @@ -0,0 +1,14 @@ +def configure(ctx): + ctx.load('clang') + ctx.check( + compiler='c', + lib='hl', + use='HL', + uselib_store='HL') + +def build(ctx): + ctx.program( + source = [ctx.bldnode.make_node('src/main/main.c')], + includes = [ctx.env.ROOT_INCLUDE_DIR], + target = 'app', + use = ['HL']) diff --git a/playground/haxe/executable/include/readme.txt b/playground/haxe/executable/include/readme.txt new file mode 100644 index 0000000000..03fce35ab9 --- /dev/null +++ b/playground/haxe/executable/include/readme.txt @@ -0,0 +1 @@ +place hashlink includes here (e.g. hlc.h) or replace this directory with a symlink if using a package manager - a dedicated directory is used when native hashlink includes are used diff --git a/playground/haxe/executable/lib/readme.txt b/playground/haxe/executable/lib/readme.txt new file mode 100644 index 0000000000..22946ac917 --- /dev/null +++ b/playground/haxe/executable/lib/readme.txt @@ -0,0 +1 @@ +place hashlink libraries here (e.g. libhl.so) or replace this directory with a symlink if using a package manager - a dedicated directory is used when native hashlink libs are linked diff --git a/playground/haxe/executable/src/Main.hx b/playground/haxe/executable/src/Main.hx new file mode 100644 index 0000000000..331e5bf830 --- /dev/null +++ b/playground/haxe/executable/src/Main.hx @@ -0,0 +1,5 @@ +class Main { + static function main() { + trace('hello'); + } +} diff --git a/playground/haxe/executable/src/wscript b/playground/haxe/executable/src/wscript new file mode 100644 index 0000000000..d147a47068 --- /dev/null +++ b/playground/haxe/executable/src/wscript @@ -0,0 +1,15 @@ +def configure(ctx): + ctx.load('haxe') + ctx.check_haxe(mini='4.0.0', maxi='4.2.5') + ctx.check_haxe_pkg( + libs=['hashlink'], + uselib_store='HLR') + +def build(ctx): + ctx.env.HAXE_FLAGS = ['-dce', 'full', '-main', 'Main'] + ctx( + compiler = 'HLC', + source = 'Main.hx', + res = ctx.env.ROOT_RES_DIR, + target = 'main', + use = ['HLR']) diff --git a/playground/haxe/executable/wscript b/playground/haxe/executable/wscript new file mode 100644 index 0000000000..4f99d22abd --- /dev/null +++ b/playground/haxe/executable/wscript @@ -0,0 +1,15 @@ +top = '.'
+out = 'bin/waf' + +def configure(ctx): + ctx.env.ROOT_INCLUDE_DIR = ctx.path.get_bld().make_node('src').make_node('main').abspath() + ctx.env.ROOT_RES_DIR = ctx.path.make_node('res').abspath() + ctx.env.LIBPATH_HAXE = ctx.path.make_node('haxe_libraries').abspath() + ctx.env.LIBPATH_HL = ctx.path.make_node('lib').abspath() + ctx.env.INCLUDES_HL = ctx.path.make_node('include').abspath() + ctx.recurse('src') + ctx.recurse('bin') + +def build(ctx): + ctx.recurse('src') + ctx.recurse('bin') diff --git a/playground/implicit_order/wscript b/playground/implicit_order/wscript index 925d904788..2d4905c164 100644 --- a/playground/implicit_order/wscript +++ b/playground/implicit_order/wscript @@ -18,7 +18,7 @@ def configure(conf): def build(bld): - bld(rule='echo "int ko = $$RANDOM;" > ${TGT}', target='faa.h', always=True, update_outputs=True, shell=True, name='z2') + bld(rule='echo "int ko = $$RANDOM;" > ${TGT}', target='faa.h', always=True, shell=True, name='z2') bld.program(source='a.c main.c', target='foo', includes='.') # sort the tasks in reverse order to force the 'faa.h' creation in last position @@ -41,9 +41,9 @@ def are_implicit_nodes_ready(self): bld.dct_implicit_nodes = cache = {} try: - dct = cache[bld.cur] + dct = cache[bld.current_group] except KeyError: - dct = cache[bld.cur] = {} + dct = cache[bld.current_group] = {} for tsk in bld.cur_tasks: for x in tsk.outputs: dct[x] = tsk @@ -57,7 +57,7 @@ def are_implicit_nodes_ready(self): if modified: for tsk in self.run_after: if not tsk.hasrun: - Logs.warn("task %r is not ready..." % self) + Logs.warn('task %r is not ready...', self) raise Errors.TaskNotReady('not ready') Task.Task.are_implicit_nodes_ready = are_implicit_nodes_ready diff --git a/playground/java_recursive_use/a/src/JavaTestA.java b/playground/java_recursive_use/a/src/JavaTestA.java new file mode 100644 index 0000000000..cce730e740 --- /dev/null +++ b/playground/java_recursive_use/a/src/JavaTestA.java @@ -0,0 +1,2 @@ +public class JavaTestA { +} diff --git a/playground/java_recursive_use/a/wscript b/playground/java_recursive_use/a/wscript new file mode 100644 index 0000000000..26064a6aad --- /dev/null +++ b/playground/java_recursive_use/a/wscript @@ -0,0 +1,7 @@ + +def build(bld): + bld(features='javac jar', + srcdir='src', + name='a', + basedir='.' 
+ ) diff --git a/playground/java_recursive_use/b/src/JavaTestB.java b/playground/java_recursive_use/b/src/JavaTestB.java new file mode 100644 index 0000000000..b2067ec415 --- /dev/null +++ b/playground/java_recursive_use/b/src/JavaTestB.java @@ -0,0 +1,2 @@ +public class JavaTestB extends JavaTestA { +} diff --git a/playground/java_recursive_use/b/wscript b/playground/java_recursive_use/b/wscript new file mode 100644 index 0000000000..a5e6785308 --- /dev/null +++ b/playground/java_recursive_use/b/wscript @@ -0,0 +1,8 @@ + +def build(bld): + bld(features='javac jar', + srcdir='src', + name='b', + basedir='.', + use='a' + ) diff --git a/playground/java_recursive_use/c/src/JavaTestC.java b/playground/java_recursive_use/c/src/JavaTestC.java new file mode 100644 index 0000000000..1f42f48b3c --- /dev/null +++ b/playground/java_recursive_use/c/src/JavaTestC.java @@ -0,0 +1,2 @@ +public class JavaTestC extends JavaTestB { +} diff --git a/playground/java_recursive_use/c/wscript b/playground/java_recursive_use/c/wscript new file mode 100644 index 0000000000..3cc393f8c3 --- /dev/null +++ b/playground/java_recursive_use/c/wscript @@ -0,0 +1,9 @@ + +def build(bld): + bld(features='javac jar', + srcdir='src', + name='c', + basedir='.', + use='b', + recurse_use = True + ) diff --git a/playground/java_recursive_use/d/src/JavaTestD.java b/playground/java_recursive_use/d/src/JavaTestD.java new file mode 100644 index 0000000000..308c741ba0 --- /dev/null +++ b/playground/java_recursive_use/d/src/JavaTestD.java @@ -0,0 +1,2 @@ +public class JavaTestD extends JavaTestC { +} diff --git a/playground/java_recursive_use/d/wscript b/playground/java_recursive_use/d/wscript new file mode 100644 index 0000000000..82580c45df --- /dev/null +++ b/playground/java_recursive_use/d/wscript @@ -0,0 +1,9 @@ + +def build(bld): + bld(features='javac jar', + srcdir='src', + name='d', + basedir='.', + use='b c', + recurse_use = True + ) diff --git a/playground/java_recursive_use/wscript b/playground/java_recursive_use/wscript new file mode 100644 index 0000000000..7d5f8d6259 --- /dev/null +++ b/playground/java_recursive_use/wscript @@ -0,0 +1,25 @@ +#! /usr/bin/env python +# encoding: utf-8 +# Federico Pellegrin, 2018 (fedepell) + +# Simple example testing the recursive 'use' processing for Java. +# In the example, b uses a, c uses b and d uses c. Transitively, c therefore +# also uses a, but this is not enabled by default: whether the dependency is +# needed at build time or at run time depends on the code, so enabling it by +# default could lead to unneeded work. On the other hand, for a project with +# many Java artifacts and complex dependencies, manual management may be +# expensive. +# +# Setting recurse_use to True (or, build-wide, setting the RECURSE_JAVA env +# variable to True) enables the recursive behaviour.
+# +# Directory d tests the case where recursion stops earlier because +# a dependency is already explicitly defined. + +def configure(conf): + conf.load('java') + # For build-wide enabling: + # conf.env.RECURSE_JAVA = True + +def build(bld): + bld.recurse('a b c d') diff --git a/playground/javatest/jni/java/StringUtils.java b/playground/javatest/jni/java/StringUtils.java new file mode 100644 index 0000000000..39ec05c407 --- /dev/null +++ b/playground/javatest/jni/java/StringUtils.java @@ -0,0 +1,27 @@ +public final class StringUtils +{ + + public static final String LIBRARY_NAME = "stringUtils"; + + static + { + System.loadLibrary(LIBRARY_NAME); + } + + private StringUtils() + { + } + + public static native boolean isAlpha(String string); + + public static native boolean isEmpty(String string); + + public static void main(String[] args) + { + System.out.println(StringUtils.isAlpha("sureIs")); + System.out.println(StringUtils.isAlpha("nope!")); + System.out.println(StringUtils.isEmpty(" ")); + System.out.println(StringUtils.isEmpty("nope")); + } + +} diff --git a/playground/javatest/jni/jni/include/StringUtils.h b/playground/javatest/jni/jni/include/StringUtils.h new file mode 100644 index 0000000000..2064f96dcf --- /dev/null +++ b/playground/javatest/jni/jni/include/StringUtils.h @@ -0,0 +1,29 @@ +/* DO NOT EDIT THIS FILE - it is machine generated */ +#include <jni.h> +/* Header for class StringUtils */ + +#ifndef _Included_StringUtils +#define _Included_StringUtils +#ifdef __cplusplus +extern "C" { +#endif +/* + * Class: StringUtils + * Method: isAlpha + * Signature: (Ljava/lang/String;)Z + */ +JNIEXPORT jboolean JNICALL Java_StringUtils_isAlpha + (JNIEnv *, jclass, jstring); + +/* + * Class: StringUtils + * Method: isEmpty + * Signature: (Ljava/lang/String;)Z + */ +JNIEXPORT jboolean JNICALL Java_StringUtils_isEmpty + (JNIEnv *, jclass, jstring); + +#ifdef __cplusplus +} +#endif +#endif diff --git a/playground/javatest/jni/jni/source/StringUtils.c b/playground/javatest/jni/jni/source/StringUtils.c new file mode 100644 index 0000000000..d6567ab195 --- /dev/null +++ b/playground/javatest/jni/jni/source/StringUtils.c @@ -0,0 +1,51 @@ +#include "StringUtils.h" +#include <string.h> +#include <ctype.h> + +JNIEXPORT jboolean JNICALL Java_StringUtils_isAlpha(JNIEnv *env, jclass clazz, + jstring jStr) +{ + jboolean ret = JNI_TRUE; + char *sp = NULL, *s = NULL; + + if (!jStr) + return JNI_FALSE; + + s = (char*)(*env)->GetStringUTFChars(env, jStr, 0); + sp = s + strlen(s); + if (sp <= s) + ret = JNI_FALSE; + do + { + if (!isalpha(*(--sp))) + ret = JNI_FALSE; + } + while (sp > s); + + (*env)->ReleaseStringUTFChars(env, jStr, s); + return ret; +} + +JNIEXPORT jboolean JNICALL Java_StringUtils_isEmpty(JNIEnv *env, jclass clazz, + jstring jStr) +{ + jboolean ret = JNI_TRUE; + char *sp = NULL, *s = NULL; + + if (!jStr) + return JNI_TRUE; + + s = (char*)(*env)->GetStringUTFChars(env, jStr, 0); + sp = s + strlen(s); + if (sp <= s) + ret = JNI_TRUE; + do + { + if (!isspace(*(--sp))) + ret = JNI_FALSE; + } + while (sp > s); + + (*env)->ReleaseStringUTFChars(env, jStr, s); + return ret; +} diff --git a/playground/javatest/jni/test/TestJni.java b/playground/javatest/jni/test/TestJni.java new file mode 100644 index 0000000000..d8d9c22b70 --- /dev/null +++ b/playground/javatest/jni/test/TestJni.java @@ -0,0 +1,20 @@ +import org.testng.Assert; +import org.testng.annotations.Test; + +public class TestJni { + + @Test + public void testTrue() { + Assert.assertEquals(true, StringUtils.isAlpha("myfootest"), "'myfootest' is alpha"); + } + + @Test +
public void testFalse() { + Assert.assertEquals(false, StringUtils.isAlpha("my f00 t3$t"), "'my f00 t3$t' is not alpha"); + } + + @Test + public void testIsEmpty() { + Assert.assertEquals(false, StringUtils.isEmpty("emptyNOT"), "'emptyNOT' is not empty"); + } +} diff --git a/playground/javatest/jni/test/testng.xml b/playground/javatest/jni/test/testng.xml new file mode 100644 index 0000000000..237f8b6dd5 --- /dev/null +++ b/playground/javatest/jni/test/testng.xml @@ -0,0 +1,11 @@ + + + + + + + + + + + diff --git a/playground/javatest/src/Nums.java b/playground/javatest/src/Nums.java new file mode 100644 index 0000000000..f12708dd35 --- /dev/null +++ b/playground/javatest/src/Nums.java @@ -0,0 +1,17 @@ +public class Nums { + private int value = 0; + + // Is bigger than 5 + public boolean isBiggerThanFive() { + if (this.value > 5) { + return true; + } else { + return false; + } + } + + // Set object value + public void setValue(int value) { + this.value = value; + } +} diff --git a/playground/javatest/test/TestNums.java b/playground/javatest/test/TestNums.java new file mode 100644 index 0000000000..573091dfeb --- /dev/null +++ b/playground/javatest/test/TestNums.java @@ -0,0 +1,31 @@ +import org.testng.Assert; +import org.testng.annotations.Test; + +public class TestNums { + Nums myNumObj = new Nums(); + + @Test + public void testTrue() { + myNumObj.setValue(10); + + boolean isBigger = myNumObj.isBiggerThanFive(); + Assert.assertEquals(true, isBigger, "10 should be bigger than 5"); + } + + @Test + public void testFalse() { + myNumObj.setValue(1); + + boolean isBigger = myNumObj.isBiggerThanFive(); + Assert.assertEquals(false, isBigger, "1 should be smaller than 5"); + } + + @Test + public void testBoundary() { + myNumObj.setValue(5); + + boolean isBigger = myNumObj.isBiggerThanFive(); + Assert.assertEquals(false, isBigger, "5 should not be bigger than 5"); + } +} + diff --git a/playground/javatest/test/testng.xml b/playground/javatest/test/testng.xml new file mode 100644 index 0000000000..d3dc165283 --- /dev/null +++ b/playground/javatest/test/testng.xml @@ -0,0 +1,11 @@ + + + + + + + + + + + diff --git a/playground/javatest/wscript b/playground/javatest/wscript new file mode 100644 index 0000000000..6a954cc37f --- /dev/null +++ b/playground/javatest/wscript @@ -0,0 +1,76 @@ +#! /usr/bin/env python +# encoding: utf-8 +# Federico Pellegrin, 2019 (fedepell) + +# +# Simple script to demonstrate integration of Java Unit testing inside +# standard waf_unit_test using either TestNG or JUnit +# + +def test_results(bld): + """ + Custom post- function that prints out test results. 
+ """ + lst = getattr(bld, 'utest_results', []) + if not lst: + return + for (f, code, out, err) in lst: + print(out.decode('utf-8')) + print(err.decode('utf-8')) + + +def options(opt): + opt.load('java waf_unit_test javatest') + opt.load('compiler_c') + +def configure(conf): + conf.load('java javatest') + conf.load('compiler_c') + conf.check_jni_headers() + +def build(bld): + bld(features = 'javac', + name = 'mainprog', + srcdir = 'src/', # folder containing the sources to compile + outdir = 'src', # folder where to output the classes (in the build directory) + sourcepath = ['src'], + basedir = 'src', # folder containing the classes and other files to package (must match outdir) + ) + + + bld(features = 'javac javatest', + srcdir = 'test/', # folder containing the sources to compile + outdir = 'test', # folder where to output the classes (in the build directory) + sourcepath = ['test'], + classpath = [ 'src' ], + basedir = 'test', # folder containing the classes and other files to package (must match outdir) + use = ['JAVATEST', 'mainprog'], + ut_str = 'java -cp ${CLASSPATH} ${JTRUNNER} ${SRC}', + jtest_source = bld.path.ant_glob('test/*.xml'), + # For JUnit do first JUnit configuration and no need to use jtest_source: + # ut_str = 'java -cp ${CLASSPATH} ${JTRUNNER} [TestClass]', + ) + + + # Demonstrate correct handling also of dependency to non-java tasks (see !2257) + bld(name='stjni', features='javac jar', srcdir='jni/java', outdir='jni/java', basedir='jni/java', destfile='stringUtils.jar') + + bld.shlib(source = 'jni/jni/source/StringUtils.c', + includes = 'jni/jni/include', + target = 'jni/stringUtils', + uselib = 'JAVA') + + bld(features = 'javac javatest', + srcdir = 'jni/test/', # folder containing the sources to compile + outdir = 'jni/test', # folder where to output the classes (in the build directory) + sourcepath = ['jni/test'], + classpath = [ 'jni/src' ], + basedir = 'jni/test', # folder containing the classes and other files to package (must match outdir) + use = ['JAVATEST', 'stjni', 'jni/stringUtils'], + ut_str = 'java -cp ${CLASSPATH} ${JTRUNNER} ${SRC}', + jtest_source = bld.path.ant_glob('jni/test/*.xml'), + ) + + + bld.add_post_fun(test_results) + diff --git a/playground/json/test.json b/playground/json/test.json new file mode 100644 index 0000000000..778faebe51 --- /dev/null +++ b/playground/json/test.json @@ -0,0 +1,9 @@ +{ + "array": [1, 2, "abc", 4.8, null], + "dict": { + "integer": 1, + "float": 4.8, + "string": "Hello, world!" + }, + "boolean": true +} diff --git a/playground/json/wscript b/playground/json/wscript new file mode 100644 index 0000000000..48f819b478 --- /dev/null +++ b/playground/json/wscript @@ -0,0 +1,32 @@ +#! /usr/bin/env python +# encoding: utf-8 +# Matt Clarkson, 2015 (ita) + +VERSION='0.0.1' +APPNAME='json_test' + +top = '.' + +import sys +import waflib.Configure +waflib.Configure.autoconfig = True + +def options(opt): + opt.add_option( + '--pretty', + action = 'store_true', + help = 'pretty prints the writing of the JSON') + +def configure(conf): + pass + +def build(bld): + node = bld.srcnode.make_node('test.json') + json = node.read_json() + print('Read', json) + json['new_key'] = { + 'number': 199 + } + output = bld.bldnode.make_node('output.json') + output.write_json(json, pretty=bld.options.pretty) + print('Wrote', output.read()) diff --git a/playground/libtest/wscript b/playground/libtest/wscript new file mode 100644 index 0000000000..13e0032c6f --- /dev/null +++ b/playground/libtest/wscript @@ -0,0 +1,53 @@ +#! 
/usr/bin/env python + +def options(opt): + opt.load('compiler_c') + +def configure(conf): + conf.load('compiler_c') + conf.find_program('ldd') + conf.where_is_shlib(lib='m') + +# --- support code below --- + +import re +from waflib import Task, TaskGen, Configure + +class ldd_run(Task.Task): + color = 'PINK' + run_str = '${LDD} ${SRC} > ${TGT}' + + def post_run(self): + ret = Task.Task.post_run(self) + libname = self.generator.lib + re_libpath = re.compile('lib%s.*\s+=>\s+(\S+%s\S+)\s+' % (libname, libname), re.M) + m = re_libpath.search(self.outputs[0].read()) + if m: + self.generator.tmp.append(m.group(1)) + else: + return ret or 1 + return ret + +@TaskGen.feature('ldd_check') +@TaskGen.after_method('apply_link') +def do_ldd_check(self): + self.create_task('ldd_run', self.link_task.outputs[0], self.path.find_or_declare('ldd.out')) + +@Configure.conf +def where_is_shlib(self, lib): + tmp = [] + def check_msg(self): + return tmp[0] + + self.check( + fragment = 'int main() { return 0; }\n', + features = 'c cprogram ldd_check', + lib = lib, + linkflags = '-Wl,--no-as-needed', + msg = 'Where is library %r' % lib, + define = 'LIBFROM', + tmp = tmp, + okmsg = check_msg) + + return tmp[0] + diff --git a/playground/makedeps/wscript b/playground/makedeps/wscript index 04ed2d6729..78b9d0f88e 100644 --- a/playground/makedeps/wscript +++ b/playground/makedeps/wscript @@ -22,7 +22,6 @@ def build(bld): def xxx(**kw): # this is just an alias, but aliases are convenient, use them! - kw['update_outputs'] = True if not 'rule' in kw: kw['rule'] = 'cp ${SRC} ${TGT}' return bld(**kw) diff --git a/playground/maxjobs2/compute/wscript b/playground/maxjobs2/compute/wscript index b95da2a3bf..381ef8fd27 100644 --- a/playground/maxjobs2/compute/wscript +++ b/playground/maxjobs2/compute/wscript @@ -35,7 +35,7 @@ def lock_maxjob(self): try: self.lockfd = os.open(self.generator.bld.lockfile, os.O_TRUNC | os.O_CREAT | os.O_RDWR) fcntl.flock(self.lockfd, fcntl.LOCK_EX | fcntl.LOCK_NB) - except (OSError, IOError), e: + except EnvironmentError as e: if e.errno in (errno.EACCES, errno.EAGAIN): time.sleep(0.3) continue @@ -95,9 +95,9 @@ def process(self): except Exception, e: print type(e), e -Task.TaskBase.process_bound_maxjobs = Task.TaskBase.process +Task.Task.process_bound_maxjobs = Task.Task.process Task.Task.process = process -Task.TaskBase.lock_maxjob = lock_maxjob -Task.TaskBase.release_maxjob = release_maxjob -Task.TaskBase.wait_maxjob = wait_maxjob +Task.Task.lock_maxjob = lock_maxjob +Task.Task.release_maxjob = release_maxjob +Task.Task.wait_maxjob = wait_maxjob diff --git a/playground/msvs/a.cpp b/playground/msvs/a.cpp new file mode 100644 index 0000000000..642c45f744 --- /dev/null +++ b/playground/msvs/a.cpp @@ -0,0 +1,13 @@ + +#ifdef _MSC_VER +# define testshlib_EXPORT __declspec(dllexport) +#else +# define testshlib_EXPORT +#endif + +extern testshlib_EXPORT void foo(); + +static const int truc=5; + +void foo() { } + diff --git a/playground/msvs/b.cpp b/playground/msvs/b.cpp new file mode 100644 index 0000000000..1e09a10f76 --- /dev/null +++ b/playground/msvs/b.cpp @@ -0,0 +1 @@ +int u = 64; diff --git a/playground/msvs/main.cpp b/playground/msvs/main.cpp new file mode 100644 index 0000000000..dacbb25b8b --- /dev/null +++ b/playground/msvs/main.cpp @@ -0,0 +1,8 @@ +#include + +extern void foo(); + +int main() { + foo(); + return 0; +} diff --git a/playground/msvs/wscript b/playground/msvs/wscript new file mode 100644 index 0000000000..b23c6c25fc --- /dev/null +++ b/playground/msvs/wscript @@ -0,0 +1,21 @@ +#! 
/usr/bin/env python # encoding: utf-8 # Thomas Nagy, 2016 (ita) VERSION='0.0.1' APPNAME='msvs_test' top = '.' out = 'build' def options(opt): opt.load('compiler_cxx msvs') def configure(conf): conf.load('compiler_cxx') def build(bld): bld.shlib(source='a.cpp', target='mylib', vnum='9.8.7') bld.program(source='main.cpp', target='app', use='mylib') bld.stlib(source='b.cpp', target='stlib') diff --git a/playground/netcache/Netcache.java b/playground/netcache/Netcache.java index a8b5adc1f4..363e47ea13 100644 --- a/playground/netcache/Netcache.java +++ b/playground/netcache/Netcache.java @@ -1,5 +1,6 @@ // Thomas Nagy, 2011 +// TODO security // TODO handle all exceptions properly import java.util.HashMap; diff --git a/docs/book/examples/cprog_objects/main.c b/playground/netcache/main.c similarity index 96% rename from docs/book/examples/cprog_objects/main.c rename to playground/netcache/main.c index cb3f7482fa..1846bfb186 100644 --- a/docs/book/examples/cprog_objects/main.c +++ b/playground/netcache/main.c @@ -1,3 +1,4 @@ int main() { return 0; } + diff --git a/docs/book/examples/cprog_objects/test.c b/playground/netcache/test_staticlib.c similarity index 53% rename from docs/book/examples/cprog_objects/test.c rename to playground/netcache/test_staticlib.c index a1b614d540..8d6212b028 100644 --- a/docs/book/examples/cprog_objects/test.c +++ b/playground/netcache/test_staticlib.c @@ -1,3 +1,3 @@ int foo() { - return 0; + return 45; } diff --git a/playground/netcache/wscript b/playground/netcache/wscript index 88b298455a..e0b215a3a7 100644 --- a/playground/netcache/wscript +++ b/playground/netcache/wscript @@ -1,24 +1,40 @@ #! /usr/bin/env python # encoding: utf-8 -# Thomas Nagy, 2006-2012 (ita) - -# A test script for the network cache - -APPNAME='cc_test' +# Thomas Nagy, 2006-2016 (ita) +# +# The Waf network cache consists of a client +# (waflib/extras/netcache_client.py) and a simple +# TCP server that provides a way to share build +# files over a network. +# +# There are no restrictions on the use of the cache +# at this point, so use it with a firewall! +# +# +# The Java server can be run in the current folder using: +# rm -rf /tmp/wafcache/; javac Netcache.java && java Netcache +# +# Then run the example and compare the outputs: +# waf configure clean build --zones=netcache +# waf configure clean build --zones=netcache +# + +APPNAME='netcache_test' top = '.'
out = 'build' def options(opt): - #opt.tool_options('compiler_c') + #opt.load('compiler_c') pass def configure(conf): - #conf.check_tool('compiler_c') - conf.check_tool('gcc') - conf.check_tool('netcache_client') + #conf.load('compiler_c') + conf.load('gcc') + #conf.load('netcache_client') def build(bld): + bld.load('netcache_client') bld( features = 'c cprogram', source = 'main.c', diff --git a/playground/parallel_cmd/wscript b/playground/parallel_cmd/wscript index 99307b59b7..b540eb9225 100644 --- a/playground/parallel_cmd/wscript +++ b/playground/parallel_cmd/wscript @@ -65,7 +65,7 @@ def build_all_at_once(ctx): f(self) sem.release() return f2 - Task.TaskBase.process = with_sem(Task.TaskBase.process) + Task.Task.process = with_sem(Task.Task.process) threads = [] for var in ctx.all_envs: diff --git a/playground/pch/wscript b/playground/pch/wscript index 0331618d53..fb8e69fa90 100644 --- a/playground/pch/wscript +++ b/playground/pch/wscript @@ -53,7 +53,7 @@ def build(bld): source = 'a.cpp', use = 'BOOST') - bld.program( + bld(features = 'cxx cxxprogram', target = 'test1', source = 'b.cpp c.cpp main.cpp', use = 'base-with-pch') diff --git a/playground/prefork/Prefork.java b/playground/prefork/Prefork.java deleted file mode 100644 index 235f32955f..0000000000 --- a/playground/prefork/Prefork.java +++ /dev/null @@ -1,232 +0,0 @@ -// Thomas Nagy, 2015 - -import java.util.HashMap; -import java.util.Map; -import java.util.Arrays; -import java.util.List; -import java.util.Date; -import java.util.ArrayList; -import java.util.Comparator; -import java.util.Collections; -import java.util.Scanner; -import java.lang.Math; -import java.lang.StringBuilder; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.io.File; -import java.io.StringWriter; -import java.io.PrintWriter; - -import java.lang.Math; -import com.eclipsesource.json.JsonObject; -import com.eclipsesource.json.JsonArray; - -import java.lang.ProcessBuilder; -import java.lang.ProcessBuilder.Redirect; -import java.lang.Process; - -import java.io.FileOutputStream; -import java.net.ServerSocket; -import java.net.Socket; - -public class Prefork implements Runnable, Comparator { - private static int HEADER_SIZE = 64; - private static int BUF = 2048; - static String SHARED_KEY = ""; - - private Socket sock = null; - private int port = 0; - - public Prefork(Socket sock, int port) { - this.sock = sock; - this.port = port; - } - - public boolean safeCompare(String a, String b) { - int sum = Math.abs(a.length() - b.length()); - for (int i = 0; i < b.length(); ++i) { - sum |= a.charAt(i) ^ b.charAt(i); - } - return sum == 0; - } - - public void run () - { - try { - if (sock != null) - { - while (true) { - InputStream in = sock.getInputStream(); - OutputStream out = sock.getOutputStream(); - - byte b[] = new byte[HEADER_SIZE]; - int off = 0; - while (off < b.length) { - int c = in.read(b, off, b.length - off); - if (c <= 0) { - throw new RuntimeException("Connection closed too early"); - } - off += c; - } - - String line = new String(b); - String key = line.substring(line.length() - 20); - if (key.length() != 20) { - System.err.println("Fatal error in the application"); - } - if (!safeCompare(key, SHARED_KEY)) - { - System.err.println("Invalid key given " + key.length() + " " + SHARED_KEY.length() + " " + key + " " + SHARED_KEY); - sock.close(); - } - - //System.out.println(new String(b)); - String[] args = line.substring(0, line.length() - 20).split(","); - if (args[0].equals("REQ")) { - process(args, 
sock); - } - else - { - System.out.println("Invalid command " + new String(b) + " on port " + this.port); - sock.close(); - break; - } - } - } else { - // magic trick to avoid creating a new inner class - ServerSocket server = new ServerSocket(port); - server.setReuseAddress(true); - while (true) { - Socket conn = server.accept(); - conn.setTcpNoDelay(true); - Prefork tmp = new Prefork(conn, port); - Thread t = new Thread(tmp); - t.start(); - } - } - } catch (InterruptedException e) { - e.printStackTrace(); - } catch (IOException e) { - e.printStackTrace(); - } - } - - public String make_out(Socket sock, String stdout, String stderr, String exc) { - if ((stdout == null || stdout.length() == 0) && (stderr == null || stderr.length() == 0) && (exc == null || exc.length() == 0)) - { - return null; - } - - JsonArray ret = new JsonArray(); - ret.add(stdout); - ret.add(stderr); - ret.add(exc); - return ret.toString(); - } - - public String readFile(File f) throws IOException { - String ret = new Scanner(f).useDelimiter("\\A").next(); - return ret; - } - - public void process(String[] args, Socket sock) throws IOException, InterruptedException { - long size = new Long(args[1].trim()); - //System.out.println("" + args[1] + " " + args[2] + " " + args[3] + " " + args.length); - - byte[] buf = new byte[BUF]; - StringBuilder sb = new StringBuilder(); - InputStream in = sock.getInputStream(); - long cnt = 0; - while (cnt < size) { - int c = in.read(buf, 0, (int) Math.min(BUF, size-cnt)); - if (c <= 0) { - throw new RuntimeException("Connection closed too early"); - } - sb.append(new String(buf, 0, c)); - cnt += c; - } - - String stdout = null; - String stderr = null; - String exc = null; - - JsonObject kw = JsonObject.readFrom(sb.toString()); - boolean isShell = kw.get("shell").asBoolean(); - - String[] command = null; - - if (isShell) { - command = new String[] {"sh", "-c", kw.get("cmd").asString()}; - } - else - { - JsonArray arr = kw.get("cmd").asArray(); - int siz = arr.size(); - command = new String[siz]; - for (int i =0; i < siz; ++i) { - command[i] = arr.get(i).asString(); - } - } - - ProcessBuilder pb = new ProcessBuilder(command); - String cwd = kw.get("cwd").asString(); - if (cwd != null) { - pb.directory(new File(cwd)); - } - - long threadId = Thread.currentThread().getId(); - String errFile = "log_err_" + threadId; - File elog = new File(errFile); - pb.redirectError(Redirect.to(elog)); - - String outFile = "log_out_" + threadId; - File olog = new File(outFile); - pb.redirectOutput(Redirect.to(olog)); - - int status = -8; - try { - Process p = pb.start(); - status = p.waitFor(); - } catch (IOException e) { - StringWriter sw = new StringWriter(); - e.printStackTrace(new PrintWriter(sw)); - exc = sw.toString(); - } - - if (olog.length() != 0) { - stdout = readFile(olog); - } - olog.delete(); - - if (elog.length() != 0) { - stderr = readFile(elog); - } - elog.delete(); - - OutputStream out = sock.getOutputStream(); - String msg = make_out(sock, stdout, stderr, exc); - - // RES, status, ret size - int len = msg != null ? 
msg.length() : 0; - - String ret = String.format("%-64s", String.format("RES,%d,%d", status, len)); - out.write(ret.getBytes()); - if (len > 0) - { - out.write(msg.getBytes()); - } - } - - public int compare(Object[] a, Object[] b) { - return ((Long) a[0]).compareTo((Long) b[0]); - } - - public static void main(String[] args) { - Map env = System.getenv(); - SHARED_KEY = env.get("SHARED_KEY"); - Prefork tmp = new Prefork(null, new Integer(args[0])); - tmp.run(); - } -} - diff --git a/playground/prefork/README b/playground/prefork/README deleted file mode 100644 index d07baa4c18..0000000000 --- a/playground/prefork/README +++ /dev/null @@ -1,7 +0,0 @@ -TODO - work in progress - -Get a copy of https://github.com/ralfstx/minimal-json, currently minimal-json-0.9.3-SNAPSHOT.jar - -Build the java file with: -javac -target 1.7 -source 1.7 -cp ./minimal-json-0.9.3-SNAPSHOT.jar:. Prefork.java - diff --git a/playground/protoc/inc/message.proto b/playground/protoc/inc/message.proto index e526472a4e..4dbfd1a247 100644 --- a/playground/protoc/inc/message.proto +++ b/playground/protoc/inc/message.proto @@ -1,7 +1,9 @@ package udp.tc.tests; -option java_package = "com.udp.tc.tests"; -option java_outer_classname = "MessageProtos"; +import "message_inc.proto"; + +option java_package ="com.udp.tc.tests"; +option java_outer_classname= "MessageProtos"; option cc_generic_services = false; option java_generic_services = false; option py_generic_services = false; @@ -9,4 +11,5 @@ option py_generic_services = false; message Message { required int32 test = 1; optional uint32 blah = 2; + required IncludeMe custom = 3; } diff --git a/playground/protoc/inc/message_inc.proto b/playground/protoc/inc/message_inc.proto new file mode 100644 index 0000000000..5da32dfdc8 --- /dev/null +++ b/playground/protoc/inc/message_inc.proto @@ -0,0 +1,14 @@ +package udp.tc.tests; + +option java_package = "com.udp.tc.tests"; +option java_outer_classname = "MessageInc"; +option cc_generic_services = false; +option java_generic_services = false; +option py_generic_services = false; + + +message IncludeMe { + required int32 test = 1; + optional uint32 blah = 2; + optional uint32 justinc = 3; +} diff --git a/playground/protoc/inc/msgCaseTest.proto b/playground/protoc/inc/msgCaseTest.proto new file mode 100644 index 0000000000..4d9b637440 --- /dev/null +++ b/playground/protoc/inc/msgCaseTest.proto @@ -0,0 +1,14 @@ +package udp.tc.tests; + +import "message_inc.proto"; + +option java_package ="com.udp.tc.tests"; +option cc_generic_services = false; +option java_generic_services = false; +option py_generic_services = false; + +message MessageCaseTest { + required int32 test = 1; + optional uint32 blah = 2; + required IncludeMe custom = 3; +} diff --git a/playground/protoc/incboth/messageboth.proto b/playground/protoc/incboth/messageboth.proto new file mode 100644 index 0000000000..2567c7b8f8 --- /dev/null +++ b/playground/protoc/incboth/messageboth.proto @@ -0,0 +1,14 @@ +package udp.tc.tests; + +import "messageboth_inc.proto"; + +option java_package = "com.udp.tc.tests"; +option java_outer_classname = "MessageBothProtos"; +option cc_generic_services = false; +option java_generic_services = false; +option py_generic_services = false; + +message MessageBoth { + required int32 test = 1; + optional uint32 blah = 2; +} diff --git a/playground/protoc/incboth/messageboth_inc.proto b/playground/protoc/incboth/messageboth_inc.proto new file mode 100644 index 0000000000..7adc19365b --- /dev/null +++ b/playground/protoc/incboth/messageboth_inc.proto @@ 
-0,0 +1,4 @@ +message IncludeMeBoth { + required int32 test = 1; + optional uint32 blah = 2; +} diff --git a/playground/protoc/incdeep/a/b/test.proto b/playground/protoc/incdeep/a/b/test.proto new file mode 100644 index 0000000000..d6ada83969 --- /dev/null +++ b/playground/protoc/incdeep/a/b/test.proto @@ -0,0 +1,4 @@ +message DeepTest { + required string deep = 1; +} + diff --git a/playground/protoc/increcurse/increc/message.proto b/playground/protoc/increcurse/increc/message.proto new file mode 100644 index 0000000000..f65198dcef --- /dev/null +++ b/playground/protoc/increcurse/increc/message.proto @@ -0,0 +1,17 @@ +package udp.tc.tests; + +import "message_inc.proto"; +import "message_inc_tl.proto"; + +option java_package ="com.udp.tc.tests"; +option java_outer_classname= "MessageProtos"; +option cc_generic_services = false; +option java_generic_services = false; +option py_generic_services = false; + +message Message { + required int32 test = 1; + optional uint32 blah = 2; + required IncludeMe custom = 3; + required IncludeMeFromTop customfromtop = 4; +} diff --git a/playground/protoc/increcurse/increc/message_inc.proto b/playground/protoc/increcurse/increc/message_inc.proto new file mode 100644 index 0000000000..5da32dfdc8 --- /dev/null +++ b/playground/protoc/increcurse/increc/message_inc.proto @@ -0,0 +1,14 @@ +package udp.tc.tests; + +option java_package = "com.udp.tc.tests"; +option java_outer_classname = "MessageInc"; +option cc_generic_services = false; +option java_generic_services = false; +option py_generic_services = false; + + +message IncludeMe { + required int32 test = 1; + optional uint32 blah = 2; + optional uint32 justinc = 3; +} diff --git a/playground/protoc/increcurse/wscript b/playground/protoc/increcurse/wscript new file mode 100644 index 0000000000..d94d5c65b6 --- /dev/null +++ b/playground/protoc/increcurse/wscript @@ -0,0 +1,11 @@ +#! /usr/bin/env python + + +def build(bld): + bld( + features = 'py', + name = 'pbpyrec', + source = ['increc/message.proto'], + protoc_includes = ['increc', 'othermod/deep/inc'], + protoc_extincludes = ['/usr/include/pblib', '/usr/share/protos'] + ) diff --git a/playground/protoc/incseparate/depinotherdir.proto b/playground/protoc/incseparate/depinotherdir.proto new file mode 100644 index 0000000000..4dbfd1a247 --- /dev/null +++ b/playground/protoc/incseparate/depinotherdir.proto @@ -0,0 +1,15 @@ +package udp.tc.tests; + +import "message_inc.proto"; + +option java_package ="com.udp.tc.tests"; +option java_outer_classname= "MessageProtos"; +option cc_generic_services = false; +option java_generic_services = false; +option py_generic_services = false; + +message Message { + required int32 test = 1; + optional uint32 blah = 2; + required IncludeMe custom = 3; +} diff --git a/playground/protoc/othermod/deep/inc/message_inc_tl.proto b/playground/protoc/othermod/deep/inc/message_inc_tl.proto new file mode 100644 index 0000000000..6a17dac28c --- /dev/null +++ b/playground/protoc/othermod/deep/inc/message_inc_tl.proto @@ -0,0 +1,11 @@ +package udp.tc.tests; + +option java_package = "com.udp.tc.tests"; +option java_outer_classname = "MessageInc"; +option cc_generic_services = false; +option java_generic_services = false; +option py_generic_services = false; + +message IncludeMeFromTop { + required int32 testext = 1; +} diff --git a/playground/protoc/wscript b/playground/protoc/wscript index a2135718dc..45d4df6189 100644 --- a/playground/protoc/wscript +++ b/playground/protoc/wscript @@ -1,18 +1,64 @@ #! 
/usr/bin/env python +import os +from waflib import Logs + top = '.' out = 'build' def options(opt): - opt.load('compiler_cxx') + opt.load('compiler_cxx python java') def configure(conf): - conf.load('compiler_cxx protoc') + conf.load('compiler_cxx python java protoc') + conf.check_python_version(minver=(2, 5, 0)) + # Here you have to point to your protobuf-java JAR + if os.path.isfile('/tmp/cp/protobuf-java-2.5.0.jar'): + conf.env.CLASSPATH_PROTOBUF = ['/tmp/cp/protobuf-java-2.5.0.jar'] + else: + Logs.warn('Edit the wscript file and set CLASSPATH_PROTOBUF for java') def build(bld): bld( features = 'cxx cxxshlib', - source = ['inc/message.proto'], + source = ['inc/message_inc.proto','inc/message.proto'], + name = 'somelib', target = 'somelib', - includes = ['.', 'inc']) + includes = ['inc'], + export_includes = ['inc']) + + bld( + features = 'cxx cxxshlib', + source = ['incdeep/a/b/test.proto'], + target = 'somedeeplib', + includes = ['incdeep']) + + bld( + features = 'cxx cxxshlib', + source = ['incseparate/depinotherdir.proto'], + target = 'crossdirlib', + includes = ['incseparate'], + use = ['somelib']) + + bld( + features = 'py', + name = 'pbpy', + source = ['inc/message_inc.proto','inc/message.proto'], + protoc_includes = ['inc']) + + bld( + features = 'cxx py', + name = 'pbboth', + source = ['incboth/messageboth_inc.proto', 'incboth/messageboth.proto'], + protoc_includes = ['incboth']) # either protoc_includes or includes would work in this case + + if bld.env.CLASSPATH_PROTOBUF: + bld( + features = 'javac protoc', + name = 'pbjava', + srcdir = 'inc/', + source = ['inc/message_inc.proto', 'inc/message.proto', 'inc/msgCaseTest.proto' ], + use = 'PROTOBUF', + protoc_includes = ['inc']) + bld.recurse('increcurse') diff --git a/playground/protoc_gen_java/proto.source b/playground/protoc_gen_java/proto.source new file mode 100644 index 0000000000..5da32dfdc8 --- /dev/null +++ b/playground/protoc_gen_java/proto.source @@ -0,0 +1,14 @@ +package udp.tc.tests; + +option java_package = "com.udp.tc.tests"; +option java_outer_classname = "MessageInc"; +option cc_generic_services = false; +option java_generic_services = false; +option py_generic_services = false; + + +message IncludeMe { + required int32 test = 1; + optional uint32 blah = 2; + optional uint32 justinc = 3; +} diff --git a/playground/protoc_gen_java/wscript b/playground/protoc_gen_java/wscript new file mode 100644 index 0000000000..5b4dc274b7 --- /dev/null +++ b/playground/protoc_gen_java/wscript @@ -0,0 +1,51 @@ +#! /usr/bin/env python +# encoding: utf-8 +# Federico Pellegrin, 2019 (fedepell) + +import os +from waflib import Logs + +top = '.' +out = 'build' + +def options(opt): + opt.load('compiler_cxx java') + +def configure(conf): + conf.load('compiler_cxx java protoc') + # Here you have to point to your protobuf-java JAR + conf.env.CLASSPATH_PROTOBUF = ['/usr/share/maven-repo/com/google/protobuf/protobuf-java/3.0.0/protobuf-java-3.0.0.jar'] + +def build(bld): + + # this simulates a .proto generator. 
the gen.proto is generated in build + genp = bld( + rule = "cp ${SRC} ${TGT}", + source = "proto.source", + target = "inc/gen.proto" + ) + + # cxx doesn't have a problem with this, just knows gen.proto will pop up later + bld( + features = 'cxx cxxshlib', + source = [ bld.path.find_or_declare(genp.target) ], + name = 'somelib', + target = 'somelib' + ) + + # but for java: + + # we either put grouping because of protoc java generations needs .proto to generate out fname (#2218) + # or accept that java dep is not strict on the .java file name (but relies just on explicit task ordering) + # bld.add_group() + + # inc/gen.proto is an implicit dependency, but the file is generated at + # build time while protoc extra uses it before to determine the .java file + # name that will get generated + bld( + features = 'javac protoc', + name = 'pbjava', + srcdir = bld.path.find_or_declare(genp.target).parent, + source = [ bld.path.find_or_declare(genp.target) ], + use = 'PROTOBUF', + ) diff --git a/playground/pyqt5/res/test.txt b/playground/pyqt5/res/test.txt new file mode 100644 index 0000000000..9b227d91c6 --- /dev/null +++ b/playground/pyqt5/res/test.txt @@ -0,0 +1,2 @@ +change me to see qrc dependencies! + diff --git a/playground/pyqt5/sampleRes.qrc b/playground/pyqt5/sampleRes.qrc new file mode 100644 index 0000000000..687c51007a --- /dev/null +++ b/playground/pyqt5/sampleRes.qrc @@ -0,0 +1,5 @@ + + + res/test.txt + + diff --git a/playground/pyqt5/src/firstgui.ui b/playground/pyqt5/src/firstgui.ui new file mode 100644 index 0000000000..cb7f9d30b2 --- /dev/null +++ b/playground/pyqt5/src/firstgui.ui @@ -0,0 +1,130 @@ + + + myfirstgui + + + + 0 + 0 + 411 + 247 + + + + My First Gui! + + + + + 20 + 210 + 381 + 32 + + + + Qt::Horizontal + + + QDialogButtonBox::Close + + + + + + 10 + 10 + 101 + 21 + + + + + + + 120 + 10 + 281 + 192 + + + + + + + 10 + 180 + 101 + 23 + + + + clear + + + + + + 10 + 40 + 101 + 23 + + + + add + + + + + + + buttonBox + accepted() + myfirstgui + accept() + + + 258 + 274 + + + 157 + 274 + + + + + buttonBox + rejected() + myfirstgui + reject() + + + 316 + 260 + + + 286 + 274 + + + + + clearBtn + clicked() + listWidget + clear() + + + 177 + 253 + + + 177 + 174 + + + + + diff --git a/playground/pyqt5/src/sample.py b/playground/pyqt5/src/sample.py new file mode 100644 index 0000000000..ebd3bc4be4 --- /dev/null +++ b/playground/pyqt5/src/sample.py @@ -0,0 +1,28 @@ +import sys +# If pyqt5 bindings are used uncomment the following line: +from PyQt5 import QtCore, QtGui, QtWidgets +# If pyside2 bindings are used uncomment the following line: +#from PySide2 import QtCore, QtGui, QtWidgets + +from firstgui import Ui_myfirstgui + +class MyFirstGuiProgram(Ui_myfirstgui): + def __init__(self, dialog): + Ui_myfirstgui.__init__(self) + self.setupUi(dialog) + + # Connect "add" button with a custom function (addInputTextToListbox) + self.addBtn.clicked.connect(self.addInputTextToListbox) + + def addInputTextToListbox(self): + txt = self.myTextInput.text() + self.listWidget.addItem(txt) + +if __name__ == '__main__': + app = QtWidgets.QApplication(sys.argv) + dialog = QtWidgets.QDialog() + + prog = MyFirstGuiProgram(dialog) + + dialog.show() + sys.exit(app.exec_()) diff --git a/playground/pyqt5/wscript b/playground/pyqt5/wscript new file mode 100644 index 0000000000..87e0269496 --- /dev/null +++ b/playground/pyqt5/wscript @@ -0,0 +1,32 @@ +#! 
/usr/bin/env python +# encoding: utf-8# +# Federico Pellegrin, 2016 (fedepell) + +""" +Python QT5 helper tools example: +converts QT5 Designer tools files (UI and QRC) into python files with +the appropriate tools (pyqt5 and pyside2 searched) and manages their +python compilation and installation using standard python waf Tool + +""" +def options(opt): + # Load also python to demonstrate mixed calls + opt.load('python pyqt5') + +def configure(conf): + # Recent UIC/RCC versions require explicit python generator selection + conf.env.QT_PYUIC_FLAGS = ['-g', 'python'] + conf.env.QT_PYRCC_FLAGS = ['-g', 'python'] + # Load also python to demonstrate mixed calls + conf.load('python pyqt5') + conf.check_python_version((2,7,4)) + +def build(bld): + # Demonstrates mixed usage of py and pyqt5 module, and tests also install_path and install_from + # (since generated files go into build it has to be reset inside the pyqt5 tool) + bld(features="py pyqt5", source="src/sample.py src/firstgui.ui", install_path="${PREFIX}/play/", install_from="src/") + + # Simple usage on a resource file. If a file referenced inside the resource changes it will be rebuilt + # as the qrc XML is parsed and dependencies are calculated + bld(features="pyqt5", source="sampleRes.qrc") + diff --git a/playground/pytest/src/bar/__init__.py b/playground/pytest/src/bar/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/playground/pytest/src/bar/bar.py b/playground/pytest/src/bar/bar.py new file mode 100644 index 0000000000..4fb91625a7 --- /dev/null +++ b/playground/pytest/src/bar/bar.py @@ -0,0 +1,11 @@ +#! /usr/bin/env python +# encoding: utf-8 +# Calle Rosenquist, 2017 (xbreak) + +import os + +def read_resource(): + filename = os.path.join(os.path.dirname(__file__), 'resource.txt') + with open(filename, 'r') as f: + return f.readline() + diff --git a/playground/pytest/src/bar/resource.txt b/playground/pytest/src/bar/resource.txt new file mode 100644 index 0000000000..b46ba6ead9 --- /dev/null +++ b/playground/pytest/src/bar/resource.txt @@ -0,0 +1 @@ +resource! diff --git a/playground/pytest/src/baz/baz_ext.c b/playground/pytest/src/baz/baz_ext.c new file mode 100644 index 0000000000..2a88d19a6e --- /dev/null +++ b/playground/pytest/src/baz/baz_ext.c @@ -0,0 +1,25 @@ +#include <Python.h> + +static PyObject *ping() { + return PyUnicode_FromString("pong"); +} + +static PyMethodDef methods[] = { + {"ping", ping, METH_VARARGS, "Ping function"}, + {NULL, NULL, 0, NULL} +}; + +static struct PyModuleDef module = { + PyModuleDef_HEAD_INIT, + "ping", + NULL, /* no docs */ + -1, + methods +}; + +PyMODINIT_FUNC +PyInit_baz_ext(void) +{ + return PyModule_Create(&module); +} + diff --git a/playground/pytest/src/foo/__init__.py b/playground/pytest/src/foo/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/playground/pytest/src/foo/foo.py b/playground/pytest/src/foo/foo.py new file mode 100644 index 0000000000..387397c86e --- /dev/null +++ b/playground/pytest/src/foo/foo.py @@ -0,0 +1,15 @@ +#!
/usr/bin/env python +# encoding: utf-8 +# Calle Rosenquist, 2016 (xbreak) + +import os + +# Import the foo extension shared object +from foo import foo_ext + +def sum(a, b): + return a + b + +def ping(): + return foo_ext.ping() + diff --git a/playground/pytest/src/foo_ext.c b/playground/pytest/src/foo_ext.c new file mode 100644 index 0000000000..e5e7a62b37 --- /dev/null +++ b/playground/pytest/src/foo_ext.c @@ -0,0 +1,25 @@ +#include <Python.h> + +static PyObject *ping() { + return PyUnicode_FromString("pong"); +} + +static PyMethodDef methods[] = { + {"ping", ping, METH_VARARGS, "Ping function"}, + {NULL, NULL, 0, NULL} +}; + +static struct PyModuleDef module = { + PyModuleDef_HEAD_INIT, + "ping", + NULL, /* no docs */ + -1, + methods +}; + +PyMODINIT_FUNC +PyInit_foo_ext(void) +{ + return PyModule_Create(&module); +} + diff --git a/playground/pytest/test/test_bar.py b/playground/pytest/test/test_bar.py new file mode 100644 index 0000000000..71a2807186 --- /dev/null +++ b/playground/pytest/test/test_bar.py @@ -0,0 +1,10 @@ +#! /usr/bin/env python +# encoding: utf-8 +# Calle Rosenquist, 2016 (xbreak) + +import unittest +from bar import bar + +class test_bar(unittest.TestCase): + def test_read_resource(self): + self.assertEqual('resource!\n', bar.read_resource()) diff --git a/playground/pytest/test/test_baz.py b/playground/pytest/test/test_baz.py new file mode 100644 index 0000000000..4f9a79a7a6 --- /dev/null +++ b/playground/pytest/test/test_baz.py @@ -0,0 +1,10 @@ +#! /usr/bin/env python +# encoding: utf-8 +# Calle Rosenquist, 2017 (xbreak) + +import unittest + +class test_baz(unittest.TestCase): + + def test_import_baz_ext(self): + import baz_ext diff --git a/playground/pytest/test/test_foo.py b/playground/pytest/test/test_foo.py new file mode 100644 index 0000000000..1dfc5bc517 --- /dev/null +++ b/playground/pytest/test/test_foo.py @@ -0,0 +1,16 @@ +#! /usr/bin/env python +# encoding: utf-8 +# Calle Rosenquist, 2016 (xbreak) + +import unittest +from foo import foo + +class test_foo(unittest.TestCase): + def test_add_integers(self): + self.assertEqual(7, foo.sum(4, 3)) + + def test_add_strings(self): + self.assertEqual('foobar', foo.sum('foo', 'bar')) + + def test_foo(self): + self.assertEqual('pong', foo.ping()) diff --git a/playground/pytest/wscript b/playground/pytest/wscript new file mode 100644 index 0000000000..f2dac3cf4f --- /dev/null +++ b/playground/pytest/wscript @@ -0,0 +1,94 @@ +#! /usr/bin/env python +# encoding: utf-8 +# Calle Rosenquist, 2016 (xbreak) + +""" +Execute Python tests during build + +To force all tests, run with "waf build --alltests" +""" + +top = '.' +out = 'build' + +def test_results(bld): + """ + Custom post-build function that prints out test results. + """ + lst = getattr(bld, 'utest_results', []) + if not lst: + return + for (f, code, out, err) in lst: + print(out.decode('utf-8')) + print(err.decode('utf-8')) + + +def options(opt): + opt.load('python compiler_c') + opt.load('waf_unit_test pytest') + +def configure(cnf): + cnf.load('python compiler_c waf_unit_test pytest buildcopy') + # The foo_ext module is using Python 3: + cnf.check_python_version(minver=(3, 0, 0)) + cnf.check_python_headers() + +def build(bld): + # foo_ext and baz_ext are Python C extensions that demonstrate unit test + # environment population of PYTHONPATH and LD_LIBRARY_PATH/PATH/DYLD_LIBRARY_PATH. + + # foo_ext is installed as part of the foo Python package and thus does not need + # to specify a PYTHONPATH via pytest_path.
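+	# [Editor's aside, illustrative only; not part of the original example]
+	# Roughly speaking, the pytest feature assembles the test environment from the
+	# taskgens named in ``use``: their Python modules end up on PYTHONPATH and the
+	# directories of dependent shared libraries on LD_LIBRARY_PATH/PATH/DYLD_LIBRARY_PATH,
+	# while an explicit ``pytest_path`` adds extra PYTHONPATH entries. A hypothetical
+	# stand-alone extension built under some/dir would be exposed to the tests with
+	# something like:
+	#     bld(name = 'my_ext', features = 'c cshlib pyext',
+	#         source = 'some/dir/my_ext.c', target = 'some/dir/my_ext',
+	#         pytest_path = [bld.path.find_dir('some/dir').get_bld()])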
+ bld(name = 'foo_ext', + features = 'c cshlib pyext', + source = 'src/foo_ext.c', + target = 'src/foo/foo_ext', + install_path = '${PYTHONDIR}/foo') + + # baz_ext is a stand-alone Python module so we need to specify pytest_path to where baz is built: + bld(name = 'baz_ext', + features = 'c cshlib pyext', + source = 'src/baz/baz_ext.c', + target = 'src/baz/baz_ext', + install_path = '${PYTHONDIR}', + pytest_path = [bld.path.find_dir('src/baz').get_bld()]) + + # Foo is a Python package that together with foo_ext is complete. + # Since the package is incomplete in the source directory and cannot be tested there + # we use the `buildcopy' feature to copy sources to build. + # + # If buildcopy_source is not specified, source will be used as input. + bld(name = 'foo', + features = 'py buildcopy', + use = 'foo_ext', + source = bld.path.ant_glob('src/foo/*.py'), + install_from = 'src') + + # The bar module has a non-Python dependency to resource.txt which we want to copy, + # but in this case we cannot add resource.txt to the sources because there's no feature + # for it. Therefore, we use the attribute buildcopy_source instead. + bld(name = 'bar', + features = 'py buildcopy', + source = bld.path.ant_glob('src/bar/*.py'), + buildcopy_source = bld.path.ant_glob('src/bar/*.py') + ['src/bar/resource.txt'], + install_from = 'src') + + # Unit test example using the built in module unittest and let that discover + # any test cases. + # By using ``foo bar baz_ext`` the relevant variables for those taskgens + # will be added to sys.path via ``PYTHONPATH`` as well as any library paths from + # dependent libraries to the system library path e.g. ``LD_LIBRARY_PATH``. + # + # The dependency chain looks like the following: + # + # foo_test -> foo -> foo_ext -> libpython (external) + # -> bar -> (resource.txt) + # -> baz_ext -> libpython (external) + # + bld(name = 'py_test', + features = 'pytest', + use = 'foo bar baz_ext', + pytest_source = bld.path.ant_glob('test/*.py'), + ut_str = '${PYTHON} -B -m unittest discover') + + bld.add_post_fun(test_results) diff --git a/playground/qt4/but.ui b/playground/qt4/but.ui new file mode 100644 index 0000000000..40c8c4f2e1 --- /dev/null +++ b/playground/qt4/but.ui @@ -0,0 +1,32 @@ + + + Form + + + + 0 + 0 + 208 + 113 + + + + Form + + + + + 40 + 30 + 83 + 26 + + + + Hello, world! 
+ + + + + + diff --git a/playground/qt4/data/some.txt b/playground/qt4/data/some.txt new file mode 100644 index 0000000000..3c58ea3478 --- /dev/null +++ b/playground/qt4/data/some.txt @@ -0,0 +1 @@ +tada tadam tadadam diff --git a/demos/qt4/foo.cpp b/playground/qt4/foo.cpp similarity index 100% rename from demos/qt4/foo.cpp rename to playground/qt4/foo.cpp diff --git a/demos/qt4/foo.h b/playground/qt4/foo.h similarity index 100% rename from demos/qt4/foo.h rename to playground/qt4/foo.h diff --git a/playground/qt4/linguist/fr.ts b/playground/qt4/linguist/fr.ts new file mode 100644 index 0000000000..415c174ac5 --- /dev/null +++ b/playground/qt4/linguist/fr.ts @@ -0,0 +1,4 @@ + + + + diff --git a/demos/qt4/main.cpp b/playground/qt4/main.cpp similarity index 100% rename from demos/qt4/main.cpp rename to playground/qt4/main.cpp diff --git a/demos/qt4/textures.qrc b/playground/qt4/textures.qrc similarity index 100% rename from demos/qt4/textures.qrc rename to playground/qt4/textures.qrc diff --git a/demos/qt4/wscript b/playground/qt4/wscript similarity index 100% rename from demos/qt4/wscript rename to playground/qt4/wscript diff --git a/playground/qt5-and-pyqt5/pyqt5/res/test.txt b/playground/qt5-and-pyqt5/pyqt5/res/test.txt new file mode 100644 index 0000000000..9b227d91c6 --- /dev/null +++ b/playground/qt5-and-pyqt5/pyqt5/res/test.txt @@ -0,0 +1,2 @@ +change me to see qrc dependencies! + diff --git a/playground/qt5-and-pyqt5/pyqt5/sampleRes.qrc b/playground/qt5-and-pyqt5/pyqt5/sampleRes.qrc new file mode 100644 index 0000000000..687c51007a --- /dev/null +++ b/playground/qt5-and-pyqt5/pyqt5/sampleRes.qrc @@ -0,0 +1,5 @@ + + + res/test.txt + + diff --git a/playground/qt5-and-pyqt5/pyqt5/src/firstgui.ui b/playground/qt5-and-pyqt5/pyqt5/src/firstgui.ui new file mode 100644 index 0000000000..cb7f9d30b2 --- /dev/null +++ b/playground/qt5-and-pyqt5/pyqt5/src/firstgui.ui @@ -0,0 +1,130 @@ + + + myfirstgui + + + + 0 + 0 + 411 + 247 + + + + My First Gui! 
+ + + + + 20 + 210 + 381 + 32 + + + + Qt::Horizontal + + + QDialogButtonBox::Close + + + + + + 10 + 10 + 101 + 21 + + + + + + + 120 + 10 + 281 + 192 + + + + + + + 10 + 180 + 101 + 23 + + + + clear + + + + + + 10 + 40 + 101 + 23 + + + + add + + + + + + + buttonBox + accepted() + myfirstgui + accept() + + + 258 + 274 + + + 157 + 274 + + + + + buttonBox + rejected() + myfirstgui + reject() + + + 316 + 260 + + + 286 + 274 + + + + + clearBtn + clicked() + listWidget + clear() + + + 177 + 253 + + + 177 + 174 + + + + + diff --git a/playground/qt5-and-pyqt5/pyqt5/src/sample.py b/playground/qt5-and-pyqt5/pyqt5/src/sample.py new file mode 100644 index 0000000000..80335e7c47 --- /dev/null +++ b/playground/qt5-and-pyqt5/pyqt5/src/sample.py @@ -0,0 +1,24 @@ +import sys +from PyQt5 import QtCore, QtGui, QtWidgets +from firstgui import Ui_myfirstgui + +class MyFirstGuiProgram(Ui_myfirstgui): + def __init__(self, dialog): + Ui_myfirstgui.__init__(self) + self.setupUi(dialog) + + # Connect "add" button with a custom function (addInputTextToListbox) + self.addBtn.clicked.connect(self.addInputTextToListbox) + + def addInputTextToListbox(self): + txt = self.myTextInput.text() + self.listWidget.addItem(txt) + +if __name__ == '__main__': + app = QtWidgets.QApplication(sys.argv) + dialog = QtWidgets.QDialog() + + prog = MyFirstGuiProgram(dialog) + + dialog.show() + sys.exit(app.exec_()) diff --git a/playground/qt5-and-pyqt5/pyqt5/wscript b/playground/qt5-and-pyqt5/pyqt5/wscript new file mode 100644 index 0000000000..a5e242d934 --- /dev/null +++ b/playground/qt5-and-pyqt5/pyqt5/wscript @@ -0,0 +1,29 @@ +#! /usr/bin/env python +# encoding: utf-8# +# Federico Pellegrin, 2016 (fedepell) + +""" +Python QT5 helper tools example: +converts QT5 Designer tools files (UI and QRC) into python files with +the appropriate tools (pyqt5 and pyside2 searched) and manages their +python compilation and installation using standard python waf Tool + +""" +def options(opt): + # Load also python to demonstrate mixed calls + opt.load('python pyqt5') + +def configure(conf): + # Load also python to demonstrate mixed calls + conf.load('python pyqt5') + conf.check_python_version((2,7,4)) + +def build(bld): + # Demonstrates mixed usage of py and pyqt5 module, and tests also install_path and install_from + # (since generated files go into build it has to be reset inside the pyqt5 tool) + bld(features="py pyqt5", source="src/sample.py src/firstgui.ui", install_path="${PREFIX}/play/", install_from="src/") + + # Simple usage on a resource file. If a file referenced inside the resource changes it will be rebuilt + # as the qrc XML is parsed and dependencies are calculated + bld(features="pyqt5", source="sampleRes.qrc") + diff --git a/playground/qt5-and-pyqt5/qt5/but.ui b/playground/qt5-and-pyqt5/qt5/but.ui new file mode 100644 index 0000000000..40c8c4f2e1 --- /dev/null +++ b/playground/qt5-and-pyqt5/qt5/but.ui @@ -0,0 +1,32 @@ + + + Form + + + + 0 + 0 + 208 + 113 + + + + Form + + + + + 40 + 30 + 83 + 26 + + + + Hello, world! 
+ + + + + + diff --git a/playground/qt5-and-pyqt5/qt5/data/some.txt b/playground/qt5-and-pyqt5/qt5/data/some.txt new file mode 100644 index 0000000000..3c58ea3478 --- /dev/null +++ b/playground/qt5-and-pyqt5/qt5/data/some.txt @@ -0,0 +1 @@ +tada tadam tadadam diff --git a/playground/qt5-and-pyqt5/qt5/foo.cpp b/playground/qt5-and-pyqt5/qt5/foo.cpp new file mode 100644 index 0000000000..ab5620d66f --- /dev/null +++ b/playground/qt5-and-pyqt5/qt5/foo.cpp @@ -0,0 +1,21 @@ +// Thomas Nagy, 2011-2016 + +#include "foo.h" + +Foo::Foo() : QWidget(NULL) { + +} + +class Bar_private : public QWidget { + Q_OBJECT + signals: + void test(); + public: + Bar_private(); +}; + +Bar_private::Bar_private() : QWidget(NULL) { +} + +#include "foo.moc" + diff --git a/playground/qt5-and-pyqt5/qt5/foo.h b/playground/qt5-and-pyqt5/qt5/foo.h new file mode 100644 index 0000000000..e05b3e99b6 --- /dev/null +++ b/playground/qt5-and-pyqt5/qt5/foo.h @@ -0,0 +1,16 @@ +// Thomas Nagy, 2011-2016 + +#ifndef _FOO +#define _FOO + +#include <QWidget> + +class Foo : public QWidget { + Q_OBJECT + signals: + void test(); + public: + Foo(); +}; + +#endif diff --git a/playground/qt5-and-pyqt5/qt5/linguist/fr.ts b/playground/qt5-and-pyqt5/qt5/linguist/fr.ts new file mode 100644 index 0000000000..415c174ac5 --- /dev/null +++ b/playground/qt5-and-pyqt5/qt5/linguist/fr.ts @@ -0,0 +1,4 @@ + + + + diff --git a/playground/qt5-and-pyqt5/qt5/main.cpp b/playground/qt5-and-pyqt5/qt5/main.cpp new file mode 100644 index 0000000000..4f407c6cad --- /dev/null +++ b/playground/qt5-and-pyqt5/qt5/main.cpp @@ -0,0 +1,26 @@ +// Thomas Nagy, 2016 (ita) + +#include <QApplication> +//#include +//#include "mainwindow.h" +#include "ui_but.h" + +int main(int argc, char **argv) +{ + Q_INIT_RESOURCE(res); + QApplication app(argc, argv); + QWidget window; + Ui::Form ui; + ui.setupUi(&window); + window.show(); + return app.exec(); +/* + MainWindow window; + if (argc == 2) + window.openFile(argv[1]); + else + window.openFile(":/files/bubbles.svg"); + window.show(); + return app.exec(); +*/ +} diff --git a/playground/qt5-and-pyqt5/qt5/res.qrc b/playground/qt5-and-pyqt5/qt5/res.qrc new file mode 100644 index 0000000000..4cd17c4c15 --- /dev/null +++ b/playground/qt5-and-pyqt5/qt5/res.qrc @@ -0,0 +1,7 @@ + + + + ../../../docs/slides/presentation/gfx/waflogo.svg + + + diff --git a/playground/qt5-and-pyqt5/qt5/wscript b/playground/qt5-and-pyqt5/qt5/wscript new file mode 100644 index 0000000000..5ab374c916 --- /dev/null +++ b/playground/qt5-and-pyqt5/qt5/wscript @@ -0,0 +1,34 @@ +#! /usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2016 (ita) + +VERSION='0.0.1' +APPNAME='qt5_test' + +top = '.'
+out = 'build' + +def options(opt): + opt.load('compiler_cxx qt5') + +def configure(conf): + conf.load('compiler_cxx qt5') + #conf.env.append_value('CXXFLAGS', ['-g']) # test + +def build(bld): + # According to the Qt5 documentation: + # Qt classes in foo.h -> declare foo.h as a header to be processed by moc + # add the resulting moc_foo.cpp to the source files + # Qt classes in foo.cpp -> include foo.moc at the end of foo.cpp + # + bld( + features = 'qt5 cxx cxxprogram', + use = 'QT5CORE QT5GUI QT5SVG QT5WIDGETS', + source = 'main.cpp res.qrc but.ui foo.cpp', + moc = 'foo.h', + target = 'window', + includes = '.', + lang = bld.path.ant_glob('linguist/*.ts'), + langname = 'somefile', # include the .qm files from somefile.qrc + ) + diff --git a/playground/qt5-and-pyqt5/qtchainer/qtchainer.py b/playground/qt5-and-pyqt5/qtchainer/qtchainer.py new file mode 100644 index 0000000000..55d4c08b43 --- /dev/null +++ b/playground/qt5-and-pyqt5/qtchainer/qtchainer.py @@ -0,0 +1,53 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Federico Pellegrin, 2016 (fedepell) + +# +# Example extra that chains to either qt5 or pyqt5 for QRC/UI files as +# just one handler for an extension can be natively defined. The extra +# has to be loaded after qt5 and pyqt5 and files need to have explicitly +# set the feature they want to use. +# + +import os +from waflib.Tools import python +from waflib.Tools import cxx +from waflib.extras import pyqt5 +from waflib.Tools import qt5 +from waflib import Task +from waflib.TaskGen import extension +from waflib import Logs + + +EXT_RCC = ['.qrc'] +""" +File extension for the resource (.qrc) files +""" + +EXT_UI = ['.ui'] +""" +File extension for the user interface (.ui) files +""" + + +@extension(*EXT_RCC) +def create_chain_task(self, node): + "Creates rcc and py task for ``.qrc`` files" + if 'qt5' in self.features: + qt5.create_rcc_task(self, node) + elif 'pyqt5' in self.features: + pyqt5.create_pyrcc_task(self, node) + else: + Logs.warn("No feature explicitly defined for '%s'",node) + + +@extension(*EXT_UI) +def create_chain_task(self, node): + "Create uic tasks and py for user interface ``.ui`` definition files" + if 'qt5' in self.features: + qt5.create_uic_task(self, node) + elif 'pyqt5' in self.features: + pyqt5.create_pyuic_task(self, node) + else: + Logs.warn("No feature explicitly defined for '%s'",node) + diff --git a/playground/qt5-and-pyqt5/wscript b/playground/qt5-and-pyqt5/wscript new file mode 100644 index 0000000000..cd679ea929 --- /dev/null +++ b/playground/qt5-and-pyqt5/wscript @@ -0,0 +1,28 @@ +#! /usr/bin/env python +# encoding: utf-8# +# Federico Pellegrin, 2016 (fedepell) + +# Simple example with custom local extra tool to be able to use at the same +# time both qt5 and Python qt5. Both have a handler to some extensions +# (qrc/ui) so the last one loaded will overwrite the previous one. +# The small extra tool will just override the handler and pass to the +# correct one as needed. Must be loaded after qt5 and pyqt5. +# + +def options(opt): + # Load what needed for qt5 and pyqt5 and chainer as *last* so it + # will chain to the proper one depending on feature + opt.load('compiler_cxx qt5 python pyqt5') + opt.load('qtchainer', tooldir='qtchainer') + +def configure(conf): + conf.load('compiler_cxx qt5 python pyqt5 qtchainer') + conf.check_python_version((2,7,4)) + +def build(bld): + # Build both pyqt5 and qt5. 
+ # - qt5 is from demos/qt5, just a reference to waflogo.svg has been + # fixed as the directory is not one level deeper in this playground + # - pyqt5 is from playground/pyqt5 + bld.recurse("pyqt5 qt5") + diff --git a/playground/redirect/wscript b/playground/redirect/wscript index afb8428d63..cf33f847b8 100644 --- a/playground/redirect/wscript +++ b/playground/redirect/wscript @@ -4,7 +4,7 @@ Run: "waf configure clean build" The program "app" writes to both stdout and stderr, it is executed -directly or through another python process (just for the fun of it) +directly or through another python process """ top = '.' diff --git a/playground/sphinx/src/conf.py b/playground/sphinx/src/conf.py new file mode 100644 index 0000000000..7bb1ed699e --- /dev/null +++ b/playground/sphinx/src/conf.py @@ -0,0 +1,173 @@ +# -*- coding: utf-8 -*- +# +# Configuration file for the Sphinx documentation builder. +# +# This file does only contain a selection of the most common options. For a +# full list see the documentation: +# http://www.sphinx-doc.org/en/master/config + +# -- Path setup -------------------------------------------------------------- + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +# +# import os +# import sys +# sys.path.insert(0, os.path.abspath('.')) + + +# -- Project information ----------------------------------------------------- + +project = 'project' +copyright = '2019, sphinx' +author = 'sphinx' + +# The short X.Y version +version = '' +# The full version, including alpha/beta/rc tags +release = '1.0.0' + + +# -- General configuration --------------------------------------------------- + +# If your documentation needs a minimal Sphinx version, state it here. +# +# needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# +# source_suffix = ['.rst', '.md'] +source_suffix = '.rst' + +# The master toctree document. +master_doc = 'index' + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path. +exclude_patterns = [] + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = None + + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = 'alabaster' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +# +# html_theme_options = {} + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. 
They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +# Custom sidebar templates, must be a dictionary that maps document names +# to template names. +# +# The default sidebars (for documents that don't match any pattern) are +# defined by theme itself. Builtin themes are using these templates by +# default: ``['localtoc.html', 'relations.html', 'sourcelink.html', +# 'searchbox.html']``. +# +# html_sidebars = {} + + +# -- Options for HTMLHelp output --------------------------------------------- + +# Output file base name for HTML help builder. +htmlhelp_basename = 'projectdoc' + + +# -- Options for LaTeX output ------------------------------------------------ + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # + # 'papersize': 'letterpaper', + + # The font size ('10pt', '11pt' or '12pt'). + # + # 'pointsize': '10pt', + + # Additional stuff for the LaTeX preamble. + # + # 'preamble': '', + + # Latex figure (float) alignment + # + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + (master_doc, 'project.tex', 'project Documentation', + 'sphinx', 'manual'), +] + + +# -- Options for manual page output ------------------------------------------ + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + (master_doc, 'project', 'project Documentation', + [author], 1) +] + + +# -- Options for Texinfo output ---------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + (master_doc, 'project', 'project Documentation', + author, 'project', 'One line description of project.', + 'Miscellaneous'), +] + + +# -- Options for Epub output ------------------------------------------------- + +# Bibliographic Dublin Core info. +epub_title = project + +# The unique identifier of the text. This can be a ISBN number +# or the project homepage. +# +# epub_identifier = '' + +# A unique identification for the text. +# +# epub_uid = '' + +# A list of files that should not be packed into the epub file. +epub_exclude_files = ['search.html'] diff --git a/playground/sphinx/src/index.rst b/playground/sphinx/src/index.rst new file mode 100644 index 0000000000..e8606fe774 --- /dev/null +++ b/playground/sphinx/src/index.rst @@ -0,0 +1,20 @@ +.. project documentation master file, created by + sphinx-quickstart on Sun May 26 12:49:41 2019. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Welcome to project's documentation! +=================================== + +.. 
toctree:: + :maxdepth: 2 + :caption: Contents: + + + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/playground/sphinx/wscript b/playground/sphinx/wscript new file mode 100644 index 0000000000..0df455e595 --- /dev/null +++ b/playground/sphinx/wscript @@ -0,0 +1,14 @@ +"""Sphinx documentation wscript example +""" + + +def configure(cnf): + cnf.load('sphinx') + + +def build(bld): + # When multiple output format are given, the install_path_FMT can specify where to place a specific format, fallback is always on install_path + bld(features='sphinx', sphinx_source='src', sphinx_output_format=['html', 'info', 'man'], install_path_man='${DOCDIR}/man', install_path='${DOCDIR}') + + # Old style syntax, with single format + # bld(features='sphinx', sphinx_source='src', sphinx_output_format='man') diff --git a/playground/stale_files/wscript b/playground/stale_files/wscript index 6e9dd86d94..4b07637e7c 100644 --- a/playground/stale_files/wscript +++ b/playground/stale_files/wscript @@ -4,7 +4,7 @@ """ See waflib/extras/stale.py for more information. -Do not forget to reconfigure the proect after changing "configure" below +Do not forget to reconfigure the project after changing "configure" below """ VERSION='0.0.1' diff --git a/playground/stpl_c_py_cs_satellite_wix/src/api/funi.c.stpl b/playground/stpl_c_py_cs_satellite_wix/src/api/funi.c.stpl new file mode 100644 index 0000000000..5857da8722 --- /dev/null +++ b/playground/stpl_c_py_cs_satellite_wix/src/api/funi.c.stpl @@ -0,0 +1,10 @@ +#include "funi.h" + +%for i in range(maxfuni): +int func{{i}}(int a) +{ + if (a < {{i}}) + return a; + return {{i}}; +} +%end diff --git a/playground/stpl_c_py_cs_satellite_wix/src/api/funi.h.stpl b/playground/stpl_c_py_cs_satellite_wix/src/api/funi.h.stpl new file mode 100644 index 0000000000..5712f0f7d1 --- /dev/null +++ b/playground/stpl_c_py_cs_satellite_wix/src/api/funi.h.stpl @@ -0,0 +1,36 @@ +#pragma once +#if defined _WIN32 || defined __CYGWIN__ + #ifdef BUILDING_DLL + #ifdef __GNUC__ + #define DLL_PUBLIC __attribute__ ((dllexport)) + #else + #define DLL_PUBLIC __declspec(dllexport) + #endif + #else + #ifdef __GNUC__ + #define DLL_PUBLIC __attribute__ ((dllimport)) + #else + #define DLL_PUBLIC __declspec(dllimport) + #endif + #endif + #define DLL_LOCAL +#else + #if __GNUC__ >= 4 + #define DLL_PUBLIC __attribute__ ((visibility ("default"))) + #define DLL_LOCAL __attribute__ ((visibility ("hidden"))) + #else + #define DLL_PUBLIC + #define DLL_LOCAL + #endif +#endif + +#ifdef __cplusplus +extern "C" +{ +#endif +%for i in range(maxfuni): +DLL_PUBLIC int func{{i}}(int a); +%end +#ifdef __cplusplus +}//extern +#endif diff --git a/playground/stpl_c_py_cs_satellite_wix/src/api/funi.py.stpl b/playground/stpl_c_py_cs_satellite_wix/src/api/funi.py.stpl new file mode 100644 index 0000000000..2a6af0f40a --- /dev/null +++ b/playground/stpl_c_py_cs_satellite_wix/src/api/funi.py.stpl @@ -0,0 +1,35 @@ +#! 
/usr/bin/env python +from cffi import FFI +import os.path +import re + +_ffi = FFI() + +pat = '%s.dll' +if os.sep == '/': + pat = 'lib%s.so' + +_apifile = os.path.join(os.path.dirname(__file__),'funi.h') +_dllname = os.path.join(os.path.split(__file__)[0], pat % 'funi') +def _api(): + with open(_apifile) as f: + api = f.readlines() + rng = [i for i in range(len(api)) if re.search('extern', api[i])] + apicffi = [] + for i in range(rng[0]+2,rng[1]-1): + a = api[i] + if not re.search('^#',a): + a = a.replace('DLL_PUBLIC','') + apicffi += [a] + return apicffi + +_ffi.cdef('\n'.join(_api()),override=True) + +_dll = _ffi.dlopen(_dllname) + +class Func: +%for i in range(maxfuni): + @staticmethod + def func{{i}}(a): + return _dll.func{{i}}(a); +%end diff --git a/playground/stpl_c_py_cs_satellite_wix/src/api/funi_cs.cs.stpl b/playground/stpl_c_py_cs_satellite_wix/src/api/funi_cs.cs.stpl new file mode 100644 index 0000000000..1311c79507 --- /dev/null +++ b/playground/stpl_c_py_cs_satellite_wix/src/api/funi_cs.cs.stpl @@ -0,0 +1,15 @@ +// vi:syntax=cs +using System; +using System.Runtime.InteropServices; + +namespace Funi +{ + public class Func + { + %for i in range(maxfuni): + [DllImport("{{dllname}}", CallingConvention = CallingConvention.Cdecl)] + public static extern int func{{i}}(int a); + %end + } +} + diff --git a/playground/stpl_c_py_cs_satellite_wix/src/api/test_funi.cpp.stpl b/playground/stpl_c_py_cs_satellite_wix/src/api/test_funi.cpp.stpl new file mode 100644 index 0000000000..69dc011e40 --- /dev/null +++ b/playground/stpl_c_py_cs_satellite_wix/src/api/test_funi.cpp.stpl @@ -0,0 +1,10 @@ +#include <iostream> +#include "funi.h" +using namespace std; +int main() +{ + %for i in range(maxfuni): + cout << func{{i}}(2) << endl; + %end + return 0; +} diff --git a/playground/stpl_c_py_cs_satellite_wix/src/api/test_funi.py.stpl b/playground/stpl_c_py_cs_satellite_wix/src/api/test_funi.py.stpl new file mode 100644 index 0000000000..244ebd666d --- /dev/null +++ b/playground/stpl_c_py_cs_satellite_wix/src/api/test_funi.py.stpl @@ -0,0 +1,12 @@ +import funi + +%for i in range(maxfuni): + +def test_func{{i}}(): + assert funi.Func.func{{i}}({{i}}) == {{i}} + assert funi.Func.func{{i}}({{i}}-1) == {{i}}-1 + assert funi.Func.func{{i}}({{i}}+{{maxfuni}}) == {{i}} + +%end + + diff --git a/playground/stpl_c_py_cs_satellite_wix/src/api/wscript_build b/playground/stpl_c_py_cs_satellite_wix/src/api/wscript_build new file mode 100644 index 0000000000..06a5bd7783 --- /dev/null +++ b/playground/stpl_c_py_cs_satellite_wix/src/api/wscript_build @@ -0,0 +1,32 @@ +#!
/usr/bin/env python +# encoding: utf-8 +#vim syntax=python + +funi = bld.env.dllname +gui = bld.env.guiname+'.exe' +csdll = bld.env.dllname+'_cs.dll' + +bld(rule=bld.stpl,source='funi.c.stpl',target='funi.c') +bld(rule=bld.stpl,source='funi.h.stpl',target='funi.h') +bld(rule=bld.stpl,source='funi_cs.cs.stpl',target='funi_cs.cs') +bld(rule=bld.stpl,source='funi.py.stpl',target='funi.py') +bld(rule=bld.stpl,source='test_funi.py.stpl',target='test_funi.py') +bld(rule=bld.stpl,source='test_funi.cpp.stpl',target='test_funi.cpp') + +bld.add_group() + +bld.shlib( + source = ['funi.c'], + target = funi, + defines = ['BUILDING_DLL']) + +bld.program( + source = 'test_funi.cpp', + includes = ['.'], + target = 'test_funi', + use = funi) + +bld.add_group() + +bld(features='cs',source='funi_cs.cs',gen=csdll,includes='.', name='csdll') + diff --git a/playground/stpl_c_py_cs_satellite_wix/src/build.py b/playground/stpl_c_py_cs_satellite_wix/src/build.py new file mode 100644 index 0000000000..0eb984be8f --- /dev/null +++ b/playground/stpl_c_py_cs_satellite_wix/src/build.py @@ -0,0 +1,28 @@ +#! /usr/bin/env python +# encoding: utf-8 + +import sys +import os +import codecs +import bottle +import shutil + +def stpl(tsk): + ps = tsk.inputs[0].abspath() + pt = tsk.outputs[0].abspath() + bld = tsk.generator.bld + lookup,name=os.path.split(ps) + st=bottle.template(name,template_lookup=[lookup], company = bld.env.company, guiname=bld.env.guiname, version=bld.env.version, + dllname=bld.env.dllname, maxfuni=bld.env.maxfuni) + with codecs.open(pt,mode='w',encoding="utf-8") as f: f.write(st) + os.chmod(pt, 493) + +# copy files that already exist +def src2bld(self, filename): + self(features='subst', source=filename, target=filename, is_copy=True) + +def build(bld): + # clean initialization + bld.src2bld = src2bld + bld.stpl = stpl + diff --git a/playground/stpl_c_py_cs_satellite_wix/src/gui/AssemblyInfo.cs.stpl b/playground/stpl_c_py_cs_satellite_wix/src/gui/AssemblyInfo.cs.stpl new file mode 100644 index 0000000000..9ee50798da --- /dev/null +++ b/playground/stpl_c_py_cs_satellite_wix/src/gui/AssemblyInfo.cs.stpl @@ -0,0 +1,22 @@ +// vim:syntax=cs +using System.Reflection; +using System.Runtime.CompilerServices; +using System.Runtime.InteropServices; +using System.Resources; + +[assembly: AssemblyTitle("{{guiname}}")] +[assembly: AssemblyDescription("GUI project to test of WAF")] +[assembly: AssemblyConfiguration("Development Release")] +[assembly: AssemblyCompany("{{company}}")] +[assembly: AssemblyProduct("{{guiname}}")] +[assembly: AssemblyCopyright("Copyright © {{company}} 2015")] +[assembly: AssemblyTrademark("")] +[assembly: AssemblyCulture("")] +[assembly: NeutralResourcesLanguageAttribute("en", UltimateResourceFallbackLocation.MainAssembly)] + +[assembly: ComVisible(false)] + +[assembly: Guid("267d6386-3c5e-4334-a323-c5274f881cdc")] + +[assembly: AssemblyVersion("{{version}}")] +[assembly: AssemblyFileVersion("{{version}}")] diff --git a/playground/stpl_c_py_cs_satellite_wix/src/gui/FormFuni.cs b/playground/stpl_c_py_cs_satellite_wix/src/gui/FormFuni.cs new file mode 100644 index 0000000000..df2aa16cee --- /dev/null +++ b/playground/stpl_c_py_cs_satellite_wix/src/gui/FormFuni.cs @@ -0,0 +1,46 @@ +using System; +using System.Windows.Forms; +using System.Threading; +using System.Resources; +using Funi; + +namespace Funi +{ + public class FormFuni : Form + { + public FormFuni() + { + InitializeComponent(); + } + + private static System.Resources.ResourceManager res = new 
System.Resources.ResourceManager("funigui.Resources", typeof(FormFuni).Assembly); + + private void InitializeComponent() + { + this.text1 = new System.Windows.Forms.Label(); + this.SuspendLayout(); + string greeting = res.GetString("greeting"); + // text1 + this.text1.Dock = System.Windows.Forms.DockStyle.Top; + this.text1.Location = new System.Drawing.Point(0, 0); + this.text1.Name = "text1"; + this.text1.Text = greeting + Func.func3(4).ToString(); + this.text1.Size = new System.Drawing.Size(289, 369); + this.text1.TabIndex = 0; + /// text1 + // FormFuni + this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 13F); + this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font; + this.ClientSize = new System.Drawing.Size(200, 200); + this.Controls.Add(this.text1); + this.FormBorderStyle = System.Windows.Forms.FormBorderStyle.FixedSingle; + this.Name = "FormFuni"; + this.Text = greeting; + /// FormFuni + this.ResumeLayout(false); + + } + + private System.Windows.Forms.Label text1; + } +} diff --git a/playground/stpl_c_py_cs_satellite_wix/src/gui/Resources.resx b/playground/stpl_c_py_cs_satellite_wix/src/gui/Resources.resx new file mode 100644 index 0000000000..ee68392462 --- /dev/null +++ b/playground/stpl_c_py_cs_satellite_wix/src/gui/Resources.resx @@ -0,0 +1,65 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + text/microsoft-resx + + + 2.0 + + + System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=a77a5c561934e089 + + + System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=a77a5c561934e089 + + + + Hello + + diff --git a/playground/stpl_c_py_cs_satellite_wix/src/gui/Resources/Icon1.ico b/playground/stpl_c_py_cs_satellite_wix/src/gui/Resources/Icon1.ico new file mode 100644 index 0000000000..2c5e26411b Binary files /dev/null and b/playground/stpl_c_py_cs_satellite_wix/src/gui/Resources/Icon1.ico differ diff --git a/playground/stpl_c_py_cs_satellite_wix/src/gui/Resources/resources.fr.txt b/playground/stpl_c_py_cs_satellite_wix/src/gui/Resources/resources.fr.txt new file mode 100644 index 0000000000..71ed560285 --- /dev/null +++ b/playground/stpl_c_py_cs_satellite_wix/src/gui/Resources/resources.fr.txt @@ -0,0 +1 @@ +greeting=Salut diff --git a/playground/stpl_c_py_cs_satellite_wix/src/gui/program.cs b/playground/stpl_c_py_cs_satellite_wix/src/gui/program.cs new file mode 100644 index 0000000000..ee1d0507c7 --- /dev/null +++ b/playground/stpl_c_py_cs_satellite_wix/src/gui/program.cs @@ -0,0 +1,31 @@ +using System; +using System.Windows.Forms; +using System.Globalization; +using System.Threading; +using Funi; + +namespace Funi +{ + static class Program + { + [STAThread] + static void Main() + { + +// /* for testing only: +// * the according Satellite Assembly is chosen based on CurrentThread.CurrentCulture + CultureInfo culture; + culture = CultureInfo.CreateSpecificCulture("fr"); + + CultureInfo.DefaultThreadCurrentCulture = culture; + CultureInfo.DefaultThreadCurrentUICulture = culture; + + Thread.CurrentThread.CurrentCulture = culture; + Thread.CurrentThread.CurrentUICulture = culture; +// * fallback is english +// */ + + Application.Run(new FormFuni()); + } + } +} diff --git a/playground/stpl_c_py_cs_satellite_wix/src/gui/wscript_build b/playground/stpl_c_py_cs_satellite_wix/src/gui/wscript_build new file mode 100644 index 0000000000..db3d982575 --- /dev/null +++ b/playground/stpl_c_py_cs_satellite_wix/src/gui/wscript_build @@ 
-0,0 +1,102 @@ +#! /usr/bin/env python +# encoding: utf-8 +#vim syntax=python + +import os, sys, shutil +from waflib import Utils + +bld.add_group() + +funigui = ctx.env.guiname+'.exe' +csdll = ctx.env.dllname+'_cs.dll' +funi_cs = bld.path.find_or_declare(csdll) + +# obtain the file names to copy +from waflib import TaskGen +@TaskGen.feature('copy_over') +@TaskGen.before_method('process_subst') +def get_filenames_to_copy_from_task_generators(self): + src = self.bld.get_tgen_by_name(self.from_tg).tasks[-1].outputs[0] + self.source = [src] + self.target = [src.name] + self.is_copy = True + +# just copy the files to this folder +bld(features='copy_over subst', from_tg='funi') +bld(features='copy_over subst', from_tg='csdll') + +# copy the same file to the build directory +bld.src2bld(bld, 'Resources/Icon1.ico') + +bld(rule=bld.stpl,source='AssemblyInfo.cs.stpl',target='AssemblyInfo.cs') + +bld.add_group() + +src = """ + program.cs + FormFuni.cs + AssemblyInfo.cs + Resources.resx +""".strip().split() + +refs = """ + System + System.Core + System.Windows.Forms + System.Xml + System.Xml.Linq + System.Data + System.Data.DataSetExtensions + System.Drawing +""".strip().split() + +if Utils.is_win32: + refs = """ + System + System.Core + System.Windows.Forms + System.Linq + System.RunTime.InteropServices + System.Xml + System.Xml.Linq + System.Threading.Tasks + System.Data + System.Data.DataSetExtensions + System.Deployment + System.Drawing +""".strip().split() + + +CSFLAGS = [] +def csflag(x): + global CSFLAGS + CSFLAGS+=[x] +csflag(r'/platform:x64') +csflag(r'/errorreport:prompt') +if Utils.is_win32: + csflag(r'/errorendlocation') + csflag(r'/preferreduilang:en-US') + csflag(r'/highentropyva-') +csflag(r'/debug:pdbonly') +csflag(r'/filealign:512') +csflag(r'/define:'+ctx.env.guiname[1:]) #EstimPRO or EstimRESEARCH +csflag(r'/nologo') +csflag(r'/noconfig') +csflag(r'/nowarn:1701,1702') +csflag(r'/target:winexe') +if ctx.options.stubs: + csflag('/optimize-') + csflag('/define:DEBUG') + csflag('/define:TRACE') +else: + csflag('/optimize+') + +csflag(r'/win32icon:gui/Resources/Icon1.ico') + + +bld(features='cs',source=src,gen=funigui,csflags=CSFLAGS,use=[r+'.dll' for r in refs]+[funi_cs.abspath()]) + +bld.add_group() + +bld(features='satellite_assembly',source='Resources/resources.fr.txt', gen=funigui) + diff --git a/playground/stpl_c_py_cs_satellite_wix/src/msi/bundle.wxs b/playground/stpl_c_py_cs_satellite_wix/src/msi/bundle.wxs new file mode 100644 index 0000000000..89c5c7c03c --- /dev/null +++ b/playground/stpl_c_py_cs_satellite_wix/src/msi/bundle.wxs @@ -0,0 +1,34 @@ + + + + + + + + + + + + + + + + + + + + + + diff --git a/playground/stpl_c_py_cs_satellite_wix/src/msi/funi.wxs b/playground/stpl_c_py_cs_satellite_wix/src/msi/funi.wxs new file mode 100644 index 0000000000..88fa5b4565 --- /dev/null +++ b/playground/stpl_c_py_cs_satellite_wix/src/msi/funi.wxs @@ -0,0 +1,103 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/playground/stpl_c_py_cs_satellite_wix/src/msi/readme.rtf b/playground/stpl_c_py_cs_satellite_wix/src/msi/readme.rtf new file mode 100644 index 0000000000..9356309809 --- /dev/null +++ b/playground/stpl_c_py_cs_satellite_wix/src/msi/readme.rtf @@ -0,0 +1,182 @@ 
+{\rtf1\adeflang1025\ansi\ansicpg1252\uc1\adeff31507\deff0\stshfdbch31505\stshfloch31506\stshfhich31506\stshfbi31507\deflang1033\deflangfe2052\themelang1033\themelangfe2052\themelangcs0{\fonttbl{\f0\fbidi \froman\fcharset0\fprq2{\*\panose 02020603050405020304}Times New Roman;} +{\f13\fbidi \fnil\fcharset134\fprq2{\*\panose 02010600030101010101}SimSun{\*\falt \'cb\'ce\'cc\'e5};}{\f34\fbidi \froman\fcharset1\fprq2{\*\panose 02040503050406030204}Cambria Math;} +{\f37\fbidi \fswiss\fcharset0\fprq2{\*\panose 020f0502020204030204}Calibri;}{\f39\fbidi \fnil\fcharset134\fprq2{\*\panose 02010600030101010101}@SimSun;}{\flomajor\f31500\fbidi \froman\fcharset0\fprq2{\*\panose 02020603050405020304}Times New Roman;} +{\fdbmajor\f31501\fbidi \fnil\fcharset134\fprq2{\*\panose 02010600030101010101}SimSun{\*\falt \'cb\'ce\'cc\'e5};}{\fhimajor\f31502\fbidi \froman\fcharset0\fprq2{\*\panose 02040503050406030204}Cambria;} +{\fbimajor\f31503\fbidi \froman\fcharset0\fprq2{\*\panose 02020603050405020304}Times New Roman;}{\flominor\f31504\fbidi \froman\fcharset0\fprq2{\*\panose 02020603050405020304}Times New Roman;} +{\fdbminor\f31505\fbidi \fnil\fcharset134\fprq2{\*\panose 02010600030101010101}SimSun{\*\falt \'cb\'ce\'cc\'e5};}{\fhiminor\f31506\fbidi \fswiss\fcharset0\fprq2{\*\panose 020f0502020204030204}Calibri;} +{\fbiminor\f31507\fbidi \froman\fcharset0\fprq2{\*\panose 02020603050405020304}Times New Roman;}{\f40\fbidi \froman\fcharset238\fprq2 Times New Roman CE;}{\f41\fbidi \froman\fcharset204\fprq2 Times New Roman Cyr;} +{\f43\fbidi \froman\fcharset161\fprq2 Times New Roman Greek;}{\f44\fbidi \froman\fcharset162\fprq2 Times New Roman Tur;}{\f45\fbidi \froman\fcharset177\fprq2 Times New Roman (Hebrew);}{\f46\fbidi \froman\fcharset178\fprq2 Times New Roman (Arabic);} +{\f47\fbidi \froman\fcharset186\fprq2 Times New Roman Baltic;}{\f48\fbidi \froman\fcharset163\fprq2 Times New Roman (Vietnamese);}{\f172\fbidi \fnil\fcharset0\fprq2 SimSun Western{\*\falt \'cb\'ce\'cc\'e5};} +{\f410\fbidi \fswiss\fcharset238\fprq2 Calibri CE;}{\f411\fbidi \fswiss\fcharset204\fprq2 Calibri Cyr;}{\f413\fbidi \fswiss\fcharset161\fprq2 Calibri Greek;}{\f414\fbidi \fswiss\fcharset162\fprq2 Calibri Tur;} +{\f417\fbidi \fswiss\fcharset186\fprq2 Calibri Baltic;}{\f418\fbidi \fswiss\fcharset163\fprq2 Calibri (Vietnamese);}{\f432\fbidi \fnil\fcharset0\fprq2 @SimSun Western;}{\flomajor\f31508\fbidi \froman\fcharset238\fprq2 Times New Roman CE;} +{\flomajor\f31509\fbidi \froman\fcharset204\fprq2 Times New Roman Cyr;}{\flomajor\f31511\fbidi \froman\fcharset161\fprq2 Times New Roman Greek;}{\flomajor\f31512\fbidi \froman\fcharset162\fprq2 Times New Roman Tur;} +{\flomajor\f31513\fbidi \froman\fcharset177\fprq2 Times New Roman (Hebrew);}{\flomajor\f31514\fbidi \froman\fcharset178\fprq2 Times New Roman (Arabic);}{\flomajor\f31515\fbidi \froman\fcharset186\fprq2 Times New Roman Baltic;} +{\flomajor\f31516\fbidi \froman\fcharset163\fprq2 Times New Roman (Vietnamese);}{\fdbmajor\f31520\fbidi \fnil\fcharset0\fprq2 SimSun Western{\*\falt \'cb\'ce\'cc\'e5};}{\fhimajor\f31528\fbidi \froman\fcharset238\fprq2 Cambria CE;} +{\fhimajor\f31529\fbidi \froman\fcharset204\fprq2 Cambria Cyr;}{\fhimajor\f31531\fbidi \froman\fcharset161\fprq2 Cambria Greek;}{\fhimajor\f31532\fbidi \froman\fcharset162\fprq2 Cambria Tur;} +{\fhimajor\f31535\fbidi \froman\fcharset186\fprq2 Cambria Baltic;}{\fhimajor\f31536\fbidi \froman\fcharset163\fprq2 Cambria (Vietnamese);}{\fbimajor\f31538\fbidi \froman\fcharset238\fprq2 Times New Roman CE;} +{\fbimajor\f31539\fbidi 
\froman\fcharset204\fprq2 Times New Roman Cyr;}{\fbimajor\f31541\fbidi \froman\fcharset161\fprq2 Times New Roman Greek;}{\fbimajor\f31542\fbidi \froman\fcharset162\fprq2 Times New Roman Tur;} +{\fbimajor\f31543\fbidi \froman\fcharset177\fprq2 Times New Roman (Hebrew);}{\fbimajor\f31544\fbidi \froman\fcharset178\fprq2 Times New Roman (Arabic);}{\fbimajor\f31545\fbidi \froman\fcharset186\fprq2 Times New Roman Baltic;} +{\fbimajor\f31546\fbidi \froman\fcharset163\fprq2 Times New Roman (Vietnamese);}{\flominor\f31548\fbidi \froman\fcharset238\fprq2 Times New Roman CE;}{\flominor\f31549\fbidi \froman\fcharset204\fprq2 Times New Roman Cyr;} +{\flominor\f31551\fbidi \froman\fcharset161\fprq2 Times New Roman Greek;}{\flominor\f31552\fbidi \froman\fcharset162\fprq2 Times New Roman Tur;}{\flominor\f31553\fbidi \froman\fcharset177\fprq2 Times New Roman (Hebrew);} +{\flominor\f31554\fbidi \froman\fcharset178\fprq2 Times New Roman (Arabic);}{\flominor\f31555\fbidi \froman\fcharset186\fprq2 Times New Roman Baltic;}{\flominor\f31556\fbidi \froman\fcharset163\fprq2 Times New Roman (Vietnamese);} +{\fdbminor\f31560\fbidi \fnil\fcharset0\fprq2 SimSun Western{\*\falt \'cb\'ce\'cc\'e5};}{\fhiminor\f31568\fbidi \fswiss\fcharset238\fprq2 Calibri CE;}{\fhiminor\f31569\fbidi \fswiss\fcharset204\fprq2 Calibri Cyr;} +{\fhiminor\f31571\fbidi \fswiss\fcharset161\fprq2 Calibri Greek;}{\fhiminor\f31572\fbidi \fswiss\fcharset162\fprq2 Calibri Tur;}{\fhiminor\f31575\fbidi \fswiss\fcharset186\fprq2 Calibri Baltic;} +{\fhiminor\f31576\fbidi \fswiss\fcharset163\fprq2 Calibri (Vietnamese);}{\fbiminor\f31578\fbidi \froman\fcharset238\fprq2 Times New Roman CE;}{\fbiminor\f31579\fbidi \froman\fcharset204\fprq2 Times New Roman Cyr;} +{\fbiminor\f31581\fbidi \froman\fcharset161\fprq2 Times New Roman Greek;}{\fbiminor\f31582\fbidi \froman\fcharset162\fprq2 Times New Roman Tur;}{\fbiminor\f31583\fbidi \froman\fcharset177\fprq2 Times New Roman (Hebrew);} +{\fbiminor\f31584\fbidi \froman\fcharset178\fprq2 Times New Roman (Arabic);}{\fbiminor\f31585\fbidi \froman\fcharset186\fprq2 Times New Roman Baltic;}{\fbiminor\f31586\fbidi \froman\fcharset163\fprq2 Times New Roman (Vietnamese);}} +{\colortbl;\red0\green0\blue0;\red0\green0\blue255;\red0\green255\blue255;\red0\green255\blue0;\red255\green0\blue255;\red255\green0\blue0;\red255\green255\blue0;\red255\green255\blue255;\red0\green0\blue128;\red0\green128\blue128;\red0\green128\blue0; +\red128\green0\blue128;\red128\green0\blue0;\red128\green128\blue0;\red128\green128\blue128;\red192\green192\blue192;}{\*\defchp \fs22\loch\af31506\hich\af31506\dbch\af31505 }{\*\defpap \ql \li0\ri0\sa200\sl276\slmult1 +\widctlpar\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 }\noqfpromote {\stylesheet{\ql \li0\ri0\sa200\sl276\slmult1\widctlpar\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 \rtlch\fcs1 \af31507\afs22\alang1025 +\ltrch\fcs0 \fs22\lang1033\langfe2052\loch\f31506\hich\af31506\dbch\af31505\cgrid\langnp1033\langfenp2052 \snext0 \sqformat \spriority0 \styrsid7174289 Normal;}{\*\cs10 \additive \ssemihidden \sunhideused \spriority1 Default Paragraph Font;}{\* +\ts11\tsrowd\trftsWidthB3\trpaddl108\trpaddr108\trpaddfl3\trpaddft3\trpaddfb3\trpaddfr3\trcbpat1\trcfpat1\tblind0\tblindtype3\tscellwidthfts0\tsvertalt\tsbrdrt\tsbrdrl\tsbrdrb\tsbrdrr\tsbrdrdgl\tsbrdrdgr\tsbrdrh\tsbrdrv \ql \li0\ri0\sa200\sl276\slmult1 +\widctlpar\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 \rtlch\fcs1 \af31507\afs22\alang1025 \ltrch\fcs0 
\fs22\lang1033\langfe2052\loch\f31506\hich\af31506\dbch\af31505\cgrid\langnp1033\langfenp2052 +\snext11 \ssemihidden \sunhideused \sqformat Normal Table;}}{\*\rsidtbl \rsid69038\rsid82046\rsid87707\rsid95659\rsid134211\rsid154312\rsid162960\rsid343415\rsid353316\rsid357831\rsid424175\rsid425239\rsid487345\rsid555870\rsid592788\rsid662841\rsid664362 +\rsid669664\rsid677615\rsid682290\rsid723102\rsid729875\rsid745976\rsid753359\rsid861325\rsid884569\rsid936148\rsid1004892\rsid1063873\rsid1134810\rsid1186501\rsid1186883\rsid1194660\rsid1376795\rsid1401633\rsid1529513\rsid1535688\rsid1536023\rsid1581410 +\rsid1582120\rsid1584154\rsid1647099\rsid1654806\rsid1655805\rsid1734857\rsid1776626\rsid1781686\rsid1793423\rsid1796780\rsid1797706\rsid1842033\rsid1977053\rsid2124302\rsid2178214\rsid2188409\rsid2368406\rsid2455043\rsid2556191\rsid2559080\rsid2706421 +\rsid2717148\rsid2838319\rsid2842440\rsid2846834\rsid2849503\rsid2890251\rsid2915487\rsid2978369\rsid3087645\rsid3096048\rsid3157651\rsid3213286\rsid3216382\rsid3231882\rsid3243724\rsid3292963\rsid3306157\rsid3411267\rsid3494792\rsid3552460\rsid3555247 +\rsid3564302\rsid3564469\rsid3606601\rsid3613764\rsid3678404\rsid3702734\rsid3744060\rsid3746433\rsid3764488\rsid3767582\rsid3802866\rsid3822118\rsid3867395\rsid3879311\rsid3883140\rsid3941726\rsid3960565\rsid4017175\rsid4069295\rsid4086842\rsid4217327 +\rsid4218194\rsid4265694\rsid4287373\rsid4474139\rsid4480618\rsid4481007\rsid4526124\rsid4541201\rsid4543468\rsid4553189\rsid4597977\rsid4598568\rsid4606409\rsid4617940\rsid4661390\rsid4665798\rsid4669776\rsid4718821\rsid4743566\rsid4745032\rsid4862560 +\rsid4916941\rsid4930440\rsid4934643\rsid4942416\rsid4984938\rsid5051074\rsid5057133\rsid5072156\rsid5112549\rsid5126983\rsid5137805\rsid5208771\rsid5321575\rsid5330704\rsid5333576\rsid5390032\rsid5393962\rsid5406273\rsid5466724\rsid5468218\rsid5528489 +\rsid5584391\rsid5655967\rsid5657287\rsid5770668\rsid5772499\rsid5782795\rsid5797800\rsid5835669\rsid5844695\rsid5990916\rsid6119276\rsid6161711\rsid6182604\rsid6187748\rsid6193081\rsid6244483\rsid6294159\rsid6361415\rsid6366086\rsid6386107\rsid6438581 +\rsid6444379\rsid6499201\rsid6505118\rsid6570222\rsid6572459\rsid6631200\rsid6649362\rsid6685800\rsid6825220\rsid6825684\rsid6833010\rsid6893441\rsid6896301\rsid7031407\rsid7040521\rsid7090529\rsid7099881\rsid7147977\rsid7151242\rsid7161394\rsid7170939 +\rsid7174289\rsid7216603\rsid7223901\rsid7239928\rsid7275269\rsid7285619\rsid7342289\rsid7343100\rsid7345751\rsid7359472\rsid7360787\rsid7365160\rsid7431355\rsid7567533\rsid7603088\rsid7632088\rsid7670100\rsid7677695\rsid7685243\rsid7690018\rsid7699692 +\rsid7749205\rsid7757442\rsid7804790\rsid7805881\rsid7823655\rsid7866042\rsid7869944\rsid7881919\rsid7887261\rsid7892841\rsid8001358\rsid8065457\rsid8082350\rsid8205271\rsid8263249\rsid8276646\rsid8340657\rsid8403836\rsid8404675\rsid8413005\rsid8415953 +\rsid8454550\rsid8464518\rsid8471740\rsid8473147\rsid8474305\rsid8608269\rsid8674070\rsid8678535\rsid8680380\rsid8716556\rsid8726521\rsid8740342\rsid8792794\rsid8850474\rsid8939938\rsid9008211\rsid9009783\rsid9046093\rsid9050817\rsid9199494\rsid9256582 +\rsid9382159\rsid9382524\rsid9580642\rsid9636050\rsid9650101\rsid9719636\rsid9790198\rsid9845373\rsid9896574\rsid9911041\rsid9917438\rsid9923535\rsid10058259\rsid10104370\rsid10104782\rsid10116859\rsid10119940\rsid10162764\rsid10171268\rsid10173470 
+\rsid10181119\rsid10186326\rsid10227481\rsid10228062\rsid10238495\rsid10241328\rsid10251406\rsid10253016\rsid10308789\rsid10356536\rsid10448859\rsid10451319\rsid10553803\rsid10617894\rsid10624398\rsid10630071\rsid10644784\rsid10649121\rsid10693560 +\rsid10778738\rsid10819029\rsid10822871\rsid10833058\rsid10889418\rsid10906327\rsid10947796\rsid10948305\rsid10959936\rsid10970050\rsid11022248\rsid11091976\rsid11092694\rsid11106531\rsid11221818\rsid11223744\rsid11284859\rsid11297367\rsid11304667 +\rsid11410401\rsid11471275\rsid11473879\rsid11483990\rsid11630288\rsid11670184\rsid11683410\rsid11695780\rsid11749422\rsid11801413\rsid11823730\rsid11885140\rsid11948062\rsid11949587\rsid11952697\rsid12011226\rsid12011847\rsid12143706\rsid12196369 +\rsid12197931\rsid12211842\rsid12256397\rsid12260147\rsid12274050\rsid12278472\rsid12341722\rsid12386746\rsid12400576\rsid12462365\rsid12480687\rsid12524088\rsid12602696\rsid12654958\rsid12714736\rsid12730982\rsid12734740\rsid12736982\rsid12737686 +\rsid12847212\rsid12858831\rsid12938446\rsid12980310\rsid12998071\rsid12999036\rsid13001300\rsid13042177\rsid13060905\rsid13138458\rsid13256747\rsid13313954\rsid13320738\rsid13322171\rsid13331168\rsid13384129\rsid13444667\rsid13530695\rsid13530763 +\rsid13663661\rsid13763981\rsid13775311\rsid13793683\rsid13852375\rsid13857004\rsid13896517\rsid13924119\rsid13964242\rsid13968489\rsid13985729\rsid13986503\rsid13990163\rsid14027411\rsid14034599\rsid14118349\rsid14167608\rsid14171158\rsid14174498 +\rsid14237993\rsid14245230\rsid14301665\rsid14302208\rsid14304207\rsid14313482\rsid14318505\rsid14356566\rsid14439634\rsid14446103\rsid14447519\rsid14492879\rsid14495582\rsid14552397\rsid14581735\rsid14822449\rsid14837713\rsid14896889\rsid14968219 +\rsid15014585\rsid15022794\rsid15029050\rsid15091012\rsid15097832\rsid15155308\rsid15222503\rsid15229110\rsid15367816\rsid15419119\rsid15490739\rsid15532842\rsid15538613\rsid15554039\rsid15556187\rsid15561492\rsid15564061\rsid15611687\rsid15613741 +\rsid15615348\rsid15741996\rsid15751877\rsid15753374\rsid15757262\rsid15758596\rsid15809585\rsid15824214\rsid15873165\rsid15880090\rsid15884729\rsid15930247\rsid15934296\rsid16023137\rsid16066612\rsid16129228\rsid16151538\rsid16208867\rsid16216830 +\rsid16258247\rsid16278765\rsid16283291\rsid16283921\rsid16285193\rsid16330841\rsid16337005\rsid16339481\rsid16340046\rsid16346818\rsid16411054\rsid16455128\rsid16461848\rsid16515316\rsid16595350\rsid16596092\rsid16608749\rsid16647151\rsid16662723 +\rsid16678462\rsid16726804\rsid16738174\rsid16739818\rsid16740151\rsid16741504\rsid16742933}{\mmathPr\mmathFont34\mbrkBin0\mbrkBinSub0\msmallFrac0\mdispDef1\mlMargin0\mrMargin0\mdefJc1\mwrapIndent1440\mintLim0\mnaryLim1}{\info{\author roland} +{\operator roland}{\creatim\yr2015\mo12\dy8\hr22\min17}{\revtim\yr2015\mo12\dy8\hr22\min19}{\version1}{\edmins2}{\nofpages1}{\nofwords18}{\nofchars105}{\nofcharsws122}{\vern32769}}{\*\xmlnstbl {\xmlns1 http://schemas.microsoft.com/office/word/2003/wordml} +}\paperw12240\paperh15840\margl1440\margr1440\margt1440\margb1440\gutter0\ltrsect +\widowctrl\ftnbj\aenddoc\trackmoves1\trackformatting1\donotembedsysfont1\relyonvml0\donotembedlingdata0\grfdocevents0\validatexml1\showplaceholdtext0\ignoremixedcontent0\saveinvalidxml0\showxmlerrors1\noxlattoyen +\expshrtn\noultrlspc\dntblnsbdb\nospaceforul\formshade\horzdoc\dgmargin\dghspace180\dgvspace180\dghorigin1440\dgvorigin1440\dghshow1\dgvshow1 
+\jexpand\viewkind1\viewscale140\pgbrdrhead\pgbrdrfoot\splytwnine\ftnlytwnine\htmautsp\nolnhtadjtbl\useltbaln\alntblind\lytcalctblwd\lyttblrtgr\lnbrkrule\nobrkwrptbl\snaptogridincell\allowfieldendsel\wrppunct +\asianbrkrule\rsidroot1529513\newtblstyruls\nogrowautofit\usenormstyforlist\noindnmbrts\felnbrelev\nocxsptable\indrlsweleven\noafcnsttbl\afelev\utinl\hwelev\spltpgpar\notcvasp\notbrkcnstfrctbl\notvatxbx\krnprsnet\cachedcolbal \nouicompat \fet0 +{\*\wgrffmtfilter 2450}\nofeaturethrottle1\ilfomacatclnup0\ltrpar \sectd \ltrsect\linex0\endnhere\sectlinegrid360\sectdefaultcl\sectrsid7174289\sftnbj {\*\pnseclvl1\pnucrm\pnstart1\pnindent720\pnhang {\pntxta .}}{\*\pnseclvl2 +\pnucltr\pnstart1\pnindent720\pnhang {\pntxta .}}{\*\pnseclvl3\pndec\pnstart1\pnindent720\pnhang {\pntxta .}}{\*\pnseclvl4\pnlcltr\pnstart1\pnindent720\pnhang {\pntxta )}}{\*\pnseclvl5\pndec\pnstart1\pnindent720\pnhang {\pntxtb (}{\pntxta )}}{\*\pnseclvl6 +\pnlcltr\pnstart1\pnindent720\pnhang {\pntxtb (}{\pntxta )}}{\*\pnseclvl7\pnlcrm\pnstart1\pnindent720\pnhang {\pntxtb (}{\pntxta )}}{\*\pnseclvl8\pnlcltr\pnstart1\pnindent720\pnhang {\pntxtb (}{\pntxta )}}{\*\pnseclvl9\pnlcrm\pnstart1\pnindent720\pnhang +{\pntxtb (}{\pntxta )}}\pard\plain \ltrpar\ql \li0\ri0\sa200\sl276\slmult1\widctlpar\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0\pararsid1529513 \rtlch\fcs1 \af31507\afs22\alang1025 \ltrch\fcs0 +\fs22\lang1033\langfe2052\loch\af31506\hich\af31506\dbch\af31505\cgrid\langnp1033\langfenp2052 {\rtlch\fcs1 \af31507 \ltrch\fcs0 \insrsid1529513 \hich\af31506\dbch\af31505\loch\f31506 This program itself is nonsense.\hich\af31506\dbch\af31505\loch\f31506 + It serves no purpose. +\par \hich\af31506\dbch\af31505\loch\f31506 It\hich\af31506\dbch\af31505\loch\f31506 s \hich\af31506\dbch\af31505\loch\f31506 project\loch\af31506\dbch\af31505\hich\f31506 \rquote \hich\af31506\dbch\af31505\loch\f31506 s \hich\af31506\dbch\af31505\loch\f31506 +purpose is to test waf, the python base build tool. 
+\par +\par +\par }{\*\themedata 504b030414000600080000002100828abc13fa0000001c020000130000005b436f6e74656e745f54797065735d2e786d6cac91cb6ac3301045f785fe83d0b6d8 +72ba28a5d8cea249777d2cd20f18e4b12d6a8f843409c9df77ecb850ba082d74231062ce997b55ae8fe3a00e1893f354e9555e6885647de3a8abf4fbee29bbd7 +2a3150038327acf409935ed7d757e5ee14302999a654e99e393c18936c8f23a4dc072479697d1c81e51a3b13c07e4087e6b628ee8cf5c4489cf1c4d075f92a0b +44d7a07a83c82f308ac7b0a0f0fbf90c2480980b58abc733615aa2d210c2e02cb04430076a7ee833dfb6ce62e3ed7e14693e8317d8cd0433bf5c60f53fea2fe7 +065bd80facb647e9e25c7fc421fd2ddb526b2e9373fed4bb902e182e97b7b461e6bfad3f010000ffff0300504b030414000600080000002100a5d6a7e7c00000 +00360100000b0000005f72656c732f2e72656c73848fcf6ac3300c87ef85bd83d17d51d2c31825762fa590432fa37d00e1287f68221bdb1bebdb4fc7060abb08 +84a4eff7a93dfeae8bf9e194e720169aaa06c3e2433fcb68e1763dbf7f82c985a4a725085b787086a37bdbb55fbc50d1a33ccd311ba548b63095120f88d94fbc +52ae4264d1c910d24a45db3462247fa791715fd71f989e19e0364cd3f51652d73760ae8fa8c9ffb3c330cc9e4fc17faf2ce545046e37944c69e462a1a82fe353 +bd90a865aad41ed0b5b8f9d6fd010000ffff0300504b0304140006000800000021006b799616830000008a0000001c0000007468656d652f7468656d652f7468 +656d654d616e616765722e786d6c0ccc4d0ac3201040e17da17790d93763bb284562b2cbaebbf600439c1a41c7a0d29fdbd7e5e38337cedf14d59b4b0d592c9c +070d8a65cd2e88b7f07c2ca71ba8da481cc52c6ce1c715e6e97818c9b48d13df49c873517d23d59085adb5dd20d6b52bd521ef2cdd5eb9246a3d8b4757e8d3f7 +29e245eb2b260a0238fd010000ffff0300504b03041400060008000000210096b5ade296060000501b0000160000007468656d652f7468656d652f7468656d65 +312e786d6cec594f6fdb3614bf0fd87720746f6327761a07758ad8b19b2d4d1bc46e871e698996d850a240d2497d1bdae38001c3ba618715d86d87615b8116d8 +a5fb34d93a6c1dd0afb0475292c5585e9236d88aad3e2412f9e3fbff1e1fa9abd7eec70c1d1221294fda5efd72cd4324f1794093b0eddd1ef62fad79482a9c04 +98f184b4bd2991deb58df7dfbb8ad755446282607d22d771db8b944ad79796a40fc3585ee62949606ecc458c15bc8a702910f808e8c66c69b9565b5d8a314d3c +94e018c8de1a8fa94fd05093f43672e23d06af89927ac06762a049136785c10607758d9053d965021d62d6f6804fc08f86e4bef210c352c144dbab999fb7b471 +7509af678b985ab0b6b4ae6f7ed9ba6c4170b06c788a705430adf71bad2b5b057d03606a1ed7ebf5babd7a41cf00b0ef83a6569632cd467faddec9699640f671 +9e76b7d6ac355c7c89feca9cccad4ea7d36c65b258a206641f1b73f8b5da6a6373d9c11b90c537e7f08dce66b7bbeae00dc8e257e7f0fd2badd5868b37a088d1 +e4600ead1ddaef67d40bc898b3ed4af81ac0d76a197c86826828a24bb318f3442d8ab518dfe3a20f000d6458d104a9694ac6d88728eee2782428d60cf03ac1a5 +193be4cbb921cd0b495fd054b5bd0f530c1931a3f7eaf9f7af9e3f45c70f9e1d3ff8e9f8e1c3e3073f5a42ceaa6d9c84e5552fbffdeccfc71fa33f9e7ef3f2d1 +17d57859c6fffac327bffcfc793510d26726ce8b2f9ffcf6ecc98baf3efdfdbb4715f04d814765f890c644a29be408edf3181433567125272371be15c308d3f2 +8acd249438c19a4b05fd9e8a1cf4cd296699771c393ac4b5e01d01e5a30a787d72cf1178108989a2159c77a2d801ee72ce3a5c545a6147f32a99793849c26ae6 +6252c6ed637c58c5bb8b13c7bfbd490a75330f4b47f16e441c31f7184e140e494214d273fc80900aedee52ead87597fa824b3e56e82e451d4c2b4d32a423279a +668bb6690c7e9956e90cfe766cb37b077538abd27a8b1cba48c80acc2a841f12e698f13a9e281c57911ce298950d7e03aba84ac8c154f8655c4f2af074481847 +bd804859b5e696007d4b4edfc150b12addbecba6b18b148a1e54d1bc81392f23b7f84137c2715a851dd0242a633f900710a218ed715505dfe56e86e877f0034e +16bafb0e258ebb4faf06b769e888340b103d3311da9750aa9d0a1cd3e4efca31a3508f6d0c5c5c398602f8e2ebc71591f5b616e24dd893aa3261fb44f95d843b +5974bb5c04f4edafb95b7892ec1108f3f98de75dc97d5772bdff7cc95d94cf672db4b3da0a6557f70db629362d72bcb0431e53c6066acac80d699a6409fb44d0 
+8741bdce9c0e4971624a2378cceaba830b05366b90e0ea23aaa241845368b0eb9e2612ca8c742851ca251ceccc70256d8d87265dd96361531f186c3d9058edf2 +c00eafe8e1fc5c509031bb4d680e9f39a3154de0accc56ae644441edd76156d7429d995bdd88664a9dc3ad50197c38af1a0c16d684060441db02565e85f3b966 +0d0713cc48a0ed6ef7dedc2dc60b17e92219e180643ed27acffba86e9c94c78ab90980d8a9f0913ee49d62b512b79626fb06dccee2a432bbc60276b9f7dec44b +7904cfbca4f3f6443ab2a49c9c2c41476dafd55c6e7ac8c769db1bc399161ee314bc2e75cf8759081743be1236ec4f4d6693e5336fb672c5dc24a8c33585b5fb +9cc24e1d4885545b58463634cc5416022cd19cacfccb4d30eb45296023fd35a458598360f8d7a4003bbaae25e331f155d9d9a5116d3bfb9a95523e51440ca2e0 +088dd844ec6370bf0e55d027a012ae264c45d02f708fa6ad6da6dce29c255df9f6cae0ec38666984b372ab5334cf640b37795cc860de4ae2816e95b21be5ceaf +8a49f90b52a51cc6ff3355f47e0237052b81f6800fd7b802239daf6d8f0b1571a8426944fdbe80c6c1d40e8816b88b8569082ab84c36ff0539d4ff6dce591a26 +ade1c0a7f669880485fd484582903d284b26fa4e2156cff62e4b9265844c4495c495a9157b440e091bea1ab8aaf7760f4510eaa69a6465c0e04ec69ffb9e65d0 +28d44d4e39df9c1a52ecbd3607fee9cec7263328e5d661d3d0e4f62f44acd855ed7ab33cdf7bcb8ae889599bd5c8b3029895b6825696f6af29c239b75a5bb1e6 +345e6ee6c28117e73586c1a2214ae1be07e93fb0ff51e133fb65426fa843be0fb515c187064d0cc206a2fa926d3c902e907670048d931db4c1a44959d366ad93 +b65abe595f70a75bf03d616c2dd959fc7d4e6317cd99cbcec9c58b34766661c7d6766ca1a9c1b327531486c6f941c638c67cd22a7f75e2a37be0e82db8df9f30 +254d30c1372581a1f51c983c80e4b71ccdd28dbf000000ffff0300504b0304140006000800000021000dd1909fb60000001b010000270000007468656d652f74 +68656d652f5f72656c732f7468656d654d616e616765722e786d6c2e72656c73848f4d0ac2301484f78277086f6fd3ba109126dd88d0add40384e4350d363f24 +51eced0dae2c082e8761be9969bb979dc9136332de3168aa1a083ae995719ac16db8ec8e4052164e89d93b64b060828e6f37ed1567914b284d262452282e3198 +720e274a939cd08a54f980ae38a38f56e422a3a641c8bbd048f7757da0f19b017cc524bd62107bd5001996509affb3fd381a89672f1f165dfe514173d9850528 +a2c6cce0239baa4c04ca5bbabac4df000000ffff0300504b01022d0014000600080000002100828abc13fa0000001c0200001300000000000000000000000000 +000000005b436f6e74656e745f54797065735d2e786d6c504b01022d0014000600080000002100a5d6a7e7c0000000360100000b000000000000000000000000 +002b0100005f72656c732f2e72656c73504b01022d00140006000800000021006b799616830000008a0000001c00000000000000000000000000140200007468 +656d652f7468656d652f7468656d654d616e616765722e786d6c504b01022d001400060008000000210096b5ade296060000501b000016000000000000000000 +00000000d10200007468656d652f7468656d652f7468656d65312e786d6c504b01022d00140006000800000021000dd1909fb60000001b010000270000000000 +00000000000000009b0900007468656d652f7468656d652f5f72656c732f7468656d654d616e616765722e786d6c2e72656c73504b050600000000050005005d010000960a00000000} +{\*\colorschememapping 3c3f786d6c2076657273696f6e3d22312e302220656e636f64696e673d225554462d3822207374616e64616c6f6e653d22796573223f3e0d0a3c613a636c724d +617020786d6c6e733a613d22687474703a2f2f736368656d61732e6f70656e786d6c666f726d6174732e6f72672f64726177696e676d6c2f323030362f6d6169 +6e22206267313d226c743122207478313d22646b3122206267323d226c743222207478323d22646b322220616363656e74313d22616363656e74312220616363 +656e74323d22616363656e74322220616363656e74333d22616363656e74332220616363656e74343d22616363656e74342220616363656e74353d22616363656e74352220616363656e74363d22616363656e74362220686c696e6b3d22686c696e6b2220666f6c486c696e6b3d22666f6c486c696e6b222f3e} +{\*\latentstyles\lsdstimax267\lsdlockeddef0\lsdsemihiddendef1\lsdunhideuseddef1\lsdqformatdef0\lsdprioritydef99{\lsdlockedexcept \lsdsemihidden0 
\lsdunhideused0 \lsdqformat1 \lsdpriority0 \lsdlocked0 Normal; +\lsdsemihidden0 \lsdunhideused0 \lsdqformat1 \lsdpriority9 \lsdlocked0 heading 1;\lsdqformat1 \lsdpriority9 \lsdlocked0 heading 2;\lsdqformat1 \lsdpriority9 \lsdlocked0 heading 3;\lsdqformat1 \lsdpriority9 \lsdlocked0 heading 4; +\lsdqformat1 \lsdpriority9 \lsdlocked0 heading 5;\lsdqformat1 \lsdpriority9 \lsdlocked0 heading 6;\lsdqformat1 \lsdpriority9 \lsdlocked0 heading 7;\lsdqformat1 \lsdpriority9 \lsdlocked0 heading 8;\lsdqformat1 \lsdpriority9 \lsdlocked0 heading 9; +\lsdpriority39 \lsdlocked0 toc 1;\lsdpriority39 \lsdlocked0 toc 2;\lsdpriority39 \lsdlocked0 toc 3;\lsdpriority39 \lsdlocked0 toc 4;\lsdpriority39 \lsdlocked0 toc 5;\lsdpriority39 \lsdlocked0 toc 6;\lsdpriority39 \lsdlocked0 toc 7; +\lsdpriority39 \lsdlocked0 toc 8;\lsdpriority39 \lsdlocked0 toc 9;\lsdqformat1 \lsdpriority35 \lsdlocked0 caption;\lsdsemihidden0 \lsdunhideused0 \lsdqformat1 \lsdpriority10 \lsdlocked0 Title;\lsdpriority1 \lsdlocked0 Default Paragraph Font; +\lsdsemihidden0 \lsdunhideused0 \lsdqformat1 \lsdpriority11 \lsdlocked0 Subtitle;\lsdsemihidden0 \lsdunhideused0 \lsdqformat1 \lsdpriority22 \lsdlocked0 Strong;\lsdsemihidden0 \lsdunhideused0 \lsdqformat1 \lsdpriority20 \lsdlocked0 Emphasis; +\lsdsemihidden0 \lsdunhideused0 \lsdpriority59 \lsdlocked0 Table Grid;\lsdunhideused0 \lsdlocked0 Placeholder Text;\lsdsemihidden0 \lsdunhideused0 \lsdqformat1 \lsdpriority1 \lsdlocked0 No Spacing; +\lsdsemihidden0 \lsdunhideused0 \lsdpriority60 \lsdlocked0 Light Shading;\lsdsemihidden0 \lsdunhideused0 \lsdpriority61 \lsdlocked0 Light List;\lsdsemihidden0 \lsdunhideused0 \lsdpriority62 \lsdlocked0 Light Grid; +\lsdsemihidden0 \lsdunhideused0 \lsdpriority63 \lsdlocked0 Medium Shading 1;\lsdsemihidden0 \lsdunhideused0 \lsdpriority64 \lsdlocked0 Medium Shading 2;\lsdsemihidden0 \lsdunhideused0 \lsdpriority65 \lsdlocked0 Medium List 1; +\lsdsemihidden0 \lsdunhideused0 \lsdpriority66 \lsdlocked0 Medium List 2;\lsdsemihidden0 \lsdunhideused0 \lsdpriority67 \lsdlocked0 Medium Grid 1;\lsdsemihidden0 \lsdunhideused0 \lsdpriority68 \lsdlocked0 Medium Grid 2; +\lsdsemihidden0 \lsdunhideused0 \lsdpriority69 \lsdlocked0 Medium Grid 3;\lsdsemihidden0 \lsdunhideused0 \lsdpriority70 \lsdlocked0 Dark List;\lsdsemihidden0 \lsdunhideused0 \lsdpriority71 \lsdlocked0 Colorful Shading; +\lsdsemihidden0 \lsdunhideused0 \lsdpriority72 \lsdlocked0 Colorful List;\lsdsemihidden0 \lsdunhideused0 \lsdpriority73 \lsdlocked0 Colorful Grid;\lsdsemihidden0 \lsdunhideused0 \lsdpriority60 \lsdlocked0 Light Shading Accent 1; +\lsdsemihidden0 \lsdunhideused0 \lsdpriority61 \lsdlocked0 Light List Accent 1;\lsdsemihidden0 \lsdunhideused0 \lsdpriority62 \lsdlocked0 Light Grid Accent 1;\lsdsemihidden0 \lsdunhideused0 \lsdpriority63 \lsdlocked0 Medium Shading 1 Accent 1; +\lsdsemihidden0 \lsdunhideused0 \lsdpriority64 \lsdlocked0 Medium Shading 2 Accent 1;\lsdsemihidden0 \lsdunhideused0 \lsdpriority65 \lsdlocked0 Medium List 1 Accent 1;\lsdunhideused0 \lsdlocked0 Revision; +\lsdsemihidden0 \lsdunhideused0 \lsdqformat1 \lsdpriority34 \lsdlocked0 List Paragraph;\lsdsemihidden0 \lsdunhideused0 \lsdqformat1 \lsdpriority29 \lsdlocked0 Quote;\lsdsemihidden0 \lsdunhideused0 \lsdqformat1 \lsdpriority30 \lsdlocked0 Intense Quote; +\lsdsemihidden0 \lsdunhideused0 \lsdpriority66 \lsdlocked0 Medium List 2 Accent 1;\lsdsemihidden0 \lsdunhideused0 \lsdpriority67 \lsdlocked0 Medium Grid 1 Accent 1;\lsdsemihidden0 \lsdunhideused0 \lsdpriority68 \lsdlocked0 Medium Grid 2 Accent 1; +\lsdsemihidden0 
\lsdunhideused0 \lsdpriority69 \lsdlocked0 Medium Grid 3 Accent 1;\lsdsemihidden0 \lsdunhideused0 \lsdpriority70 \lsdlocked0 Dark List Accent 1;\lsdsemihidden0 \lsdunhideused0 \lsdpriority71 \lsdlocked0 Colorful Shading Accent 1; +\lsdsemihidden0 \lsdunhideused0 \lsdpriority72 \lsdlocked0 Colorful List Accent 1;\lsdsemihidden0 \lsdunhideused0 \lsdpriority73 \lsdlocked0 Colorful Grid Accent 1;\lsdsemihidden0 \lsdunhideused0 \lsdpriority60 \lsdlocked0 Light Shading Accent 2; +\lsdsemihidden0 \lsdunhideused0 \lsdpriority61 \lsdlocked0 Light List Accent 2;\lsdsemihidden0 \lsdunhideused0 \lsdpriority62 \lsdlocked0 Light Grid Accent 2;\lsdsemihidden0 \lsdunhideused0 \lsdpriority63 \lsdlocked0 Medium Shading 1 Accent 2; +\lsdsemihidden0 \lsdunhideused0 \lsdpriority64 \lsdlocked0 Medium Shading 2 Accent 2;\lsdsemihidden0 \lsdunhideused0 \lsdpriority65 \lsdlocked0 Medium List 1 Accent 2;\lsdsemihidden0 \lsdunhideused0 \lsdpriority66 \lsdlocked0 Medium List 2 Accent 2; +\lsdsemihidden0 \lsdunhideused0 \lsdpriority67 \lsdlocked0 Medium Grid 1 Accent 2;\lsdsemihidden0 \lsdunhideused0 \lsdpriority68 \lsdlocked0 Medium Grid 2 Accent 2;\lsdsemihidden0 \lsdunhideused0 \lsdpriority69 \lsdlocked0 Medium Grid 3 Accent 2; +\lsdsemihidden0 \lsdunhideused0 \lsdpriority70 \lsdlocked0 Dark List Accent 2;\lsdsemihidden0 \lsdunhideused0 \lsdpriority71 \lsdlocked0 Colorful Shading Accent 2;\lsdsemihidden0 \lsdunhideused0 \lsdpriority72 \lsdlocked0 Colorful List Accent 2; +\lsdsemihidden0 \lsdunhideused0 \lsdpriority73 \lsdlocked0 Colorful Grid Accent 2;\lsdsemihidden0 \lsdunhideused0 \lsdpriority60 \lsdlocked0 Light Shading Accent 3;\lsdsemihidden0 \lsdunhideused0 \lsdpriority61 \lsdlocked0 Light List Accent 3; +\lsdsemihidden0 \lsdunhideused0 \lsdpriority62 \lsdlocked0 Light Grid Accent 3;\lsdsemihidden0 \lsdunhideused0 \lsdpriority63 \lsdlocked0 Medium Shading 1 Accent 3;\lsdsemihidden0 \lsdunhideused0 \lsdpriority64 \lsdlocked0 Medium Shading 2 Accent 3; +\lsdsemihidden0 \lsdunhideused0 \lsdpriority65 \lsdlocked0 Medium List 1 Accent 3;\lsdsemihidden0 \lsdunhideused0 \lsdpriority66 \lsdlocked0 Medium List 2 Accent 3;\lsdsemihidden0 \lsdunhideused0 \lsdpriority67 \lsdlocked0 Medium Grid 1 Accent 3; +\lsdsemihidden0 \lsdunhideused0 \lsdpriority68 \lsdlocked0 Medium Grid 2 Accent 3;\lsdsemihidden0 \lsdunhideused0 \lsdpriority69 \lsdlocked0 Medium Grid 3 Accent 3;\lsdsemihidden0 \lsdunhideused0 \lsdpriority70 \lsdlocked0 Dark List Accent 3; +\lsdsemihidden0 \lsdunhideused0 \lsdpriority71 \lsdlocked0 Colorful Shading Accent 3;\lsdsemihidden0 \lsdunhideused0 \lsdpriority72 \lsdlocked0 Colorful List Accent 3;\lsdsemihidden0 \lsdunhideused0 \lsdpriority73 \lsdlocked0 Colorful Grid Accent 3; +\lsdsemihidden0 \lsdunhideused0 \lsdpriority60 \lsdlocked0 Light Shading Accent 4;\lsdsemihidden0 \lsdunhideused0 \lsdpriority61 \lsdlocked0 Light List Accent 4;\lsdsemihidden0 \lsdunhideused0 \lsdpriority62 \lsdlocked0 Light Grid Accent 4; +\lsdsemihidden0 \lsdunhideused0 \lsdpriority63 \lsdlocked0 Medium Shading 1 Accent 4;\lsdsemihidden0 \lsdunhideused0 \lsdpriority64 \lsdlocked0 Medium Shading 2 Accent 4;\lsdsemihidden0 \lsdunhideused0 \lsdpriority65 \lsdlocked0 Medium List 1 Accent 4; +\lsdsemihidden0 \lsdunhideused0 \lsdpriority66 \lsdlocked0 Medium List 2 Accent 4;\lsdsemihidden0 \lsdunhideused0 \lsdpriority67 \lsdlocked0 Medium Grid 1 Accent 4;\lsdsemihidden0 \lsdunhideused0 \lsdpriority68 \lsdlocked0 Medium Grid 2 Accent 4; +\lsdsemihidden0 \lsdunhideused0 \lsdpriority69 \lsdlocked0 Medium Grid 3 Accent 4;\lsdsemihidden0 
\lsdunhideused0 \lsdpriority70 \lsdlocked0 Dark List Accent 4;\lsdsemihidden0 \lsdunhideused0 \lsdpriority71 \lsdlocked0 Colorful Shading Accent 4; +\lsdsemihidden0 \lsdunhideused0 \lsdpriority72 \lsdlocked0 Colorful List Accent 4;\lsdsemihidden0 \lsdunhideused0 \lsdpriority73 \lsdlocked0 Colorful Grid Accent 4;\lsdsemihidden0 \lsdunhideused0 \lsdpriority60 \lsdlocked0 Light Shading Accent 5; +\lsdsemihidden0 \lsdunhideused0 \lsdpriority61 \lsdlocked0 Light List Accent 5;\lsdsemihidden0 \lsdunhideused0 \lsdpriority62 \lsdlocked0 Light Grid Accent 5;\lsdsemihidden0 \lsdunhideused0 \lsdpriority63 \lsdlocked0 Medium Shading 1 Accent 5; +\lsdsemihidden0 \lsdunhideused0 \lsdpriority64 \lsdlocked0 Medium Shading 2 Accent 5;\lsdsemihidden0 \lsdunhideused0 \lsdpriority65 \lsdlocked0 Medium List 1 Accent 5;\lsdsemihidden0 \lsdunhideused0 \lsdpriority66 \lsdlocked0 Medium List 2 Accent 5; +\lsdsemihidden0 \lsdunhideused0 \lsdpriority67 \lsdlocked0 Medium Grid 1 Accent 5;\lsdsemihidden0 \lsdunhideused0 \lsdpriority68 \lsdlocked0 Medium Grid 2 Accent 5;\lsdsemihidden0 \lsdunhideused0 \lsdpriority69 \lsdlocked0 Medium Grid 3 Accent 5; +\lsdsemihidden0 \lsdunhideused0 \lsdpriority70 \lsdlocked0 Dark List Accent 5;\lsdsemihidden0 \lsdunhideused0 \lsdpriority71 \lsdlocked0 Colorful Shading Accent 5;\lsdsemihidden0 \lsdunhideused0 \lsdpriority72 \lsdlocked0 Colorful List Accent 5; +\lsdsemihidden0 \lsdunhideused0 \lsdpriority73 \lsdlocked0 Colorful Grid Accent 5;\lsdsemihidden0 \lsdunhideused0 \lsdpriority60 \lsdlocked0 Light Shading Accent 6;\lsdsemihidden0 \lsdunhideused0 \lsdpriority61 \lsdlocked0 Light List Accent 6; +\lsdsemihidden0 \lsdunhideused0 \lsdpriority62 \lsdlocked0 Light Grid Accent 6;\lsdsemihidden0 \lsdunhideused0 \lsdpriority63 \lsdlocked0 Medium Shading 1 Accent 6;\lsdsemihidden0 \lsdunhideused0 \lsdpriority64 \lsdlocked0 Medium Shading 2 Accent 6; +\lsdsemihidden0 \lsdunhideused0 \lsdpriority65 \lsdlocked0 Medium List 1 Accent 6;\lsdsemihidden0 \lsdunhideused0 \lsdpriority66 \lsdlocked0 Medium List 2 Accent 6;\lsdsemihidden0 \lsdunhideused0 \lsdpriority67 \lsdlocked0 Medium Grid 1 Accent 6; +\lsdsemihidden0 \lsdunhideused0 \lsdpriority68 \lsdlocked0 Medium Grid 2 Accent 6;\lsdsemihidden0 \lsdunhideused0 \lsdpriority69 \lsdlocked0 Medium Grid 3 Accent 6;\lsdsemihidden0 \lsdunhideused0 \lsdpriority70 \lsdlocked0 Dark List Accent 6; +\lsdsemihidden0 \lsdunhideused0 \lsdpriority71 \lsdlocked0 Colorful Shading Accent 6;\lsdsemihidden0 \lsdunhideused0 \lsdpriority72 \lsdlocked0 Colorful List Accent 6;\lsdsemihidden0 \lsdunhideused0 \lsdpriority73 \lsdlocked0 Colorful Grid Accent 6; +\lsdsemihidden0 \lsdunhideused0 \lsdqformat1 \lsdpriority19 \lsdlocked0 Subtle Emphasis;\lsdsemihidden0 \lsdunhideused0 \lsdqformat1 \lsdpriority21 \lsdlocked0 Intense Emphasis; +\lsdsemihidden0 \lsdunhideused0 \lsdqformat1 \lsdpriority31 \lsdlocked0 Subtle Reference;\lsdsemihidden0 \lsdunhideused0 \lsdqformat1 \lsdpriority32 \lsdlocked0 Intense Reference; +\lsdsemihidden0 \lsdunhideused0 \lsdqformat1 \lsdpriority33 \lsdlocked0 Book Title;\lsdpriority37 \lsdlocked0 Bibliography;\lsdqformat1 \lsdpriority39 \lsdlocked0 TOC Heading;}}{\*\datastore 010500000200000018000000 +4d73786d6c322e534158584d4c5265616465722e352e3000000000000000000000060000 +d0cf11e0a1b11ae1000000000000000000000000000000003e000300feff090006000000000000000000000001000000010000000000000000100000feffffff00000000feffffff0000000000000000ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff 
+ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff +ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff +ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff +fffffffffffffffffdfffffffeffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff +ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff +ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff +ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff +ffffffffffffffffffffffffffffffff52006f006f007400200045006e00740072007900000000000000000000000000000000000000000000000000000000000000000000000000000000000000000016000500ffffffffffffffffffffffffec69d9888b8b3d4c859eaf6cd158be0f000000000000000000000000d03f +a31cfe31d101feffffff00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000ffffffffffffffffffffffff00000000000000000000000000000000000000000000000000000000 +00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000ffffffffffffffffffffffff0000000000000000000000000000000000000000000000000000 +000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000ffffffffffffffffffffffff000000000000000000000000000000000000000000000000 +0000000000000000000000000000000000000000000000000105000000000000}} \ No newline at end of file diff --git a/playground/stpl_c_py_cs_satellite_wix/src/msi/wscript_build b/playground/stpl_c_py_cs_satellite_wix/src/msi/wscript_build new file mode 100644 index 0000000000..f907bf3d17 --- /dev/null +++ b/playground/stpl_c_py_cs_satellite_wix/src/msi/wscript_build @@ -0,0 +1,18 @@ +# vim:syntax=python + +exts = [x+'.dll' for x in 'WixNetFxExtension WixUIExtension WixBalExtension WixUtilExtension'.split()] + +funi = ctx.env['dllname'] +funigui = ctx.env['guiname'] +funiversion = ctx.env['version'] +funivisiblename = funigui+'_'+funiversion +funimsi = funivisiblename+'.msi' +funicompany = ctx.env['company'] + +define=lambda v:'-d'+v+'='+eval(v) +wixvar = 
[define(v) for v in 'funi funigui funimsi funivisiblename funiversion funicompany'.split()] + +bld(features = 'wix', source=['funi.wxs']+exts , gen=funimsi, candleflags=wixvar) +bld.add_group() +bld(features = 'wix', source=['bundle.wxs']+exts, gen='setup.exe', candleflags=wixvar) + diff --git a/playground/stpl_c_py_cs_satellite_wix/src/wscript b/playground/stpl_c_py_cs_satellite_wix/src/wscript new file mode 100644 index 0000000000..3bb15bb19c --- /dev/null +++ b/playground/stpl_c_py_cs_satellite_wix/src/wscript @@ -0,0 +1,91 @@ +# vim: syntax=python +# +# needs waf created with +# python waf-light --tools=resx,satellite_assembly,wix +# +# Assumed situation: +# - Logic in C, which depends on an external device; some code is generated +# - Wrapper for C# +# - GUI in C# using C# Wrapper +# - GUI localization via satellite assemblies +# - Wrapper for Python +# +# Python libs required: bottle, cffi, pytest +# +# This project requires plenty of applications and libraries such as: +# gcc, mono-devel, pytest, cffi +# LD_LIBRARY_PATH=$PWD/../build/api/: PATH=$PATH:$LD_LIBRARY_PATH waf configure build test --stubs +# + + +from waflib import Utils +import sys, os, shutil + +APPNAME = "funigui" +DLLNAME = "funi" +VERSION = "1.0" + +COMPANY = "FuniCo" +MAXFUNI = 4 + +top = "." +out = "../build" + +def options(ctx): + ctx.add_option("--stubs", action="store_true", default=False, help="Compile with stubs instead of using external device") + ctx.load('compiler_c compiler_cxx cs') + +PYTEST = '' +def configure (ctx): + global PYTEST + try: + PYTEST = ctx.find_program('py.test')[0] + except: + PYTEST = ctx.find_program('py.test',path_list=[r'C:\Python35\Scripts']) [0] + + if ctx.options.stubs: + print('!USING STUBS!') + ctx.env.append_value('DEFINES',['STUBS','DEBUG']) + else: + ctx.env.append_value('DEFINES',['NDEBUG']) + + ctx.load('compiler_c compiler_cxx cs resx satellite_assembly') + if Utils.is_win32: + ctx.load('wix') + + if ctx.env['CC_NAME'] == 'msvc': + if ctx.options.stubs: + ctx.env.append_value('CFLAGS',['/Z7','/EHsc','/W3']) + ctx.env.append_value('CXXFLAGS',['/Z7','/EHsc','/W3']) + else: + ctx.env.append_value('CFLAGS',['/Ox','/EHsc','/DNDEBUG','/W3']) + ctx.env.append_value('CXXFLAGS',['/Ox','/EHsc','/DNDEBUG','/W3']) + print(ctx.env['CC_NAME']) + else: + if ctx.options.stubs: + ctx.env.append_value('CFLAGS',['-g','-w']) + ctx.env.append_value('CXXFLAGS',['-g','-w']) + else: + ctx.env.append_value('CFLAGS',['-O2','-w']) + ctx.env.append_value('CXXFLAGS',['-O2','-w']) + ctx.env.guiname = APPNAME + ctx.env.version = VERSION + ctx.env.dllname = DLLNAME + ctx.env.maxfuni = MAXFUNI + ctx.env.company = COMPANY + ctx.load('print_commands') + +def build(ctx): + ctx.load('build', tooldir='.') # additional stuff + ctx.recurse('api') + ctx.recurse('gui') + if Utils.is_win32: + ctx.recurse('msi') + +def test(ctx): + if ctx.options.stubs: + cwd = ctx.path.find_node('../build/api').abspath() + print('running test in ',cwd) + ctx.cmd_and_log(os.path.join(cwd,'test_funi'),cwd=cwd) + ctx.cmd_and_log(PYTEST+' test_funi.py',cwd=cwd) + diff --git a/playground/strip/a.c b/playground/strip/a.c new file mode 100644 index 0000000000..28316047f9 --- /dev/null +++ b/playground/strip/a.c @@ -0,0 +1,4 @@ +int foo() { + return 23; +} + diff --git a/playground/strip/strip.py b/playground/strip/strip.py index 18521ded70..a2df9d5e12 100644 --- a/playground/strip/strip.py +++ b/playground/strip/strip.py @@ -1,49 +1,38 @@ #! /usr/bin/env python """ -Strip a program/library after it is created. Use this tool as an example. 
+Strip a program/library after it is created.
-Usage::
+Since creating the file and modifying it occur in the same
+task, there will be no race condition with other tasks dependent
+on the output.
-	bld.program(features='strip', source='main.c', target='foo')
-
-By using::
-
-	@TaskGen.feature('cprogram', 'cxxprogram', 'fcprogram')
-
-
-If stripping at installation time is preferred, use the following::
-
-	import shutil, os
-	from waflib import Build
-	from waflib.Tools import ccroot
-	def copy_fun(self, src, tgt, **kw):
-		shutil.copy2(src, tgt)
-		os.chmod(tgt, kw.get('chmod', Utils.O644))
-		try:
-			tsk = kw['tsk']
-		except KeyError:
-			pass
-		else:
-			if isinstance(tsk.task, ccroot.link_task):
-				self.cmd_and_log('strip %s' % tgt)
-	Build.InstallContext.copy_fun = copy_fun
+For other implementation possibilities, see strip_hack.py and strip_on_install.py
 """
+from waflib import Task
+
 def configure(conf):
 	conf.find_program('strip')

-from waflib import Task, TaskGen
-class strip(Task.Task):
-	run_str = '${STRIP} ${SRC}'
-	color = 'BLUE'
-	after = ['cprogram', 'cxxprogram', 'cshlib', 'cxxshlib', 'fcprogram', 'fcshlib']
-
-@TaskGen.feature('strip')
-@TaskGen.after('apply_link')
-def add_strip_task(self):
-	try:
-		link_task = self.link_task
-	except AttributeError:
-		return
-	self.create_task('strip', link_task.outputs[0])
+def wrap_compiled_task(classname):
+	# override the class to add a new 'run' method
+	# such an implementation guarantees the absence of race conditions
+	#
+	cls1 = Task.classes[classname]
+	cls2 = type(classname, (cls1,), {'run_str': '${STRIP} ${TGT[0].abspath()}'})
+	cls3 = type(classname, (cls2,), {})
+
+	def run_all(self):
+		if self.env.NO_STRIPPING:
+			return cls1.run(self)
+		ret = cls1.run(self)
+		if ret:
+			return ret
+		return cls2.run(self)
+	cls3.run = run_all
+
+for k in 'cprogram cshlib cxxprogram cxxshlib fcprogram fcshlib dprogram dshlib'.split():
+	if k in Task.classes:
+		wrap_compiled_task(k)
+
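As a point of reference, a project could drive this tool as in the following minimal sketch; only the strip tool itself is part of this patch, the --nostrip option shown here is purely illustrative, and NO_STRIPPING is the environment flag consulted by run_all() above::

	#! /usr/bin/env python
	# hypothetical project wscript relying on playground/strip/strip.py

	def options(opt):
		opt.load('compiler_c')
		# illustrative switch, not provided by the tool itself
		opt.add_option('--nostrip', action='store_true', default=False, help='keep the debug symbols')

	def configure(conf):
		conf.load('compiler_c')
		conf.load('strip', tooldir='.')              # wraps the compiled/link task classes on load
		conf.env.NO_STRIPPING = conf.options.nostrip # the flag checked by run_all()

	def build(bld):
		bld.shlib(source='a.c', target='lib1')
		bld.program(source='main.c', target='app', use='lib1')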
diff --git a/playground/strip/strip_hack.py b/playground/strip/strip_hack.py
new file mode 100644
index 0000000000..0e3ce83fca
--- /dev/null
+++ b/playground/strip/strip_hack.py
@@ -0,0 +1,54 @@
+#! /usr/bin/env python
+
+"""
+This is a hack; in general, two tasks should not provide
+the same output nodes (bad abstraction), and this approach
+cannot scale to more than one operation.
+
+In this case, the strip task has the same inputs as outputs,
+so the constraints added by Task.set_file_constraints
+prevent race conditions:
+
+- By setting the input node to be the link task output node
+  the strip tasks will run after their link tasks
+- By setting the output node to be the link task output node,
+  any other task that also uses this output node will wait
+  for the strip task to finish too
+- By overriding the runnable_status method, the strip task
+  will avoid the deadlock and force itself to run only when
+  the link task has run
+"""
+
+def configure(conf):
+	conf.find_program('strip')
+
+from waflib import Task, TaskGen
+class strip(Task.Task):
+	run_str = '${STRIP} ${SRC}'
+	color = 'BLUE'
+	no_errcheck_out = True
+
+	def keyword(self):
+		return 'Stripping'
+
+	def runnable_status(self):
+		if self in self.run_after:
+			self.run_after.remove(self)
+		ret = super(strip, self).runnable_status()
+		if ret == Task.ASK_LATER:
+			return ret
+
+		if self.generator.link_task.hasrun == Task.SUCCESS:
+			# ensure that stripping always runs
+			# when a binary is written
+			return Task.RUN_ME
+		return Task.SKIP_ME
+
+@TaskGen.feature('cshlib', 'cxxshlib', 'cprogram', 'cxxprogram', 'fcprogram', 'fcshlib')
+@TaskGen.after('apply_link')
+def add_strip_task(self):
+	if getattr(self, 'link_task', None):
+		exe_node = self.link_task.outputs[0]
+		# special case: same inputs and outputs for a task
+		self.create_task('strip', exe_node, exe_node)
+
diff --git a/playground/strip/strip_on_install.py b/playground/strip/strip_on_install.py
new file mode 100644
index 0000000000..7dd93a9c04
--- /dev/null
+++ b/playground/strip/strip_on_install.py
@@ -0,0 +1,20 @@ +#!
/usr/bin/env python + +""" +Strip executables upon installation +""" + +import shutil, os +from waflib import Build, Utils, Context + +def copy_fun(self, src, tgt): + if Utils.is_win32 and len(tgt) > 259 and not tgt.startswith('\\\\?\\'): + tgt = '\\\\?\\' + tgt + shutil.copy2(src, tgt) + os.chmod(tgt, self.chmod) + + if getattr(self.generator, 'link_task', None): + if self.generator.link_task.outputs[0] in self.inputs: + self.generator.bld.cmd_and_log('strip %s' % tgt, quiet=Context.BOTH) +Build.inst.copy_fun = copy_fun + diff --git a/playground/strip/wscript b/playground/strip/wscript index 772eb5311d..da96687334 100644 --- a/playground/strip/wscript +++ b/playground/strip/wscript @@ -5,8 +5,13 @@ def options(opt): def configure(conf): conf.load('compiler_c') + + # choose wisely conf.load('strip', tooldir='.') + #conf.load('strip_hack', tooldir='.') + #conf.load('strip_on_install', tooldir='.') def build(bld): - bld.program(features='strip', source='main.c', target='app') + bld.shlib(source='a.c', target='lib1') + bld.program(source='main.c', target='app', use='lib1') diff --git a/playground/swig/embed/src1.cpp b/playground/swig/embed/src1.cpp index 25f84820f0..a760dc7ba1 100644 --- a/playground/swig/embed/src1.cpp +++ b/playground/swig/embed/src1.cpp @@ -3,7 +3,11 @@ extern "C" { +#if PY_VERSION_HEX >= 0x03000000 + void PyInit__swigdemo(void); +#else void init_swigdemo(void); +#endif } TestClass* TestClass::_instance = 0; @@ -11,7 +15,11 @@ TestClass* TestClass::_instance = 0; int main() { Py_Initialize(); +#if PY_VERSION_HEX >= 0x03000000 + PyInit__swigdemo(); +#else init_swigdemo(); +#endif /*FILE* file_py; file_py = fopen(i_oFile.toLocal8Bit(), "r"); diff --git a/playground/swig/extend/python/include.py b/playground/swig/extend/python/include.py new file mode 100644 index 0000000000..11b15b1a45 --- /dev/null +++ b/playground/swig/extend/python/include.py @@ -0,0 +1 @@ +print("hello") diff --git a/playground/swig/extend/python/test_swig_waf.i b/playground/swig/extend/python/test_swig_waf.i index 8a4291e7ae..41e20e8df8 100644 --- a/playground/swig/extend/python/test_swig_waf.i +++ b/playground/swig/extend/python/test_swig_waf.i @@ -6,3 +6,4 @@ %include "a.h" %module test_swig_waf +%pythoncode "python/include.py" diff --git a/playground/swig/wscript b/playground/swig/wscript index b461c4e785..f9b88f299b 100644 --- a/playground/swig/wscript +++ b/playground/swig/wscript @@ -39,7 +39,7 @@ def build(bld): # embedding # - # use swig_flags = '-c++ -python -dump_classes' for debugging + # use swig_flags = '-c++ -python -debug-classes' for debugging obj = bld( features = 'cxx cxxprogram pyembed', @@ -74,14 +74,11 @@ def build(bld): vnum = '1.2.3', use = 'mylib') - - # Swig forces a dynamic build therefore a build group is necessary - from waflib import Build - bld.post_mode = Build.POST_LAZY bld.add_group() + python_site_package = '${PREFIX}/lib/python%s/site-packages' % bld.env.PYTHON_VERSION generated_py = bld.path.find_or_declare('extend/python/test_swig_waf.py') - bld(feature='py', source=generated_py, install_path=python_site_package, install_from=bld.path.get_bld()) + bld(features='py', source=generated_py, install_path=python_site_package, install_from=bld.path.get_bld()) bld.add_post_fun(exec_test_python) @@ -90,81 +87,39 @@ def build(bld): if not bld.env.HAVE_JAVA: return - from waflib.extras import swig - - srcdir = bld.path.get_bld().make_node('extend/java/hmm') # destination for generated java file (without the packages!) 
- - #""" # BEGIN BLOCK 1 - d = bld.path.make_node('extend/java') - javanodes = [d.find_or_declare(x) for x in 'A.java test_swig_waf.java test_swig_wafJNI.java'.split()] - dec = bld.tools['swig'].swigf - #@dec <- python 2.3 does not support the @decorator notation - def swig_java(tsk): - tsk.outputs.extend(javanodes) - bld.tools['swig'].swigf(swig_java) - """ # END BLOCK 1 - #"""# do not remove - - + swigsrcdir = bld.path.get_bld().make_node('extend/java') # destination for generated java source from swig + swigoutdir = bld.path.get_bld().make_node('extend/jar') # destination for generated class files - - - bld( + # Will generate code via swig and also the JNI library in C++ + jniswig = bld( features = 'cxx cxxshlib', source = 'extend/java/test_swig_waf.i', target = 'extend/java/_test_swig_waf', - swig_flags = '-c++ -java -package foo.bar.pouet', + swig_flags = '-c++ -java -package foo.bar.pouet -outdir extend/java/foo/bar/pouet', includes = 'extend', vnum = '1.2.3', uselib = 'JAVA', use = 'mylib') - #""" # BEGIN BLOCK 2 - for x in javanodes: - bld(rule='cp ${SRC} ${TGT}', source=x, - target=srcdir.make_node('foo/bar/pouet/' + x.name), before=['javac'], after=['swig']) - """ # END BLOCK 2 - - def move_java_files(task): - import os, shutil - from waflib import Utils - - node = srcdir.make_node('foo/bar/pouet/') - node.mkdir() - orig = task.inputs[0].parent.get_bld().abspath() - files = Utils.listdir(orig) - for x in files: - if x.endswith('.java'): - # create a node in the directory we want to - j = node.make_node(x) # create a node - shutil.copy2(orig + os.sep + x, j.abspath()) # create the physical file for the node - j.sig = Utils.h_file(j.abspath()) # update the node signature - # depend on the .i file to make sure the .java files are copied after swig is executed - bld(name='move_and_read', rule=move_java_files, source='extend/java/test_swig_waf.i', after=['swig'], before=['javac']) - #""" - - - bld(rule='cp ${SRC} ${TGT}', source=bld.path.find_resource('extend/java/Foo.java'), - target=srcdir.make_node('foo/bar/pouet/Foo.java'), before=['javac'], after=['swig']) - - tmp = bld.path.get_bld().make_node('maha') - - bld(features = 'javac jar', - srcdir = srcdir, + # Java will contain the generated code from swigsrcdir plus the local sources + jswig = bld(features = 'javac jar', + srcdir = [ swigsrcdir , 'extend/java'] , sourcepath = [], - outdir = tmp, # we do need another folder here - basedir = tmp, - destfile = 'maha.jar' + outdir = swigoutdir, + basedir = swigoutdir, + destfile = 'maha.jar', ) - bld.add_post_fun(exec_test_java) + # Post JNI library and Java generators so we have tasks created + jniswig.post() + jswig.post() + # Now make sure javac task is executed after swig generation + for x in jniswig.tasks: + if x.__class__.__name__ == 'swig': + jswig.javac_task.set_run_after(x) - ######################################### - # listing the java nodes is required to ensure the swig task - # is executed whenever the java files are removed from - # the build directory - # - # to list the java files automatically, comment the starting character '#' in the lines "BEGIN BLOCK 1" and "BEGIN BLOCK 2" + # Launch the test after build + bld.add_post_fun(exec_test_java) def exec_test_java(bld): @@ -180,7 +135,7 @@ def exec_test_python(bld): proc = subprocess.Popen(''' PYTHONPATH=$PYTHONPATH:build/extend/python LD_LIBRARY_PATH=$LD_LIBRARY_PATH:build/extend/python:build/extend -python -c "import test_swig_waf; a=test_swig_waf.A(); print 'Testing: a.add(2, 3) ->', a.add(2, 3)" +python -c "import 
test_swig_waf; a=test_swig_waf.A(); print('Testing: a.add(2, 3) -> %r' % a.add(2, 3))"
'''.replace('\n', ' '), shell=True)
		proc.wait()
	except:
diff --git a/playground/syms/wscript b/playground/syms/wscript
index bc40f71207..ef10aa501c 100644
--- a/playground/syms/wscript
+++ b/playground/syms/wscript
@@ -9,3 +9,10 @@ def configure(ctx):
 def build(ctx):
 	ctx(features='c cshlib syms', source='a.c b.c', export_symbols_regex='mylib_.*', target='testlib')
 	ctx(features='c cprogram', source='main.c', target='app', use='testlib')
+
+	# multiple shared libraries sharing the same symbol file
+	ctx(features='c syms', source='a.c b.c', export_symbols_regex='mylib_.*', use='cshlib',
+		target='obj_example', sym_file='obj_example.def')
+	ctx(features='c cshlib syms', source=[], export_symbols_regex='mylib_.*', target='lib1', sym_file='obj_example.def')
+	ctx(features='c cshlib syms', source=[], export_symbols_regex='mylib_.*', target='lib2', sym_file='obj_example.def')
+
diff --git a/playground/task_semaphore/wscript b/playground/task_semaphore/wscript
new file mode 100644
index 0000000000..cd11486ebf
--- /dev/null
+++ b/playground/task_semaphore/wscript
@@ -0,0 +1,44 @@
+#! /usr/bin/env python
+
+"""
+Task semaphore demo. Compare the runtimes:
+
+    waf configure build --fast    # 0m08
+    waf configure build           # 1m15
+"""
+
+import random, time
+from waflib import Task, TaskGen, Utils
+
+def options(opt):
+	opt.add_option('--fast', action='store_true', default=False, help='Disable the semaphore to compare the runtime', dest='fast')
+
+def configure(conf):
+	pass
+
+def build(bld):
+	# max 20 jobs globally
+	bld.jobs = 20
+
+	bld(features='make_100')
+
+	class Foo(Task.Task):
+		always_run = True
+
+		if not bld.options.fast:
+			semaphore = Task.TaskSemaphore(2) # 2 jobs maximum
+
+		def uid(self):
+			# unique id for each object
+			return Utils.h_list(self.num)
+
+		def run(self):
+			time.sleep(random.randint(1000, 2000) / 1000.)
+			print("Task %r" % self.num)
+
+	@TaskGen.feature('make_100')
+	def make_100_bound_tasks(self):
+		for x in range(100):
+			tsk = self.create_task('Foo')
+			tsk.num = x
+
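The same mechanism can throttle resource-hungry tasks outside of this demo; the following sketch is hypothetical and assumes that one TaskSemaphore instance may be shared by the standard C/C++ link task classes, in the same way the demo above attaches a semaphore as a class attribute::

	# hypothetical snippet: reuse Task.TaskSemaphore to limit concurrent link tasks
	from waflib import Task

	def build(bld):
		sem = Task.TaskSemaphore(2) # at most two of these tasks at any time
		for name in ('cprogram', 'cshlib', 'cxxprogram', 'cxxshlib'):
			cls = Task.classes.get(name)
			if cls:
				cls.semaphore = sem # same class attribute as in the demo above
		bld.program(source='main.c', target='app')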
""" @@ -24,12 +24,15 @@ def build(bld): # the feature 'fail' is defined below from waflib.Tools.cxx import cxx + +# our task class class cxxfail(cxx): def run(self): ret = super(cxxfail, self).run() self.outputs[0].write('just a simulation') return not ret +# @extension would apply this to all through TaskGen.mappings def one_more_mapping(self, node): return self.create_compiled_task('cxxfail', node) @@ -37,5 +40,8 @@ from waflib.TaskGen import feature, before @before('process_source') @feature('fail') def remap_failure_to_success(self): + # override + self.mappings = dict(self.mappings) + # then change the extension processing self.mappings['.cpp'] = one_more_mapping diff --git a/playground/top_eq_out/wscript b/playground/top_eq_out/wscript index 8142c10d69..88bd3494b9 100644 --- a/playground/top_eq_out/wscript +++ b/playground/top_eq_out/wscript @@ -52,7 +52,7 @@ def distclean(ctx): from waflib import Context for fn in os.listdir('.'): - if fn.startswith('.conf_check_') or fn.startswith(".lock-w") \ + if fn.startswith(('.conf_check_', ".lock-w")) \ or fn in (Context.DBFILE, 'config.log') \ or fn == 'c4che': if os.path.isdir(fn): diff --git a/docs/book/examples/cprog_propagation/b.c b/playground/unity/a.c similarity index 53% rename from docs/book/examples/cprog_propagation/b.c rename to playground/unity/a.c index 67ea476d9f..88dc44b324 100644 --- a/docs/book/examples/cprog_propagation/b.c +++ b/playground/unity/a.c @@ -1,3 +1,3 @@ -int c() { +int foo() { return 34; } diff --git a/docs/book/examples/cprog_attributes/main.c b/playground/unity/main.c similarity index 100% rename from docs/book/examples/cprog_attributes/main.c rename to playground/unity/main.c diff --git a/playground/unity/wscript b/playground/unity/wscript new file mode 100644 index 0000000000..7993ca4bbf --- /dev/null +++ b/playground/unity/wscript @@ -0,0 +1,12 @@ +#! /usr/bin/env python + +def options(opt): + opt.load('compiler_c') + +def configure(conf): + conf.load('compiler_c') + conf.load('unity') + +def build(bld): + bld.program(source='a.c main.c', target='app') + diff --git a/playground/use/objects/lib/lib.c b/playground/use/objects/lib/lib.c new file mode 100644 index 0000000000..8a67e9c730 --- /dev/null +++ b/playground/use/objects/lib/lib.c @@ -0,0 +1,6 @@ +#include "lib.h" + +int calc(int a, int b) +{ + return a+b; +} \ No newline at end of file diff --git a/playground/use/objects/lib/lib.h b/playground/use/objects/lib/lib.h new file mode 100644 index 0000000000..33d9a9094c --- /dev/null +++ b/playground/use/objects/lib/lib.h @@ -0,0 +1,4 @@ + + +int calc(int a, int b); + diff --git a/playground/use/objects/lib/wscript b/playground/use/objects/lib/wscript new file mode 100644 index 0000000000..38fd984aac --- /dev/null +++ b/playground/use/objects/lib/wscript @@ -0,0 +1,15 @@ +#! /usr/bin/env python + +INCLUDES = [ + '.' 
+] + +def build(bld): + + bld.shlib( + target='lib', + source='lib.c', + includes=INCLUDES, + export_includes=INCLUDES, + install_path=bld.env.LIBDIR, + ) diff --git a/playground/use/objects/libex/libex.c b/playground/use/objects/libex/libex.c new file mode 100644 index 0000000000..14f912f684 --- /dev/null +++ b/playground/use/objects/libex/libex.c @@ -0,0 +1,7 @@ +#include "libex.h" +#include "lib.h" + +int calcex(int a, int b) +{ + return calc(a, b) * 2; +} \ No newline at end of file diff --git a/playground/use/objects/libex/libex.h b/playground/use/objects/libex/libex.h new file mode 100644 index 0000000000..68f5fca914 --- /dev/null +++ b/playground/use/objects/libex/libex.h @@ -0,0 +1,4 @@ + + +int calcex(int a, int b); + diff --git a/playground/use/objects/libex/wscript b/playground/use/objects/libex/wscript new file mode 100644 index 0000000000..fb0ebcf9fa --- /dev/null +++ b/playground/use/objects/libex/wscript @@ -0,0 +1,20 @@ +#! /usr/bin/env python + + +INCLUDES = [ + '.', + # '../lib/' +] + +def build(bld): + + bld.shlib( + target='libex', + source='libex.c', + includes=INCLUDES, + export_includes=INCLUDES, + install_path=bld.env.LIBDIR, + use='lib' + ) + + \ No newline at end of file diff --git a/playground/use/objects/wscript_build b/playground/use/objects/wscript_build index ce02a852d9..58d6e019ce 100644 --- a/playground/use/objects/wscript_build +++ b/playground/use/objects/wscript_build @@ -6,6 +6,10 @@ when linked, object files should bring the libraries (uselib) they refer to bld.env.LIB_Z = ['z'] +bld.recurse('lib') + +bld.recurse('libex') + bld.objects( source = 'a.c', target = 'A', @@ -16,7 +20,7 @@ bld.objects( bld.program( source = 'a-test.c', target = 'a-test', - use = 'A', + use = 'A libex', ) """ diff --git a/playground/xcode6/include/MyLib/SupportLib/SomeHeaderToExport.h b/playground/xcode6/include/MyLib/SupportLib/SomeHeaderToExport.h new file mode 100644 index 0000000000..e69de29bb2 diff --git a/playground/xcode6/include/MyLib/TestClass.h b/playground/xcode6/include/MyLib/TestClass.h new file mode 100644 index 0000000000..73f087caf9 --- /dev/null +++ b/playground/xcode6/include/MyLib/TestClass.h @@ -0,0 +1,7 @@ +#include + +class TestClass +{ +public: + std::string message(); +}; \ No newline at end of file diff --git a/playground/xcode6/src/MyLib/ObjcClass.m b/playground/xcode6/src/MyLib/ObjcClass.m new file mode 100644 index 0000000000..372686ff7d --- /dev/null +++ b/playground/xcode6/src/MyLib/ObjcClass.m @@ -0,0 +1,9 @@ +// +// ObjcClass.m +// +// +// Created by Simon Warg on 08/11/15. +// +// + +#import diff --git a/playground/xcode6/src/MyLib/TestClass.cpp b/playground/xcode6/src/MyLib/TestClass.cpp new file mode 100644 index 0000000000..80b8e034dd --- /dev/null +++ b/playground/xcode6/src/MyLib/TestClass.cpp @@ -0,0 +1,5 @@ +#include "MyLib/TestClass.h" + +std::string TestClass::message() { + return "Hello from TestClass"; +} \ No newline at end of file diff --git a/playground/xcode6/src/sample.txt b/playground/xcode6/src/sample.txt new file mode 100644 index 0000000000..b68ede55af --- /dev/null +++ b/playground/xcode6/src/sample.txt @@ -0,0 +1 @@ +Sample text file. 
\ No newline at end of file diff --git a/playground/xcode6/src/test.cpp b/playground/xcode6/src/test.cpp new file mode 100644 index 0000000000..44c057f32b --- /dev/null +++ b/playground/xcode6/src/test.cpp @@ -0,0 +1,13 @@ +#include +#include "MyLib/TestClass.h" +#include "config.h" + +int main(int argc, char const *argv[]) +{ + TestClass a; + std::cout << a.message() << std::endl; + std::cout << "Number should be 10: " << NUMBER << std::endl; + + + return 0; +} \ No newline at end of file diff --git a/playground/xcode6/wscript b/playground/xcode6/wscript new file mode 100644 index 0000000000..f1423257b3 --- /dev/null +++ b/playground/xcode6/wscript @@ -0,0 +1,113 @@ +#! /usr/bin/env python +# encoding: utf-8 + +from waflib import Task, TaskGen +top = '.' +out = 'build' +APPNAME = 'TestProject' +VERSION = '1.0' + +""" +To create the xcode project files: + waf configure xcode6 + +To configure and build using Waf: + waf configure build + +This demo will create an XCode project containing +an App bundle target, a dynamic library target, +a static library target and an executable target. +The generated XCode project can then be opened +and XCode can then build those targets. +Tested with XCode 8. + +""" + +def options(opt): + opt.load('compiler_cxx xcode6') + +def configure(conf): + + # Use environment variables to set default project configuration + # settings + conf.env.FRAMEWORK_VERSION = '1.0' + conf.env.ARCHS = 'x86_64' + conf.env.INSTALL_PATH = '/my/install/path' + + # The xcode6 tool will also pick up any c config files generated by + # the c_config tool, and it'll be added to your project's include path + conf.load('c_config') + conf.define('NUMBER', 10) + conf.write_config_header('config.h') + + # This must be called at the end of configure() + conf.load('compiler_cxx xcode6') + + conf.env.append_value('CXXFLAGS', ['-O2']) + + conf.check(cxxflags='-std=c++11', uselib_store='STD11', mandatory=False) + +def build(bld): + + # Make .framework targets + tg = bld.framework( + includes='include', + + # Source files + source=bld.path.ant_glob('src/MyLib/*.cpp'), + + # If you don't want the source files to appear in a default + # 'Source' folder, you can define your own folder structure + # using a dictionary, where the key is the desired name of the folder + # and the value are the files. 
+ group_files={ + 'Source files': bld.path.ant_glob('src/MyLib/*.cpp|*.m|*.mm'), + 'Include': bld.path.ant_glob(incl=['include/MyLib/*.h'], dir=True), + 'Help': ['src/sample.txt'] + }, + + # If you want to ship your header files with your .framework, then + # specify them using the 'export_headers' param + export_headers=bld.path.ant_glob(incl=['include/MyLib/*.h', 'include/MyLib/SupportLib/*.h']), + target='MyLib', + + # The 'install' param will set the INSTALL_PATH for the + # binary, and will also trigger XCode to copy the target to that + # path + install='~/Library/Frameworks' + ) + + # Make .a static library targets + bld.stlib( + source=bld.path.ant_glob('src/MyLib/*.cpp'), + includes = 'include', + target='MyStaticLib', + ) + + # Make standard executable target + bld.program( + source=['src/test.cpp'], + includes='include', + target='MyExe', + use='MyDynLib STD11' + ) + + # Make .dylib shared libraries + bld.shlib( + source=bld.path.ant_glob('src/MyLib/*.cpp'), + includes='include', + target='MyDynLib', + ) + + # Make an app bundle target + tg2 = bld.app( + source=bld.path.ant_glob('src/*.cpp'), + includes='include', + target='MyApp', + use='MyLib', + uselib='SDL2', + cxxflags='-DSOME_DEFINE', + framework='Cocoa', + # Override default setting in a target + settings={"Debug": {"CONFIG_NAME": 'Debug'}} + ) diff --git a/playground/xilinx-ise/wscript b/playground/xilinx-ise/wscript index df467a995f..e8b56ad478 100644 --- a/playground/xilinx-ise/wscript +++ b/playground/xilinx-ise/wscript @@ -232,9 +232,6 @@ run source=['%s.ncd' % fn, '%s.ut' % fn], ) - for tgen in bld.get_all_task_gen(): - tgen.update_outputs=True - if bld.cmd == 'clean': for tgen in bld.get_all_task_gen(): for tgt in waflib.Utils.to_list(tgen.target): @@ -259,13 +256,13 @@ run shutil.rmtree(x) except: pass - + def distclean(ctx): import os, shutil from waflib import Context for fn in os.listdir('.'): - if fn.startswith('.conf_check_') or fn.startswith(".lock-w") \ + if fn.startswith(('.conf_check_', ".lock-w")) \ or fn in (Context.DBFILE, 'config.log') \ or fn == 'c4che': if os.path.isdir(fn): diff --git a/tests/apis/wscript b/tests/apis/wscript index 62777a66ec..5713513292 100755 --- a/tests/apis/wscript +++ b/tests/apis/wscript @@ -3,12 +3,6 @@ import os, shutil from waflib import Node, Build, Utils, Logs -def tt(msg, result, expected): - color = 'RED' - if result == expected: - color = 'GREEN' - Logs.pprint(color, msg.ljust(20) + " %r" % result) - def exists(path): try: os.stat(path) @@ -40,6 +34,15 @@ def configure(ctx): def test(ctx): bld = Build.BuildContext() + errors = [] + def tt(msg, result, expected): + color = 'RED' + if result == expected: + color = 'GREEN' + else: + errors.append(result) + Logs.pprint(color, msg.ljust(20) + " %r" % result) + # 1. absdir is wrong, keep the drive letter # 2. 
split should use os.sep @@ -56,7 +59,8 @@ def test(ctx): tt('path_from', dd.path_from(pp), os.path.split(os.getcwd())[1]) tt('path_from (reverse)', pp.path_from(dd), '..') tt('same path', pp.path_from(pp), '.') - tt('same_root', bld.root.path_from(bld.root), '.') + tt('path from root is abspath()', pp.path_from(bld.root), pp.abspath()) + tt('root from root', bld.root.path_from(bld.root), bld.root.abspath()) tt('root height', bld.root.height(), 0) tt('self height', dd.height(), len(absdir)) @@ -118,7 +122,7 @@ def test(ctx): nf.write("aha") nf.get_bld_sig() tt('find_resource src/abc', bld.srcnode.find_resource(['abc']), nf) - tt('find_or_declare src/abc', bld.srcnode.find_or_declare(['abc']), nf) + tt('find_or_declare src/abc', bld.srcnode.find_or_declare(['abc']), bld.bldnode.make_node(['abc'])) tt('src.get_bld()', bld.srcnode.get_bld(), bld.bldnode) tt('bld.get_src()', bld.bldnode.get_src(), bld.srcnode) @@ -136,7 +140,46 @@ def test(ctx): create('src/b.txt') nd = bld.srcnode.make_node('c.txt') nd.write("test") - - tt("ant_glob ->", len(bld.srcnode.ant_glob('*.txt', flat=False)), 1) + create('d.TXT') + nd2 = bld.srcnode.make_node('d.TXT') + nd2.write("test") + nd3 = bld.srcnode.make_node('e.e+(e).txt') + nd3.write("test") + + tt("ant_glob ->", len(bld.srcnode.ant_glob('*.txt', flat=False)), 2) + tt("ant_glob (icase) ->", len(bld.srcnode.ant_glob('*.txt', flat=False, ignorecase=True)), 3) + tt("ant_glob (parentheses) ->", len(bld.srcnode.ant_glob('e.e+[(]e[)].txt', flat=False)), 1) #print("ant_glob src ->", bld.srcnode.ant_glob('*.txt')) + def abspath(self): + try: + return self.cache_abspath + except AttributeError: + pass + if not self.parent: + val = '' + elif not self.parent.name: + val = self.name + '\\' + else: + val = self.parent.abspath().rstrip('\\') + '\\' + self.name + self.cache_abspath = val + return val + + # the local class will be unused soon enough + old_abspath = bld.node_class.abspath + bld.node_class.abspath = abspath + + unc1 = '\\\\computer\\share\\file' + lst = Utils.split_path_win32(unc1) + node = bld.root.make_node(lst) + tt('UNC head node', lst[0], '\\\\computer') + tt('UNC share path', node.abspath(), unc1) + + unc2 = '\\\\?\\C:\\foo' + lst = Utils.split_path_win32(unc2) + node = bld.root.make_node(lst) + tt('UNC long path', node.abspath(), 'C:\\foo') + + if errors: + bld.fatal('There are test failures ^^') + diff --git a/tests/config/wscript b/tests/config/wscript new file mode 100644 index 0000000000..baa352daff --- /dev/null +++ b/tests/config/wscript @@ -0,0 +1,155 @@ +#! /usr/bin/env python +# encoding: utf-8 + +from waflib.Logs import pprint + +top = '.' 
+out = 'build' + +def options(opt): + opt.load('compiler_c') + +def configure(conf): + conf.load('compiler_c') + + conf.failure = 0 + def disp(color, result): + pprint(color, result) + if color == 'RED': + conf.failure=1 + + def test(*funs): + conf.env.stash() + conf.in_msg = 1 # suppress outputs + for f in funs: + ret = f() + if not ret: + color = "GREEN" + else: + color = "RED" + if ret: + ret = '\t\t' + ret + else: + ret = '' + disp(color, "%s%s" % (f.__doc__, ret)) + conf.env.revert() + conf.in_msg = 0 + return None + + @test + def fun1(): + "global_define=1 by default -> no DEFINES_X anywhere" + conf.check_cfg(package='libpng') + conf.check_cc(header_name='unistd.h') + for x in conf.env: + if x.startswith('DEFINES_') and x != 'DEFINES_ST': + return 'conf.env.%s = %r' % (x, conf.env[x]) + + @test + def fun2(): + "global_define=1 -> no DEFINES_X anywhere" + conf.check_cfg(package='libpng', global_define=1) + conf.check_cc(header_name='unistd.h', global_define=1) + for x in conf.env: + if x.startswith('DEFINES_') and x != 'DEFINES_ST': + return 'conf.env.%s = %r' % (x, conf.env[x]) + + @test + def fun3(): + "global_define=0 -> DEFINES=[]" + conf.check_cfg(package='libpng', global_define=0) + conf.check_cc(header_name='unistd.h', global_define=0) + if conf.env.DEFINES: + return 'conf.env.DEFINES = %r' % conf.env.DEFINES + + @test + def fun4(): + "global_define=0 -> DEFINES_LIBPNG=['HAVE_LIBPNG=1']" + conf.check_cfg(package='libpng', global_define=0) + val = conf.env.DEFINES_LIBPNG + if not isinstance(val, list) or not "HAVE_LIBPNG=1" in val: + return 'conf.env.DEFINES_LIBPNG = %r' % val + + @test + def fun5(): + "global_defines=0, uselib_store=UNISTD -> DEFINES_UNISTD=['HAVE_UNISTD_H=1']" + conf.check_cc(header_name='unistd.h', uselib_store='UNISTD', global_define=0) + val = conf.env.DEFINES_UNISTD + if not isinstance(val, list) or not 'HAVE_UNISTD_H=1' in val: + return 'conf.env.DEFINES_UNISTD = %r' % val + + @test + def fun6(): + "global_defines=0, uselib_store=UNISTD, define_name=FOO -> DEFINES_UNISTD=['FOO=1']" + conf.check_cc(header_name='unistd.h', uselib_store='UNISTD', global_define=0, define_name='FOO') + val = conf.env.DEFINES_UNISTD + if not isinstance(val, list) or not 'FOO=1' in val: + return 'conf.env.DEFINES_UNISTD = %r' % val + + @test + def fun7(): + "uselib_store=UNISTD -> HAVE_UNISTD=1" + conf.check_cc(header_name='unistd.h', uselib_store='UNISTD') + val = conf.env.HAVE_UNISTD + if val != 1: + return 'conf.env.HAVE_UNISTD = %r' % val + + @test + def fun8(): + "global_defines=0, define_name=HAVE_FOO -> DEFINES_LIBPNG=['HAVE_FOO=1']" + conf.check_cfg(package='libpng', global_define=0, define_name='HAVE_FOO') + val = conf.env.DEFINES_LIBPNG + if not isinstance(val, list) or not "HAVE_FOO=1" in val: + return 'conf.env.DEFINES_LIBPNG = %r' % val + + @test + def modversion1(): + "modversion=libpng -> DEFINES=['LIBPNG_VERSION=X']" + conf.check_cfg(modversion='libpng') + val = conf.env.DEFINES + # automatic uppercase + if not isinstance(val, list) or not val[0].startswith("LIBPNG_VERSION="): + return 'conf.env.DEFINES = %r' % val + + @test + def modversion2(): + "modversion=libpng, uselib_store=foo -> DEFINES=['FOO_VERSION=X']" + conf.check_cfg(modversion='libpng', uselib_store='foo') + val = conf.env.DEFINES + # automatic uppercase + if not isinstance(val, list) or not val[0].startswith("FOO_VERSION="): + return 'conf.env.DEFINES = %r' % val + + @test + def modversion3(): + "modversion=libpng, uselib_store=foo, define_name=bar -> DEFINES=['bar=X']" + 
conf.check_cfg(modversion='libpng', uselib_store='foo', define_name='bar') + val = conf.env.DEFINES + if not isinstance(val, list) or not val[0].startswith("bar="): + return 'conf.env.DEFINES = %r' % val + + @test + def atleast_version1(): + "atleast_version=1.0, global_define=1 -> DEFINES=['HAVE_LIBPNG=1']" + # same in waf 1.8 and 1.9 + conf.check_cfg(package='libpng', atleast_version='1.0', global_define=1, args='--libs --cflags') + val = conf.env.DEFINES + if not isinstance(val, list) or not 'HAVE_LIBPNG=1' in val: + return 'conf.env.DEFINES = %r' % val + if not conf.env.LIB_LIBPNG: + return 'expected conf.env.LIB_LIBPNG to be defined :-/' + + @test + def atleast_version2(): + "atleast_version=1.0, uselib_store=foo -> DEFINES=['HAVE_FOO=1']" + conf.check_cfg(package='libpng', uselib_store='foo', atleast_version='1.0', args='--libs --cflags') + val = conf.env.DEFINES + if not isinstance(val, list) or not 'HAVE_FOO=1' in val: + return 'conf.env.DEFINES = %r' % val + if not conf.env.LIB_foo: + return 'expected conf.env.LIB_foo to be defined :-/' + + + if conf.failure: + conf.fatal('One or several test failed, check the outputs above') + diff --git a/tests/general/wscript b/tests/general/wscript new file mode 100644 index 0000000000..eab051cf96 --- /dev/null +++ b/tests/general/wscript @@ -0,0 +1,82 @@ +#! /usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2016 (ita) + +top = '.' +out = 'build' + +import functools +from waflib import Utils, Logs + +def configure(conf): + pass + +def fun1(): + return 0 + +def fun2(arg1, arg2): + return 1 + +def fun(arg1, arg2, task, one=1, two=2): + print(arg1, arg2, task, one, two) + +par1 = functools.partial(fun, 'arg1') +par2 = functools.partial(par1, 'arg2', one=11, two=22) + +def fun3(): + return 32 +par3 = functools.partial(par1, 'arg2', one=11, two=22) + +def build(bld): + + bld.failure = 0 + def disp(color, result): + Logs.pprint(color, result) + if color == 'RED': + bld.failure=1 + def stop_status(bld): + if bld.failure: + bld.fatal('One or several test failed, check the outputs above') + bld.add_post_fun(stop_status) + + def simple_hash(fun): + status = '' + try: + Utils.h_cmd(fun) + except Exception as e: + status = str(e) + return status + + def hash_test(name, fun): + ret = simple_hash(fun) + if not ret: + color = "GREEN" + else: + color = "RED" + ret = ret or 'ok' + disp(color, '%s\t\t%s' % (name, ret)) + + hash_test('simple function 1', fun1) + hash_test('simple function 2', fun1) + hash_test('simple partial', par1) + hash_test('nested partial', par2) + + def hash_twice(name, fun): + try: + ret1 = Utils.h_cmd(fun) + ret2 = Utils.h_cmd(fun) + except Exception as e: + msg = str(e) + color = 'RED' + else: + if ret1 == ret2: + msg = 'ok %r' % ret1 + color = 'GREEN' + else: + msg = '%r != %r' % (ret1, ret2) + color = 'RED' + disp(color, '%s\t\t%s' % (name, msg)) + + hash_twice('consistent on fun3', fun3) + hash_twice('consistent on par3', par3) + diff --git a/tests/init/side_cwd/side.txt b/tests/init/side_cwd/side.txt new file mode 100644 index 0000000000..9c558e357c --- /dev/null +++ b/tests/init/side_cwd/side.txt @@ -0,0 +1 @@ +. diff --git a/tests/init/up_cwd/project/sub/test.txt b/tests/init/up_cwd/project/sub/test.txt new file mode 100644 index 0000000000..9c558e357c --- /dev/null +++ b/tests/init/up_cwd/project/sub/test.txt @@ -0,0 +1 @@ +. 
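The tests/general/wscript section above exercises waflib.Utils.h_cmd, which Waf uses when hashing rule callables to derive task signatures: the call must succeed and return a stable value for plain functions as well as for functools.partial objects. Below is a minimal standalone sketch of that property, assuming a waf 2.x checkout with waflib importable; sample_rule and partial_rule are illustrative names that do not appear in the patch.

#! /usr/bin/env python
# Illustrative sketch of the stability property asserted by hash_twice()
# in tests/general/wscript; assumes waflib (waf 2.x) is on sys.path.
import functools
from waflib import Utils

def sample_rule(task, verbose=False):
	# hypothetical rule body; only its source and bound arguments matter for hashing
	return 0

partial_rule = functools.partial(sample_rule, verbose=True)

# h_cmd must not raise and must return the same value on repeated calls,
# otherwise task signatures would change between otherwise identical builds
assert Utils.h_cmd(sample_rule) == Utils.h_cmd(sample_rule)
assert Utils.h_cmd(partial_rule) == Utils.h_cmd(partial_rule)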
diff --git a/tests/init/up_cwd/project/wscript b/tests/init/up_cwd/project/wscript new file mode 100755 index 0000000000..1994de56dd --- /dev/null +++ b/tests/init/up_cwd/project/wscript @@ -0,0 +1,54 @@ +#! /usr/bin/env waf + +top = '.' +out = 'tmp_out' + +import os, random, time +from waflib import ConfigSet, Context, Build, Configure, Utils + +Configure.autoconfig='clobber' + +def options(opt): + opt.add_option('--based', action='store', default='foo', help='base directory', dest='based') + opt.add_option('--dumpf', action='store', default='foo', help='dump config to this file', dest='dumpf') + opt.add_option('--force-autoconfig', action='store', default=False, dest='force_autoconfig') + +def configure(ctx): + # force autoconfig to depend on an always-changing file - once + if ctx.options.force_autoconfig: + fname = ctx.options.dumpf + '_autoconfig' + s = "%f%d" % (time.time(), random.randint(0, 10**9)) + Utils.writef(fname, s) + ctx.files.append(fname) + Configure.autoconfig = False + +def build(ctx): + pass + +def write_conf(ctx): + if not ctx.options.dumpf: + raise ValueError('Missing --dumpf option') + if not ctx.options.based: + raise ValueError('Missing --based option') + + def g(x): + # path from conf.options.based + based = ctx.root.find_node(ctx.options.based) + node = ctx.root.find_node(x) + return node.path_from(based) + + env = ConfigSet.ConfigSet() + env.cwd_dir = g(os.getcwd()) + env.top_dir = g(Context.top_dir) + env.out_dir = g(Context.out_dir) + env.run_dir = g(Context.run_dir) + env.launch_dir = g(Context.launch_dir) + + env.store(ctx.options.dumpf) + +for y in (Build.BuildContext, Configure.ConfigurationContext): + class tmp(y): + def execute(self, *k, **kw): + super(self.__class__, self).execute(*k, **kw) + write_conf(self) + diff --git a/tests/init/wscript b/tests/init/wscript new file mode 100644 index 0000000000..6578dac53b --- /dev/null +++ b/tests/init/wscript @@ -0,0 +1,138 @@ +#! /usr/bin/env python + +top = '.' 
+out = 'build' + +import os, shutil, sys +from waflib import ConfigSet, Context, Logs + +def options(opt): + pass + +def configure(conf): + pass + + +def run_command(ctx, *k, **kw): + with open('/dev/null', 'w') as f: + kw['stdout'] = f + ret = ctx.exec_command(*k, **kw) + if ret: + ctx.fatal('Command failed ret:%r - %r %r' % (ret, k, kw)) + return ret + +def cleanup(ctx): + for y in ('side_cwd', 'up_cwd'): + lst = ctx.path.find_node(y).ant_glob(['**/.lock-waf*']) + for k in lst: + k.delete() + + for k in ctx.path.ant_glob('**/tmp_out', dir=True): + shutil.rmtree(k.abspath()) + +def build(bld): + failures = [] + up_cwd = bld.path.find_node('up_cwd').abspath() + side_cwd = bld.path.find_node('side_cwd').abspath() + proj_cwd = bld.path.find_node('up_cwd/project').abspath() + proj_sub_cwd = bld.path.find_node('up_cwd/project/sub').abspath() + proj_out_cwd = bld.path.make_node('up_cwd/project/tmp_out').abspath() + wscript = bld.path.find_node('up_cwd/project/wscript').abspath() + + d_node = bld.path.make_node('path_to_record') + dumpf_default = d_node.abspath() + + def make_cmd(cmd, based=proj_cwd, dumpf=dumpf_default): + return list(cmd) + ['--based=%s' % based, '--dumpf=%s' % dumpf] + + def test_cmd(cmd, cwd, test_name, cwd_dir='.', top_dir='.', out_dir='tmp_out', run_dir='.', launch_dir='.'): + cmd = make_cmd(cmd) + try: + run_command(bld, cmd, cwd=cwd) + v = ConfigSet.ConfigSet(dumpf_default) + finally: + for k in bld.path.ant_glob('**/path_to_record'): + k.delete() + + err = [] + def check_err(got, expected, var_name): + if got != expected: + Logs.pprint('RED', '- %s: %s -> got:%r expected:%r' % (test_name, var_name, got, expected)) + err.append(var_name) + + check_err(v.cwd_dir, cwd_dir, 'cwd') + check_err(v.top_dir, top_dir, 'top') + check_err(v.run_dir, run_dir, 'run') + check_err(v.out_dir, out_dir, 'out') + check_err(v.launch_dir, launch_dir, 'launch') + if err: + failures.append(test_name) + else: + Logs.pprint('GREEN', '- %s: ok' % test_name) + + exe = os.path.abspath(os.path.join(Context.launch_dir, sys.argv[0])) + + cleanup(bld) + + test_cmd([exe, 'configure'], proj_cwd, 'regular configure') + test_cmd([exe], proj_cwd, ' regular build from top') + test_cmd([exe], proj_out_cwd, ' regular build from out', launch_dir='tmp_out') + test_cmd([exe], proj_sub_cwd, ' regular build from subfolder', launch_dir='sub') + cleanup(bld) + + test_cmd([exe, 'configure', '--top=%s' % proj_cwd, '--out=%s' % proj_out_cwd], proj_cwd, 'configure with top/out from proj cwd') + test_cmd([exe], proj_cwd, ' next build from top') + test_cmd([exe], proj_out_cwd, ' next build from out', launch_dir='tmp_out') + test_cmd([exe], proj_sub_cwd, ' next build from subfolder', launch_dir='sub') + test_cmd([exe, '--top=%s' % proj_cwd, '--out=foobar'], proj_cwd, + ' next build from top, verify out_dir==lock_file.out_dir') + test_cmd([exe, '--top=%s' % proj_cwd, '--out=foobar'], proj_sub_cwd, + ' next build from subfolder, verify out_dir==lock_file.out_dir', launch_dir='sub') + cleanup(bld) + + test_cmd([exe, 'configure', '--top=%s' % proj_cwd, '--out=%s' % proj_out_cwd], up_cwd, 'configure with top/out from up cwd', + launch_dir='..') + test_cmd([exe], proj_cwd, ' next build from top') + test_cmd([exe], proj_out_cwd, ' next build from out', launch_dir='tmp_out') + test_cmd([exe], proj_sub_cwd, ' next build from subfolder', launch_dir='sub') + cleanup(bld) + + test_cmd([wscript, 'configure'], proj_cwd, 'wscript configure') + test_cmd([wscript], proj_cwd, ' next build from top') + test_cmd([wscript], proj_out_cwd, ' next 
build from out', launch_dir='tmp_out') + test_cmd([wscript], proj_sub_cwd, ' next build from subfolder', launch_dir='sub') + cleanup(bld) + + test_cmd([wscript, 'configure', '--top=%s' % proj_cwd, '--out=%s' % proj_out_cwd], proj_cwd, 'wscript configure with top/out from proj cwd') + test_cmd([wscript], proj_cwd, ' next build from top') + test_cmd([wscript], proj_out_cwd, ' next build from out', launch_dir='tmp_out') + test_cmd([wscript], proj_sub_cwd, ' next build from subfolder', launch_dir='sub') + cleanup(bld) + + test_cmd([wscript, 'configure', '--top=%s' % proj_cwd, '--out=%s' % proj_out_cwd], up_cwd, 'wscript configure with top/out from up cwd', + launch_dir='..') + test_cmd([wscript], proj_cwd, ' next build from top') + test_cmd([wscript], proj_out_cwd, ' next build from out', launch_dir='tmp_out') + test_cmd([wscript], proj_sub_cwd, ' next build from subfolder', launch_dir='sub') + cleanup(bld) + + test_cmd([exe, '--top=%s' % proj_cwd], proj_cwd, 'autoconfig') + cleanup(bld) + + test_cmd([wscript, 'configure', '--top=project', '--out=project/tmp_out'], up_cwd, 'wscript configure with relative top/out from up cwd', + launch_dir='..') + test_cmd([wscript], proj_cwd, ' next build from top') + test_cmd([wscript], proj_out_cwd, ' next build from out', launch_dir='tmp_out') + test_cmd([wscript], proj_sub_cwd, ' next build from subfolder', launch_dir='sub') + cleanup(bld) + + test_cmd([exe, '--force-autoconfig', '--top=project'], up_cwd, 'autoconfig from up 1', launch_dir='..') + os.remove(dumpf_default + '_autoconfig') + test_cmd([exe, '--force-autoconfig', '--top=project'], up_cwd, 'autoconfig from up 2', launch_dir='..') + os.remove(dumpf_default + '_autoconfig') + test_cmd([exe, '--force-autoconfig', '--out=badout'], proj_cwd, 'autoconfig with clobber') + cleanup(bld) + + if failures: + bld.fatal('there were errors') + diff --git a/tests/install/wscript b/tests/install/wscript new file mode 100644 index 0000000000..adc25cf682 --- /dev/null +++ b/tests/install/wscript @@ -0,0 +1,91 @@ +#! 
/usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2016 +# + +import os +from waflib import Build, Options, TaskGen, Utils + +def build(bld): + pass + +@TaskGen.feature('ruler') +@TaskGen.before('process_rule') +def test_helper(self): + if not self.bld.is_install or self.bld.is_install == Build.UNINSTALL: + while self.meths: # do not generate tasks: the target file may not be there + self.meths.pop() + return + + tg = self.bld.get_tgen_by_name(self.bring_in) + tg.post() # let it create its installation task + assert tg.install_task.outputs + self.source = tg.install_task.outputs + +def configure(conf): + + tmpdir_top = conf.bldnode.make_node('tmpdir') + tmpdir_top.delete(evict=False) + tmpdir = tmpdir_top.make_node('foo') + + def build(bld): + bld.is_install = env.INSTALL + bld.path.make_node('tmpfile').write('test') + + bld.install_as('${PREFIX}/bin/foo', 'tmpfile', chmod=Utils.O755) + bld.symlink_as('${PREFIX}/bin/bar', '../tmpfile') + tsk = bld.install_files('${PREFIX}/bin', 'tmpfile', chmod=Utils.O755, name='cheese') + bld(rule='ls -l ${SRC}', always=True, bring_in='cheese', features='ruler') + + # preserve the folder structure or not (relative_trick) + bld.path.make_node('blah/blah').mkdir() + bld(features='subst', source='tmpfile', target='blah/blah/rel1', is_copy=True, install_path='${PREFIX}') + + bld(features='subst', source='tmpfile', target='blah/blah/rel2', is_copy=True) + bld.install_files('${PREFIX}', 'blah/blah/rel2', relative_base=bld.path.get_bld(), relative_trick=True) + + bld(features='subst', source='tmpfile', target='blah/blah/rel3', is_copy=True) + bld.install_files('${PREFIX}', 'blah/blah/rel3', relative_base=bld.path.search_node('blah').get_bld(), relative_trick=True) + + bld(features='subst', source='tmpfile', target='blah/blah/rel4', is_copy=True) + bld.install_files('lib', 'blah/blah/rel4') + + def check(env): + tmpdir_top.delete(evict=False) + + env.INSTALL = Build.INSTALL + conf.run_build(build_fun=build, msg='building', okmsg='ok', errmsg='eh', env=env) + + assert tmpdir.exists() + assert tmpdir.make_node('bin/foo').exists() + assert tmpdir.make_node('bin/tmpfile').exists() + assert tmpdir.make_node('bin/foo').read() == tmpdir.make_node('bin/tmpfile').read() + assert os.path.lexists(tmpdir.make_node('bin/bar').abspath()) + assert os.readlink(tmpdir.make_node('bin/bar').abspath()) == '../tmpfile' + assert tmpdir.make_node('rel1').exists() + assert tmpdir.make_node('blah/blah/rel2').exists() + assert tmpdir.make_node('blah/rel3').exists() + assert tmpdir.make_node('lib/rel4').exists() + + env.INSTALL = Build.UNINSTALL + conf.run_build(build_fun=build, msg='building', okmsg='ok', errmsg='eh', env=env) + assert not tmpdir.exists() + assert not tmpdir.make_node('bin/foo').exists() + assert not tmpdir.make_node('bin/tmpfile').exists() + assert not os.path.lexists(tmpdir.make_node('bin/bar').abspath()) + assert not tmpdir.exists() + assert not tmpdir.make_node('rel1').exists() + assert not tmpdir.make_node('blah/blah/rel2').exists() + assert not tmpdir.make_node('blah/rel3').exists() + + env = conf.env.derive() + env.PREFIX = tmpdir.abspath() + Options.options.destdir = None + check(env) + + env = conf.env.derive() + env.PREFIX = '/foo' + Options.options.destdir = tmpdir_top.abspath() + check(env) + + diff --git a/tests/install_group/wscript b/tests/install_group/wscript new file mode 100644 index 0000000000..0b26a899d6 --- /dev/null +++ b/tests/install_group/wscript @@ -0,0 +1,72 @@ +#! 
/usr/bin/env python + +from waflib import Utils, Build, Logs +import os + +GRP = os.environ.get('WAF_TEST_GROUP', 'adm') + +def test_chown(bld): + def create_and_chown(tsk): + tsk.outputs[0].write('test') + Utils.lchown(tsk.outputs[0].abspath(), -1, GRP) + bld.conf.env.CAN_CHOWN = True + bld(rule=create_and_chown, target='foo.txt', always=True) + +def test_grp(bld): + def check_path(tsk): + import grp + entry = grp.getgrnam(GRP) + assert entry[0] == GRP + bld.conf.env.CAN_GRP = True + bld(rule=check_path, always=True) + +def test_chown_install(bld): + bld.is_install = Build.INSTALL + + dest_file = bld.bldnode.make_node('test/foo') + dest_link = bld.bldnode.make_node('test/foo_link') + + tmpfile = bld.bldnode.make_node('foo.txt') + tmpfile.write('this is a test') + + bld.install_as(dest_file, + tmpfile, + install_group=GRP) + bld.symlink_as(dest_link, + 'foo', + install_group=GRP) + + bld.add_group() + + def check_path(tsk): + import grp + gid = grp.getgrnam(GRP)[2] + assert os.stat(dest_file.abspath()).st_gid == gid + assert os.stat(dest_link.abspath()).st_gid == gid + bld(rule=check_path, always=True) + +def configure(conf): + conf.test(build_fun=test_grp, + msg='Checking for the python module grp', + okmsg='ok', + errmsg='grp is missing', + mandatory=False) + if not conf.env.CAN_GRP: + return + + conf.test(build_fun=test_chown, + msg='Checking for Utils.lchown', + okmsg='ok', + errmsg='chown does not seem to work', + mandatory=False) + if not conf.env.CAN_CHOWN: + return + + conf.test(build_fun=test_chown_install, + msg='Testing install_group="adm"', + okmsg='ok', + errmsg='there is a regression') + +def build(bld): + pass + diff --git a/tests/macplist/src/main.c b/tests/macplist/src/main.c new file mode 100644 index 0000000000..691b6bba2d --- /dev/null +++ b/tests/macplist/src/main.c @@ -0,0 +1,3 @@ +int main(int argc, const char** argv) { + return 0; +} diff --git a/tests/macplist/wscript b/tests/macplist/wscript new file mode 100644 index 0000000000..4973f1e9a0 --- /dev/null +++ b/tests/macplist/wscript @@ -0,0 +1,124 @@ +#! /usr/bin/env python3 + +import platform, sys + +top = '.' +out = 'bin' + +plist_string = ''' + + + + + CFBundleDevelopmentRegion + English + CFBundleExecutable + {app_name} + + CFBundleIdentifier + {bundle_domain} + + CFBundleVersion + {bundle_version} + + MiscKey + {env[PLATFORM_NAME]} + + CFBundleInfoDictionaryVersion + 6.0 + CFBundleName + {bundle_name} + + CFBundlePackageType + APPL + + CFBundleSignature + ???? 
+ + +''' + +plist_context = { + 'bundle_domain': 'com.foo.bar.baz', + 'app_name': 'macplist_test', + 'bundle_version': '1.6.7' +} +expected_dict = { + 'env': { + 'PLATFORM_NAME': 'darwin' + } +} + +def options(opt): + opt.load('compiler_c') + +def configure(conf): + conf.load('compiler_c') + +def build(bld): + # Only testing feature on Darwin + if (platform.system() != 'Darwin'): + return + bld.env.PLATFORM_NAME = 'darwin' + plist = {'bundle_name': 'InterpolatedPlistFileTest'} + plist.update(plist_context) + bld.path.make_node('Info.plist').write(plist_string) + bld.program( + features="c cprogram", + target="InterpolatedPlistFileTest", + source="src/main.c", + mac_app=True, + mac_plist="Info.plist", + plist_context=plist) + bld.add_post_fun(test1) + + plist = {'bundle_name': 'InterpolatedPlistStringTest'} + plist.update(plist_context) + bld.program( + features="c cprogram", + target="InterpolatedPlistStringTest", + source="src/main.c", + mac_app=True, + mac_plist=plist_string, + plist_context=plist) + bld.add_post_fun(test2) + + bld.program( + features="c cprogram", + target="DefaultPlistTest", + source="src/main.c", + mac_app=True) + bld.add_post_fun(test3) + +def assert_eq(expected, actual): + exception_string = ''' +Expected `{expected}` +but instead got `{actual}` +''' + if (expected != actual): + raise Exception(exception_string.format(expected=expected,actual=actual)) + +def test1(ctx): + expected_plist = {'bundle_name': 'InterpolatedPlistFileTest'} + expected_plist.update(plist_context) + expected_plist.update(expected_dict) + expected_plist = plist_string.format(**expected_plist) + plist = ctx.path.make_node('bin/InterpolatedPlistFileTest.app/Contents/Info.plist').read() + assert_eq(expected_plist, plist) + + +def test2(ctx): + expected_plist = {'bundle_name': 'InterpolatedPlistStringTest'} + expected_plist.update(plist_context) + expected_plist.update(expected_dict) + expected_plist = plist_string.format(**expected_plist) + plist = ctx.path.make_node('bin/InterpolatedPlistStringTest.app/Contents/Info.plist').read() + assert_eq(expected_plist, plist) + +def test3(ctx): + from waflib.Tools import c_osx + expected_plist = {'app_name': 'DefaultPlistTest'} + expected_plist = c_osx.app_info.format(**expected_plist) + plist = ctx.path.make_node('bin/DefaultPlistTest.app/Contents/Info.plist').read() + assert_eq(expected_plist, plist) diff --git a/tests/msvc/wscript b/tests/msvc/wscript new file mode 100755 index 0000000000..85f6e75da7 --- /dev/null +++ b/tests/msvc/wscript @@ -0,0 +1,25 @@ +#! /usr/bin/env python3.1 + +from waflib.Tools import msvc +from waflib import Logs + +def configure(ctx): + pass + +def test(ctx): + errors = [] + msvc_versions = ['wsdk 6.1', 'winphone 8.0', 'msvc 9.0', 'msvc 16.6', 'msvc 16.0', 'msvc 15.9', 'msvc 15.0', 'msvc 14.0', 'msvc 12.0', 'msvc 11.0'] + sorted_versions = ['msvc 16.6', 'msvc 16.0', 'msvc 15.9', 'msvc 15.0', 'msvc 14.0', 'msvc 12.0', 'msvc 11.0', 'msvc 9.0', 'winphone 8.0', 'wsdk 6.1'] + + def tt(msg, result, expected): + color = 'RED' + if result == expected: + color = 'GREEN' + else: + errors.append(result) + Logs.pprint(color, msg.ljust(20) + " %r" % result) + + tt('msvc version detection sort', sorted(msvc_versions, key=msvc.MSVCVersion, reverse=True), sorted_versions) + + if errors: + ctx.fatal('There are test failures') diff --git a/tests/nodes/wscript b/tests/nodes/wscript index c2228ba8b1..debb5c1af0 100644 --- a/tests/nodes/wscript +++ b/tests/nodes/wscript @@ -1,5 +1,7 @@ #! /usr/bin/env python3 +from waflib import Utils + top = '.' 
out = 'bin' @@ -19,12 +21,17 @@ def build(bld): t = bld(rule='touch ${TGT}', target='bbb', source='aaa') t = bld(rule='touch ${TGT}', target='ccc', source='bbb') t = bld(rule='touch ${TGT}', target='ddd', source='ccc') + t = bld(rule=('touch ${TGT}', 'touch ${TGT}'), target='eee', source='ddd', chmod=Utils.O755) t.create_task('foo') #print( 'path from srcnode', bld.path.find_or_declare('aaa').path_from(bld.bldnode) ) - bld.install_files('/tmp/bar', 'wscript') + # folders as nodes are best avoided + dnode = bld.path.get_bld().make_node('testdir') + bld(rule='mkdir -p ${TGT}', target=dnode) + bld(rule='touch ${TGT}', source=dnode, target=dnode.make_node('stuff'), cls_str=lambda x: 'stuff') + bld.install_files('/tmp/bar', 'wscript') bld(features='c cprogram', source='main.c', target='app') @@ -39,9 +46,8 @@ def bar(ctx): from waflib import Task -# TODO update_outputs is a decorator too .. now -@Task.always_run -class foo_task(Task.Task): +class foo(Task.Task): + always_run = True def run(self): print("running foo") diff --git a/tests/post/wscript b/tests/post/wscript new file mode 100644 index 0000000000..b88c8f1305 --- /dev/null +++ b/tests/post/wscript @@ -0,0 +1,66 @@ +#! /usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2016-2018 (ita) + +top = '.' +out = 'build' + +import inspect +from waflib import Utils, Logs, TaskGen + +@TaskGen.taskgen_method +def log(self): + fname = inspect.stack()[1][3] + try: + self.called.append(fname) + except AttributeError: + self.called = [fname] + +@TaskGen.taskgen_method +def check(self): + self.post() + result = ''.join(self.called) + if result == self.expected: + color = 'GREEN' + else: + color = 'RED' + result = 'got %r but expected %r' % (result, self.expected) + self.bld.failure = 1 + Logs.pprint(color, result) + +@TaskGen.feature('test1') +@TaskGen.after('d') +def a(self): + self.log() +@TaskGen.feature('test1') +@TaskGen.after('c') +def b(self): + self.log() +@TaskGen.feature('test1') +def c(self): + self.log() +@TaskGen.feature('test1') +def d(self): + self.log() +@TaskGen.feature('test1') +@TaskGen.after('f') +def e(self): + self.log() +@TaskGen.feature('test1') +def f(self): + self.log() + + +def configure(conf): + pass + +def build(bld): + + bld.failure = 0 + def stop_status(bld): + if bld.failure: + bld.fatal('One or several test failed, check the outputs above') + bld.add_post_fun(stop_status) + + bld(features='test1', expected='cbdafe').check() + diff --git a/tests/preproc/src/main.c b/tests/preproc/src/main.c index e07e1761a1..9ee24a54db 100644 --- a/tests/preproc/src/main.c +++ b/tests/preproc/src/main.c @@ -1,7 +1,11 @@ #if A +const char* c = "foo /*"; #if B +const char* d = " bar */"; +// /* #if C +// */ #include "a.h" #else #include "b.h" diff --git a/tests/preproc/wscript b/tests/preproc/wscript index b7d794de78..8b6a5c1d28 100644 --- a/tests/preproc/wscript +++ b/tests/preproc/wscript @@ -16,13 +16,18 @@ from waflib.Logs import pprint def configure(conf): pass -def trimquotes(s): - if not s: return '' - s = s.rstrip() - if s[0] == "'" and s[-1] == "'": return s[1:-1] - return s - def build(bld): + + bld.failure = 0 + def disp(color, result): + pprint(color, result) + if color == 'RED': + bld.failure=1 + def stop_status(bld): + if bld.failure: + bld.fatal('One or several test failed, check the outputs above') + bld.add_post_fun(stop_status) + defs = { 'm1' : "m1 9 + 9", 'fun0' : "fun0(x, y) x y", @@ -44,7 +49,7 @@ def build(bld): color = "GREEN" else: color = "RED" - pprint(color, "%s\t\t%r" % (ret, toks)) + disp(color, "%s\t\t%r" 
% (ret, toks)) test("1 + m1 + 1", "1+9+9+1") test("1 + fun0(1, +) 1", "1+1+1") @@ -65,7 +70,7 @@ def build(bld): color = "GREEN" else: color = "RED" - pprint(color, "%s" % str(ret)) + disp(color, "%s" % str(ret)) test("fun6(math, h)", ("<", "math.h")) @@ -77,7 +82,7 @@ def build(bld): color = "GREEN" else: color = "RED" - pprint(color, "%s\t\t%r" % (ret, toks)) + disp(color, "%s\t\t%r" % (ret, toks)) test("1+1", 2) test("1-1", 0) @@ -113,8 +118,7 @@ def build(bld): def add_defs(a, b, c, expected): main = bld.path.find_resource('src/main.c') - defs = ['A=%d' % a, 'B=%d' % b, 'C=%d' % c] - bld.env['DEFINES'] = ["%s %s" %(x[0], trimquotes('='.join(x[1:]))) for x in [y.split('=') for y in defs]] + bld.env.DEFINES = ['A=%s' % str(a), 'B=%s' % str(b), 'C=%s' % str(c)] gruik = c_preproc.c_parser([main.parent]) gruik.start(main, bld.env) @@ -122,7 +126,7 @@ def build(bld): color = "GREEN" else: color = "RED" - pprint(color, "%r %r %r -> header %s (got %r)" % (a, b, c, expected, gruik.nodes)) + disp(color, "%r %r %r -> header %s (got %r)" % (a, b, c, expected, gruik.nodes)) add_defs(1, 1, 1, 'a.h') add_defs(1, 1, 0, 'b.h') @@ -145,15 +149,14 @@ def build(bld): def test_pasting(): main = bld.path.find_resource('src/pasting.c') - defs = ['PREFIX_VAL=', 'SUFFIX_VAL='] - bld.env['DEFINES'] = ["%s %s" %(x[0], trimquotes('='.join(x[1:]))) for x in [y.split('=') for y in defs]] + bld.env.DEFINES = ['PREFIX_VAL=', 'SUFFIX_VAL='] gruik = c_preproc.c_parser([main.parent]) gruik.start(main, bld.env) if len(gruik.nodes) == 1 and gruik.nodes[0].name == 'a.h': color = "GREEN" else: color = "RED" - pprint(color, "token pasting -> %r (expected a.h)" % gruik.nodes) + disp(color, "token pasting -> %r (expected a.h)" % gruik.nodes) test_pasting() @@ -165,7 +168,7 @@ def build(bld): color = "GREEN" else: color = "RED" - pprint(color, "%s\t\t%r" % (ret, toks)) + disp(color, "%s\t\t%r" % (ret, toks)) test('a||b||c||d', 1) test('a&&b&&c&&d', 0) @@ -181,13 +184,12 @@ def build(bld): color = "GREEN" else: color = "RED" - pprint(color, "%s\t\t%r" % (expected, gruik.nodes)) + disp(color, "%s\t\t%r" % (expected, gruik.nodes)) test_rec("", "a") - test_rec("FOO=1", "aca") - test_rec("BAR=1", "abca") - test_rec("FOO=1 BAR=1", "aca") - + test_rec("FOO=1", "ac") + test_rec("BAR=1", "abc") + test_rec("FOO=1 BAR=1", "ac") return test("1?1,(0?5:9):3,4", 0) # <- invalid expression diff --git a/tests/utils/wscript b/tests/utils/wscript new file mode 100644 index 0000000000..0d4ee976dd --- /dev/null +++ b/tests/utils/wscript @@ -0,0 +1,39 @@ +#! /usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2012 (ita) + +VERSION='0.0.1' +APPNAME='preproc_test' +top = '.' 
+out = 'build' + +from waflib import Utils +from waflib.Logs import pprint + +def configure(conf): + pass + +def build(bld): + + bld.failure = 0 + def disp(color, result): + pprint(color, result) + if color == 'RED': + bld.failure=1 + def stop_status(bld): + if bld.failure: + bld.fatal('One or several test failed, check the outputs above') + bld.add_post_fun(stop_status) + + def test_shell(inp, expected): + ret = Utils.shell_escape(inp) + if ret == expected: + color = "GREEN" + else: + color = "RED" + disp(color, "%r -> %r\t\texpected: %r" % (inp, ret, expected)) + + test_shell("ls -l", "ls -l") + test_shell(['ls', '-l', 'a space'], "ls -l 'a space'") + + diff --git a/utils/genbench.py b/utils/genbench.py index d68ec6fda7..8d98c06cef 100755 --- a/utils/genbench.py +++ b/utils/genbench.py @@ -174,7 +174,7 @@ def createVCProjFile(lib_number, classes): """) for i in range(classes): - handle.write(' \n') + handle.write(r' \n') handle.write(""" diff --git a/utils/launcher/README.md b/utils/launcher/README.md index 62c3787b1b..26b72eed1b 100644 --- a/utils/launcher/README.md +++ b/utils/launcher/README.md @@ -1,6 +1,6 @@ #Waf-launcher This is a simple wrapper for the -[waf build system](http://code.google.com/p/waf/) +[waf build system](https://waf.io/) Since many windows users does not have python installed by default, the exe file from this project can be included along with the copy of diff --git a/utils/launcher/waf-launcher/Main.cs b/utils/launcher/waf-launcher/Main.cs index 780254f7a1..006f8b73fb 100644 --- a/utils/launcher/waf-launcher/Main.cs +++ b/utils/launcher/waf-launcher/Main.cs @@ -1,4 +1,5 @@ using System; +using System.Collections.Generic; using System.Diagnostics; namespace waflauncher @@ -21,7 +22,7 @@ public static System.Diagnostics.Process exec(string command,params string[] arg public static int Main (string[] args) { - //I run waf and if not succesful we try on-the-fly install of python + //I run waf and if not successful we try on-the-fly install of python if(!runWaf(args)){ //but first we ask the user if it's okay to install software on their computer if(mayInstall()){ @@ -90,7 +91,7 @@ public static void installPython(){ Console.WriteLine ("python2.7 downloaded to " + filename); Console.WriteLine ("Installing python"); - //filename must be qouted or else msiexec will fail + //filename must be quoted or else msiexec will fail exec("msiexec","/qn","/i","\"" +filename + "\""); Console.WriteLine ("Python is now installed"); } diff --git a/utils/pubkey.asc b/utils/pubkey.asc new file mode 100644 index 0000000000..7adb45e0a3 --- /dev/null +++ b/utils/pubkey.asc @@ -0,0 +1,51 @@ +-----BEGIN PGP PUBLIC KEY BLOCK----- + +mQINBGTWwoEBEADYhx0mRt8fzJ0i9bGJctQSh4gyCXC9vHkclmtw8j/3ixFid045 +dajTcrjfUyGkErN4YPWiS94Fffv/1ATC+CH9AfYJuS5htQAx3j/0vbdSI2tstzxr +yemtBUCrmMKCGUA1rln3eo4e2/v/qJLxEZ0/PkXCmsFZkucU4BFH20i3lGswGZZn +yoppZsFAsGeZ0JZQnKeJuqvASPuzTPvvfEd1tErvNOtRJ9ObJDSSgKaWZaBSU4x5 +ARIjdzETSzstOYL+SgW2wiVoV0h9pAVaXCeL6e4Llr6fp6ptgqxcdlznLaIZhmuA +kmtGT2Cb8MxbgAqr2LWhgSN9VxvAe0yToMoRzj95RuAn054Kgcr2e9/CDDRvK45z +tuaP+579j+cUM0lNGyUxN/ZzMYdMs+xB816zokyV07WrNXbx0u16kzeLl5nP3cdB +H9b/14DCiq85hsAeDyZFAbWT7Cn26WYpHZj8mkFyFmba/jOCisTZW6iFekADarvE +ybctOUUMdrdtsqo53leYCrypWlJta4ixhNaGCMIw0p8/fZCcx5ECj3q5NXt+ZKiZ +5/f70LhsxvlcfNo9m2P06eit+IMV7STSx0CTKZUPyga5KeqGi4EtJZ0KGZWfBXhF +ODQAYpdnIMV+/9fMF0EE9iB0sgL0Me8hUL2KkhTWJ2OwnmE0VAZ5W+JmywARAQAB +tBxXYWYgUHJvamVjdCA8bm9yZXBseUB3YWYuaW8+iQJMBBMBCgA2FiEEjH6y+TsM +RfVzL+XRG6xXHc13IpUFAmTWwoECGwMFCRLMAwADCwkIBRUKCQgLAh4BAheAAAoJ 
+EBusVx3NdyKVSLsP/00nputckP0s1aC7ZSK+wiHn6umA4/kX4i+i+mQvStajsTZr +sSOs8UxL7kxRMDrKGdNekjxs0JU/MZ5+xL8+1odFxr/GSF343/9qXze301SKnmeS +ev3P/EJ9jkv8q+7LVjwlIkJ09OajvlEdC8Gkm6RuOaUcmO1G7secqCs15GXh+f8n +wS3nM0TGitzpomqE09C7OywUe3HhB+lSKCcOf6/8SFIeuxh5dp9Pcj3gUlpwXevH +IT/V+pMR/YlocpWQFyuMIRMIUCuYaWnHxndoB87ctwsepidQWj5DnbAG7gYGemGa +hhFG0VVHoLrR3tZAJcNxtr37E/9Efb152Ki9Eip+Pm5ov0miTGES1wCuFCmZXGaZ +aJcLBHng/iQme2XkRZ1t1MkywFK6eMvv3IBn3tICw54h5x6A7W2qTNeNvu3NQ7Oe +LRleMUmMFfceAXoCCqzcsdK44yetUutRSrEBf1hO13v34R5/DWagk1MU8y7SLBHA +oeIM1JwI0NEQx4kn1PLXBQbqllVllBakuPdIrk8zmflHd6/mSWcH2M2kDRzucuJN +ro912aDcD1Fp4WUM+CRfsP6tgkaDK1mXL/whtkHomMOUeA0OhGO8hp68PEb7toNR +h95ZBcAyRl7K/77Un80HxPT6rHBHyZOsX745UdivHbcdv/yjRNUywC/82RkiuQIN +BGTWwoEBEADEMA0ny4v9n7fjZPS9m8dypPXKgLUtlJKg9W1X8Wu5DiC+j24dhvoP +1IBVdEsRIRgDEu7VCI6t2OdlX7+4EYP3ltFlZ/tIgd8yhq2Kaim6hnQLPBehq/0K +7EXqvhZ5wxLi9jaW4ltlcmMPpEm/MBv0nv19TwEocQfevioFZwX/sohaRpqS8R26 +YVSBNsdmC1arJEXaFtmrXGPXMcCDflbkJ+Sk4okpkBANWHeNftT0LSV/5fov/gbA +nlwm7+UCU+D81JlCH1S1mHGHgHVsQatDSuN1GLdg0Uk85t/qrqpXcjz8SYMfqP1E +TPeq3DaArOtUxKdHR8dDtwZ4zRthhoOcqB9PvVXYq65f9+ad9fmASgYdrHMNccqg +V7VGu761MCD90iB2R7z4Ga3BWfxs+ywDV+wTzgMINRV1HlbNYpCF/vzUlvpBBx9f +ryclk6HhF6qtV3vWb5+r6gXUFunP9VBXqCOeJtbxBxRGImfYuuFen9qdq0aWtxl0 +59oO4Sj1vRxMnw6T5di13WgtGSKvI6SKgXf37o+u5+YepQVTOEg84mciS4GKq8KI +CTcMlO0LItU2Y1BD6T+RO6YdGVGU3CLUnq3WsTQ0mBtFNkeIwMNfU5yNynDO6Y2d +O4R9nB7uhrlSudheIj8VIIsMlPgBfIvkErjOAfGXsEWSmsRWRja2YQARAQABiQI8 +BBgBCgAmFiEEjH6y+TsMRfVzL+XRG6xXHc13IpUFAmTWwoECGwwFCRLMAwAACgkQ +G6xXHc13IpU58A/+KBmBO5vQ1sWiGczB5i887/v+fvbcbS6fHkLIaxApimVGXklV +Qjp7MGq1K1vnzPc0ovu9aFbxfdNSq6iwhAvB1Q9JFv1b7zhyWiaOIeCM6mYliBZW +GnWEe56GMiYW503vlglUEoYJ/4N/I6r1i5mNMj8D6xqtZfbAOk3bnt9ws7yBOpKe +LfZXWz9q23fXg1PRKmU0E9OFC87tfw/5Wltp7GVFzFdLWchhITTYcO+sjF3O2yNv +3STUQpmdx9NEZSEWKIEd7msnf7BA5FnNPjWZx59FULZWzuWOcJ/foPbktpvKXF34 +yT5SV9UIlXjCteZrMd5fxK5u6Tacae5n6fw8eFbS4l/6BOxSGROb+H3ilpqebzvJ +gv8ryQkFh076BevWndJMeq5kxuKv49jVON4SzbtEReLRkcFXFD6mGK+2d02ksW+f +NiGMBQYTXCfY97LROieyQ32hvhAIqacnE7L7F0U6Yz98gVkq57ThHBLNfOu4gqYj +PhpOr/iddN1mVNxzZagx+serw7xQUg8WhoFA9QkWmx7Bn3PGQnrtGEslbL6Lo+pI +VOzahFM1mHIcv3bfbGlN54fgKUoFOElnlSBNZrFS6E74MoNRq2CLzRmku7uNDcVR +IYlK05zLaeHsRNnBKgN7YOW05M4p+0K5OecuXm66Yajezi/+vyMf13tulIU= +=zzf7 +-----END PGP PUBLIC KEY BLOCK----- diff --git a/utils/setup.py b/utils/setup.py new file mode 100644 index 0000000000..69c3bfcbb2 --- /dev/null +++ b/utils/setup.py @@ -0,0 +1,25 @@ +import setuptools +import sys +sys.path.insert(0,'..') +import waflib.Context + +with open("../README.md", "r") as fh: + long_description = fh.read() + +setuptools.setup( + name="waf", + version=waflib.Context.WAFVERSION, + author="Thomas Nagy", + author_email="author@example.com", + description="Build Framework", + long_description=long_description, + long_description_content_type="text/markdown", + url="https://gitlab.com/ita1024/waf", + package_dir={'': '../'}, + packages=['waflib', 'waflib/Tools', 'waflib/extras'], + scripts=['../waf-light'], + classifiers=[ + "Programming Language :: Python :: 3", + "Operating System :: OS Independent", + ], +) diff --git a/utils/sign_file.py b/utils/sign_file.py index 7f3524aa21..41c5afc5d0 100755 --- a/utils/sign_file.py +++ b/utils/sign_file.py @@ -112,4 +112,4 @@ def parse_args(): sign_original_file(filename, encoding) else: create_signed_file(filename, encoding) - + diff --git a/utils/verify-sig.py b/utils/verify-sig.py index 950a4972c4..b555add27f 100755 --- a/utils/verify-sig.py +++ b/utils/verify-sig.py @@ -1,10 +1,10 @@ #! 
/usr/bin/env python -# encoding: ISO8859-1 +# encoding: utf-8 # Thomas Nagy, 2014-2015 """ A simple file for verifying signatures in signed waf files -This script is meant for Python >= 2.6 and the encoding is bytes - ISO8859-1 +This script is meant for Python >= 2.6 and the encoding is bytes - latin-1 Distributing detached signatures is boring """ @@ -33,14 +33,14 @@ try: txt = f.read() - lastline = txt.decode('ISO8859-1').splitlines()[-1] # just the last line + lastline = txt.decode('latin-1').splitlines()[-1] # just the last line if not lastline.startswith('#-----BEGIN PGP SIGNATURE-----'): print("ERROR: there is no signature to verify in %r :-/" % infile) sys.exit(1) sigtext = lastline.replace('\\n', '\n') # convert newlines sigtext = sigtext[1:] # omit the '# character' - sigtext = sigtext.encode('ISO8859-1') # python3 + sigtext = sigtext.encode('latin-1') # python3 f2.write(sigtext) f1.write(txt[:-len(lastline) - 1]) # one newline character was eaten from splitlines() diff --git a/utils/waf.bat b/utils/waf.bat index ecea1f7fb8..a8441a8462 100644 --- a/utils/waf.bat +++ b/utils/waf.bat @@ -5,17 +5,13 @@ rem from issue #964 Setlocal EnableDelayedExpansion rem Check Windows Version -set TOKEN=tokens=3* +set TOKEN=tokens=2* ver | findstr /i "5\.0\." > nul if %ERRORLEVEL% EQU 0 SET TOKEN=tokens=3* ver | findstr /i "5\.1\." > nul if %ERRORLEVEL% EQU 0 SET TOKEN=tokens=3* ver | findstr /i "5\.2\." > nul if %ERRORLEVEL% EQU 0 SET TOKEN=tokens=3* -ver | findstr /i "6\.0\." > nul -if %ERRORLEVEL% EQU 0 SET TOKEN=tokens=2* -ver | findstr /i "6\.1\." > nul -if %ERRORLEVEL% EQU 0 SET TOKEN=tokens=2* rem Start calculating PYTHON and PYTHON_DIR set PYTHON= @@ -26,7 +22,7 @@ Setlocal EnableDelayedExpansion set PYTHON_DIR_OK=FALSE set REGPATH= -for %%i in (3.7 3.6 3.5 3.4 3.3 3.2 3.1 3.0 2.7 2.6 2.5 2.4 2.3) do ( +for %%i in (3.12 3.11 3.10 3.9 3.8 3.7 3.6 3.5 3.4 3.3 3.2 3.1 3.0 2.7 2.6 2.5) do ( for %%j in (HKCU HKLM) do ( for %%k in (SOFTWARE\Wow6432Node SOFTWARE) do ( for %%l in (Python\PythonCore IronPython) do ( @@ -43,6 +39,7 @@ REG QUERY "!REGPATH!" /ve 1>nul 2>nul if !ERRORLEVEL! equ 0 ( for /F "%TOKEN% delims= " %%A IN ('REG QUERY "!REGPATH!" /ve') do @set REG_PYTHON_DIR=%%B if exist !REG_PYTHON_DIR! ( + IF NOT "!REG_PYTHON_DIR:~-1!"=="\" SET REG_PYTHON_DIR=!REG_PYTHON_DIR!\ set REG_PYTHON=!REG_PYTHON_DIR!!REG_PYTHON_EXE! rem set PYTHON_DIR_OK=TRUE if "!PYTHON_DIR_OK!"=="FALSE" ( @@ -76,7 +73,7 @@ Endlocal & SET PYTHON_DIR=%PYTHON_DIR% & SET PYTHON=%PYTHON% if "%PYTHON_DIR%" == "" ( rem @echo No Python dir -set PYTHON=python +set PYTHON=py goto running ) @@ -84,7 +81,7 @@ rem @echo %PYTHON_DIR% if "%PYTHON%" == "" ( rem @echo No Python -set PYTHON=python +set PYTHON=py goto running ) @@ -92,5 +89,6 @@ goto running @echo Using %PYTHON% -"%PYTHON%" -x "%~dp0waf" %* & Endlocal & exit /b %ERRORLEVEL% - +"%PYTHON%" -x "%~dp0waf" %* +Endlocal +exit /b %ERRORLEVEL% diff --git a/waf-light b/waf-light index 320745f6c0..eb0a34d7ed 100755 --- a/waf-light +++ b/waf-light @@ -1,7 +1,7 @@ #!/usr/bin/env python -# encoding: ISO8859-1 -# Thomas Nagy, 2005-2015 - +# encoding: latin-1 +# Thomas Nagy, 2005-2018 +# """ Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions @@ -32,7 +32,7 @@ POSSIBILITY OF SUCH DAMAGE. 
import os, sys, inspect -VERSION="1.8.8" +VERSION="2.0.27" REVISION="x" GIT="x" INSTALL="x" @@ -142,6 +142,9 @@ def find_lib(): if name.endswith('waf-light'): w = test(base) if w: return w + for dir in sys.path: + if test(dir): + return dir err('waf-light requires waflib -> export WAFDIR=/folder') dirname = '%s-%s-%s' % (WAF, VERSION, REVISION) diff --git a/waflib/Build.py b/waflib/Build.py index fdd59fb032..3b1f06cba5 100644 --- a/waflib/Build.py +++ b/waflib/Build.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # encoding: utf-8 -# Thomas Nagy, 2005-2010 (ita) +# Thomas Nagy, 2005-2018 (ita) """ Classes related to the build phase (build, clean, install, step, etc) @@ -14,14 +14,13 @@ import cPickle except ImportError: import pickle as cPickle -from waflib import Runner, TaskGen, Utils, ConfigSet, Task, Logs, Options, Context, Errors -import waflib.Node +from waflib import Node, Runner, TaskGen, Utils, ConfigSet, Task, Logs, Options, Context, Errors CACHE_DIR = 'c4che' -"""Location of the cache files""" +"""Name of the cache directory""" CACHE_SUFFIX = '_cache.py' -"""Suffix for the cache files""" +"""ConfigSet cache files for variants are written under :py:attr:´waflib.Build.CACHE_DIR´ in the form ´variant_name´_cache.py""" INSTALL = 1337 """Positive value '->' install, see :py:attr:`waflib.Build.BuildContext.is_install`""" @@ -29,20 +28,23 @@ UNINSTALL = -1337 """Negative value '<-' uninstall, see :py:attr:`waflib.Build.BuildContext.is_install`""" -SAVED_ATTRS = 'root node_deps raw_deps task_sigs'.split() -"""Build class members to save between the runs (root, node_deps, raw_deps, task_sigs)""" +SAVED_ATTRS = 'root node_sigs task_sigs imp_sigs raw_deps node_deps'.split() +"""Build class members to save between the runs; these should be all dicts +except for `root` which represents a :py:class:`waflib.Node.Node` instance +""" CFG_FILES = 'cfg_files' """Files from the build directory to hash before starting the build (``config.h`` written during the configuration)""" POST_AT_ONCE = 0 -"""Post mode: all task generators are posted before the build really starts""" +"""Post mode: all task generators are posted before any task executed""" POST_LAZY = 1 -"""Post mode: post the task generators group after group""" +"""Post mode: post the task generators group after group, the tasks in the next group are created when the tasks in the previous groups are done""" -POST_BOTH = 2 -"""Post mode: post the task generators at once, then re-check them for each group""" +PROTOCOL = -1 +if sys.platform == 'cli': + PROTOCOL = 0 class BuildContext(Context.Context): '''executes the build''' @@ -57,48 +59,65 @@ def __init__(self, **kw): """Non-zero value when installing or uninstalling file""" self.top_dir = kw.get('top_dir', Context.top_dir) + """See :py:attr:`waflib.Context.top_dir`; prefer :py:attr:`waflib.Build.BuildContext.srcnode`""" + + self.out_dir = kw.get('out_dir', Context.out_dir) + """See :py:attr:`waflib.Context.out_dir`; prefer :py:attr:`waflib.Build.BuildContext.bldnode`""" self.run_dir = kw.get('run_dir', Context.run_dir) + """See :py:attr:`waflib.Context.run_dir`""" - self.post_mode = POST_AT_ONCE - """post the task generators at once, group-by-group, or both""" + self.launch_dir = Context.launch_dir + """See :py:attr:`waflib.Context.out_dir`; prefer :py:meth:`waflib.Build.BuildContext.launch_node`""" - # output directory - may be set until the nodes are considered - self.out_dir = kw.get('out_dir', Context.out_dir) + self.post_mode = POST_LAZY + """Whether to post the task generators at once or 
group-by-group (default is group-by-group)""" - self.cache_dir = kw.get('cache_dir', None) + self.cache_dir = kw.get('cache_dir') if not self.cache_dir: self.cache_dir = os.path.join(self.out_dir, CACHE_DIR) - # map names to environments, the '' must be defined self.all_envs = {} + """Map names to :py:class:`waflib.ConfigSet.ConfigSet`, the empty string must map to the default environment""" # ======================================= # # cache variables + self.node_sigs = {} + """Dict mapping build nodes to task identifier (uid), it indicates whether a task created a particular file (persists across builds)""" + self.task_sigs = {} - """Signatures of the tasks (persists between build executions)""" + """Dict mapping task identifiers (uid) to task signatures (persists across builds)""" + + self.imp_sigs = {} + """Dict mapping task identifiers (uid) to implicit task dependencies used for scanning targets (persists across builds)""" self.node_deps = {} - """Dict of node dependencies found by :py:meth:`waflib.Task.Task.scan` (persists between build executions)""" + """Dict mapping task identifiers (uid) to node dependencies found by :py:meth:`waflib.Task.Task.scan` (persists across builds)""" self.raw_deps = {} - """Dict of custom data returned by :py:meth:`waflib.Task.Task.scan` (persists between build executions)""" - - # list of folders that are already scanned - # so that we do not need to stat them one more time - self.cache_dir_contents = {} + """Dict mapping task identifiers (uid) to custom data returned by :py:meth:`waflib.Task.Task.scan` (persists across builds)""" self.task_gen_cache_names = {} - self.launch_dir = Context.launch_dir - self.jobs = Options.options.jobs + """Amount of jobs to run in parallel""" + self.targets = Options.options.targets + """List of targets to build (default: \\*)""" + self.keep = Options.options.keep + """Whether the build should continue past errors""" + self.progress_bar = Options.options.progress_bar + """ + Level of progress status: - ############ stuff below has not been reviewed + 0. normal output + 1. progress bar + 2. IDE output + 3. No output at all + """ # Manual dependencies. self.deps_man = Utils.defaultdict(list) @@ -114,16 +133,21 @@ def __init__(self, **kw): """ List containing lists of task generators """ + self.group_names = {} """ Map group names to the group lists. See :py:meth:`waflib.Build.BuildContext.add_group` """ + for v in SAVED_ATTRS: + if not hasattr(self, v): + setattr(self, v, {}) + def get_variant_dir(self): """Getter for the variant_dir attribute""" if not self.variant: return self.out_dir - return os.path.join(self.out_dir, self.variant) + return os.path.join(self.out_dir, os.path.normpath(self.variant)) variant_dir = property(get_variant_dir, None) def __call__(self, *k, **kw): @@ -148,56 +172,22 @@ def build(bld): kw['bld'] = self ret = TaskGen.task_gen(*k, **kw) self.task_gen_cache_names = {} # reset the cache, each time - self.add_to_group(ret, group=kw.get('group', None)) + self.add_to_group(ret, group=kw.get('group')) return ret - def rule(self, *k, **kw): + def __copy__(self): """ - Wrapper for creating a task generator using the decorator notation. 
The following code:: - - @bld.rule( - target = "foo" - ) - def _(tsk): - print("bar") - - is equivalent to:: - - def bar(tsk): - print("bar") + Build contexts cannot be copied - bld( - target = "foo", - rule = bar, - ) + :raises: :py:class:`waflib.Errors.WafError` """ - def f(rule): - ret = self(*k, **kw) - ret.rule = rule - return ret - return f - - def __copy__(self): - """Implemented to prevents copies of build contexts (raises an exception)""" - raise Errors.WafError('build contexts are not supposed to be copied') - - def install_files(self, *k, **kw): - """Actual implementation provided by :py:meth:`waflib.Build.InstallContext.install_files`""" - pass - - def install_as(self, *k, **kw): - """Actual implementation provided by :py:meth:`waflib.Build.InstallContext.install_as`""" - pass - - def symlink_as(self, *k, **kw): - """Actual implementation provided by :py:meth:`waflib.Build.InstallContext.symlink_as`""" - pass + raise Errors.WafError('build contexts cannot be copied') def load_envs(self): """ The configuration command creates files of the form ``build/c4che/NAMEcache.py``. This method creates a :py:class:`waflib.ConfigSet.ConfigSet` instance for each ``NAME`` by reading those - files. The config sets are then stored in the dict :py:attr:`waflib.Build.BuildContext.allenvs`. + files and stores them in :py:attr:`waflib.Build.BuildContext.allenvs`. """ node = self.root.find_node(self.cache_dir) if not node: @@ -213,21 +203,16 @@ def load_envs(self): self.all_envs[name] = env for f in env[CFG_FILES]: newnode = self.root.find_resource(f) - try: - h = Utils.h_file(newnode.abspath()) - except (IOError, AttributeError): - Logs.error('cannot find %r' % f) - h = Utils.SIG_NIL - newnode.sig = h + if not newnode or not newnode.exists(): + raise Errors.WafError('Missing configuration file %r, reconfigure the project!' % f) def init_dirs(self): """ Initialize the project directory and the build directory by creating the nodes :py:attr:`waflib.Build.BuildContext.srcnode` and :py:attr:`waflib.Build.BuildContext.bldnode` - corresponding to ``top_dir`` and ``variant_dir`` respectively. The ``bldnode`` directory will be - created if it does not exist. + corresponding to ``top_dir`` and ``variant_dir`` respectively. The ``bldnode`` directory is + created if necessary. """ - if not (os.path.isabs(self.top_dir) and os.path.isabs(self.out_dir)): raise Errors.WafError('The project was not configured: run "waf configure" first!') @@ -237,12 +222,12 @@ def init_dirs(self): def execute(self): """ - Restore the data from previous builds and call :py:meth:`waflib.Build.BuildContext.execute_build`. Overrides from :py:func:`waflib.Context.Context.execute` + Restore data from previous builds and call :py:meth:`waflib.Build.BuildContext.execute_build`. 
+ Overrides from :py:func:`waflib.Context.Context.execute` """ self.restore() if not self.all_envs: self.load_envs() - self.execute_build() def execute_build(self): @@ -255,7 +240,7 @@ def execute_build(self): * calling :py:meth:`waflib.Build.BuildContext.post_build` to call user build functions """ - Logs.info("Waf: Entering directory `%s'" % self.variant_dir) + Logs.info("Waf: Entering directory `%s'", self.variant_dir) self.recurse([self.run_dir]) self.pre_build() @@ -266,65 +251,70 @@ def execute_build(self): self.compile() finally: if self.progress_bar == 1 and sys.stderr.isatty(): - c = len(self.returned_tasks) or 1 + c = self.producer.processed or 1 m = self.progress_line(c, c, Logs.colors.BLUE, Logs.colors.NORMAL) Logs.info(m, extra={'stream': sys.stderr, 'c1': Logs.colors.cursor_off, 'c2' : Logs.colors.cursor_on}) - Logs.info("Waf: Leaving directory `%s'" % self.variant_dir) + Logs.info("Waf: Leaving directory `%s'", self.variant_dir) + try: + self.producer.bld = None + del self.producer + except AttributeError: + pass self.post_build() def restore(self): """ - Load the data from a previous run, sets the attributes listed in :py:const:`waflib.Build.SAVED_ATTRS` + Load data from a previous run, sets the attributes listed in :py:const:`waflib.Build.SAVED_ATTRS` """ try: env = ConfigSet.ConfigSet(os.path.join(self.cache_dir, 'build.config.py')) except EnvironmentError: pass else: - if env['version'] < Context.HEXVERSION: - raise Errors.WafError('Version mismatch! reconfigure the project') - for t in env['tools']: + if env.version < Context.HEXVERSION: + raise Errors.WafError('Project was configured with a different version of Waf, please reconfigure it') + + for t in env.tools: self.setup(**t) dbfn = os.path.join(self.variant_dir, Context.DBFILE) try: data = Utils.readf(dbfn, 'rb') - except (IOError, EOFError): + except (EnvironmentError, EOFError): # handle missing file/empty file - Logs.debug('build: Could not load the build cache %s (missing)' % dbfn) + Logs.debug('build: Could not load the build cache %s (missing)', dbfn) else: try: - waflib.Node.pickle_lock.acquire() - waflib.Node.Nod3 = self.node_class + Node.pickle_lock.acquire() + Node.Nod3 = self.node_class try: data = cPickle.loads(data) except Exception as e: - Logs.debug('build: Could not pickle the build cache %s: %r' % (dbfn, e)) + Logs.debug('build: Could not pickle the build cache %s: %r', dbfn, e) else: for x in SAVED_ATTRS: - setattr(self, x, data[x]) + setattr(self, x, data.get(x, {})) finally: - waflib.Node.pickle_lock.release() + Node.pickle_lock.release() self.init_dirs() def store(self): """ - Store the data for next runs, sets the attributes listed in :py:const:`waflib.Build.SAVED_ATTRS`. Uses a temporary + Store data for next runs, set the attributes listed in :py:const:`waflib.Build.SAVED_ATTRS`. Uses a temporary file to avoid problems on ctrl+c. """ - data = {} for x in SAVED_ATTRS: data[x] = getattr(self, x) db = os.path.join(self.variant_dir, Context.DBFILE) try: - waflib.Node.pickle_lock.acquire() - waflib.Node.Nod3 = self.node_class - x = cPickle.dumps(data, -1) + Node.pickle_lock.acquire() + Node.Nod3 = self.node_class + x = cPickle.dumps(data, PROTOCOL) finally: - waflib.Node.pickle_lock.release() + Node.pickle_lock.release() Utils.writef(db + '.tmp', x, m='wb') @@ -342,29 +332,34 @@ def store(self): def compile(self): """ Run the build by creating an instance of :py:class:`waflib.Runner.Parallel` - The cache file is not written if the build is up to date (no task executed). 
+ The cache file is written when at least a task was executed. + + :raises: :py:class:`waflib.Errors.BuildError` in case the build fails """ Logs.debug('build: compile()') - # use another object to perform the producer-consumer logic (reduce the complexity) + # delegate the producer-consumer logic to another object to reduce the complexity self.producer = Runner.Parallel(self, self.jobs) self.producer.biter = self.get_build_iterator() - self.returned_tasks = [] # not part of the API yet try: self.producer.start() except KeyboardInterrupt: - self.store() + if self.is_dirty(): + self.store() raise else: - if self.producer.dirty: + if self.is_dirty(): self.store() if self.producer.error: raise Errors.BuildError(self.producer.error) + def is_dirty(self): + return self.producer.dirty + def setup(self, tool, tooldir=None, funs=None): """ - Import waf tools, used to import those accessed during the configuration:: + Import waf tools defined during the configuration:: def configure(conf): conf.load('glib2') @@ -379,11 +374,13 @@ def build(bld): :param funs: unused variable """ if isinstance(tool, list): - for i in tool: self.setup(i, tooldir) + for i in tool: + self.setup(i, tooldir) return module = Context.load_tool(tool, tooldir) - if hasattr(module, "setup"): module.setup(self) + if hasattr(module, "setup"): + module.setup(self) def get_env(self): """Getter for the env property""" @@ -408,26 +405,28 @@ def build(bld): :param path: file path :type path: string or :py:class:`waflib.Node.Node` - :param value: value to depend on - :type value: :py:class:`waflib.Node.Node`, string, or function returning a string + :param value: value to depend + :type value: :py:class:`waflib.Node.Node`, byte object, or function returning a byte object """ - if path is None: - raise ValueError('Invalid input') + if not path: + raise ValueError('Invalid input path %r' % path) - if isinstance(path, waflib.Node.Node): + if isinstance(path, Node.Node): node = path elif os.path.isabs(path): node = self.root.find_resource(path) else: node = self.path.find_resource(path) + if not node: + raise ValueError('Could not find the path %r' % path) if isinstance(value, list): - self.deps_man[id(node)].extend(value) + self.deps_man[node].extend(value) else: - self.deps_man[id(node)].append(value) + self.deps_man[node].append(value) def launch_node(self): - """Returns the launch directory as a :py:class:`waflib.Node.Node` object""" + """Returns the launch directory as a :py:class:`waflib.Node.Node` object (cached)""" try: # private cache return self.p_ln @@ -437,11 +436,13 @@ def launch_node(self): def hash_env_vars(self, env, vars_lst): """ - Hash configuration set variables:: + Hashes configuration set variables:: def build(bld): bld.hash_env_vars(bld.env, ['CXX', 'CC']) + This method uses an internal cache. + :param env: Configuration Set :type env: :py:class:`waflib.ConfigSet.ConfigSet` :param vars_lst: list of variables @@ -465,21 +466,23 @@ def build(bld): pass lst = [env[a] for a in vars_lst] - ret = Utils.h_list(lst) + cache[idx] = ret = Utils.h_list(lst) Logs.debug('envhash: %s %r', Utils.to_hex(ret), lst) - - cache[idx] = ret - return ret def get_tgen_by_name(self, name): """ - Retrieves a task generator from its name or its target name - the name must be unique:: + Fetches a task generator by its name or its target attribute; + the name must be unique in a build:: def build(bld): tg = bld(name='foo') tg == bld.get_tgen_by_name('foo') + + This method use a private internal cache. 
+ + :param name: Task generator name + :raises: :py:class:`waflib.Errors.WafError` in case there is no task genenerator by that name """ cache = self.task_gen_cache_names if not cache: @@ -496,9 +499,12 @@ def build(bld): except KeyError: raise Errors.WafError('Could not find a task generator for the name %r' % name) - def progress_line(self, state, total, col1, col2): + def progress_line(self, idx, total, col1, col2): """ - Compute the progress bar used by ``waf -p`` + Computes a progress bar line displayed when running ``waf -p`` + + :returns: progress bar line + :rtype: string """ if not sys.stderr.isatty(): return '' @@ -508,16 +514,16 @@ def progress_line(self, state, total, col1, col2): Utils.rot_idx += 1 ind = Utils.rot_chr[Utils.rot_idx % 4] - pc = (100.*state)/total - eta = str(self.timer) - fs = "[%%%dd/%%%dd][%%s%%2d%%%%%%s][%s][" % (n, n, ind) - left = fs % (state, total, col1, pc, col2) - right = '][%s%s%s]' % (col1, eta, col2) + pc = (100. * idx)/total + fs = "[%%%dd/%%d][%%s%%2d%%%%%%s][%s][" % (n, ind) + left = fs % (idx, total, col1, pc, col2) + right = '][%s%s%s]' % (col1, self.timer, col2) cols = Logs.get_term_cols() - len(left) - len(right) + 2*len(col1) + 2*len(col2) - if cols < 7: cols = 7 + if cols < 7: + cols = 7 - ratio = ((cols*state)//total) - 1 + ratio = ((cols * idx)//total) - 1 bar = ('='*ratio+'>').ljust(cols) msg = Logs.indicator % (left, bar, right) @@ -526,23 +532,23 @@ def progress_line(self, state, total, col1, col2): def declare_chain(self, *k, **kw): """ - Wrapper for :py:func:`waflib.TaskGen.declare_chain` provided for convenience + Wraps :py:func:`waflib.TaskGen.declare_chain` for convenience """ return TaskGen.declare_chain(*k, **kw) def pre_build(self): - """Execute user-defined methods before the build starts, see :py:meth:`waflib.Build.BuildContext.add_pre_fun`""" + """Executes user-defined methods before the build starts, see :py:meth:`waflib.Build.BuildContext.add_pre_fun`""" for m in getattr(self, 'pre_funs', []): m(self) def post_build(self): - """Executes the user-defined methods after the build is successful, see :py:meth:`waflib.Build.BuildContext.add_post_fun`""" + """Executes user-defined methods after the build is successful, see :py:meth:`waflib.Build.BuildContext.add_post_fun`""" for m in getattr(self, 'post_funs', []): m(self) def add_pre_fun(self, meth): """ - Bind a method to execute after the scripts are read and before the build starts:: + Binds a callback method to execute after the scripts are read and before the build starts:: def mycallback(bld): print("Hello, world!") @@ -557,7 +563,7 @@ def build(bld): def add_post_fun(self, meth): """ - Bind a method to execute immediately after the build is successful:: + Binds a callback method to execute immediately after the build is successful:: def call_ldconfig(bld): bld.exec_command('/sbin/ldconfig') @@ -573,7 +579,7 @@ def build(bld): def get_group(self, x): """ - Get the group x, or return the current group if x is None + Returns the build group named `x`, or the current group if `x` is None :param x: name or number or None :type x: string, int or None @@ -587,14 +593,20 @@ def get_group(self, x): return self.groups[x] def add_to_group(self, tgen, group=None): - """add a task or a task generator for the build""" - # paranoid - assert(isinstance(tgen, TaskGen.task_gen) or isinstance(tgen, Task.TaskBase)) + """Adds a task or a task generator to the build; there is no attempt to remove it if it was already added.""" + assert(isinstance(tgen, TaskGen.task_gen) or isinstance(tgen, 
Task.Task)) tgen.bld = self self.get_group(group).append(tgen) def get_group_name(self, g): - """name for the group g (utility)""" + """ + Returns the name of the input build group + + :param g: build group object or build group index + :type g: integer or list + :return: name + :rtype: string + """ if not isinstance(g, list): g = self.groups[g] for x in self.group_names: @@ -604,7 +616,7 @@ def get_group_name(self, g): def get_group_idx(self, tg): """ - Index of the group containing the task generator given as argument:: + Returns the index of the group containing the task generator given as argument:: def build(bld): tg = bld(name='nada') @@ -612,27 +624,28 @@ def build(bld): :param tg: Task generator object :type tg: :py:class:`waflib.TaskGen.task_gen` + :rtype: int """ se = id(tg) - for i in range(len(self.groups)): - for t in self.groups[i]: + for i, tmp in enumerate(self.groups): + for t in tmp: if id(t) == se: return i return None def add_group(self, name=None, move=True): """ - Add a new group of tasks/task generators. By default the new group becomes the default group for new task generators. + Adds a new group of tasks/task generators. By default the new group becomes + the default group for new task generators (make sure to create build groups in order). :param name: name for this group :type name: string - :param move: set the group created as default group (True by default) + :param move: set this new group as default group (True by default) :type move: bool + :raises: :py:class:`waflib.Errors.WafError` if a group by the name given already exists """ - #if self.groups and not self.groups[0].tasks: - # error('add_group: an empty group is already present') if name and name in self.group_names: - Logs.error('add_group: name %s already present' % name) + raise Errors.WafError('add_group: name %s already present' % name) g = [] self.group_names[name] = g self.groups.append(g) @@ -641,7 +654,8 @@ def add_group(self, name=None, move=True): def set_group(self, idx): """ - Set the current group to be idx: now new task generators will be added to this group by default:: + Sets the build group at position idx as current so that newly added + task generators are added to this one by default:: def build(bld): bld(rule='touch ${TGT}', target='foo.txt') @@ -655,8 +669,8 @@ def build(bld): """ if isinstance(idx, str): g = self.group_names[idx] - for i in range(len(self.groups)): - if id(g) == id(self.groups[i]): + for i, tmp in enumerate(self.groups): + if id(g) == id(tmp): self.current_group = i break else: @@ -664,8 +678,11 @@ def build(bld): def total(self): """ - Approximate task count: this value may be inaccurate if task generators are posted lazily (see :py:attr:`waflib.Build.BuildContext.post_mode`). + Approximate task count: this value may be inaccurate if task generators + are posted lazily (see :py:attr:`waflib.Build.BuildContext.post_mode`). The value :py:attr:`waflib.Runner.Parallel.total` is updated during the task execution. + + :rtype: int """ total = 0 for group in self.groups: @@ -678,9 +695,16 @@ def total(self): def get_targets(self): """ - Return the task generator corresponding to the 'targets' list, used by :py:meth:`waflib.Build.BuildContext.get_build_iterator`:: + This method returns a pair containing the index of the last build group to post, + and the list of task generator objects corresponding to the target names. 
+ + This is used internally by :py:meth:`waflib.Build.BuildContext.get_build_iterator` + to perform partial builds:: $ waf --targets=myprogram,myshlib + + :return: the minimum build group index, and list of task generators + :rtype: tuple """ to_post = [] min_grp = 0 @@ -696,7 +720,7 @@ def get_targets(self): def get_all_task_gen(self): """ - Utility method, returns a list of all task generators - if you need something more complicated, implement your own + Returns a list of all task generators for troubleshooting purposes. """ lst = [] for g in self.groups: @@ -705,83 +729,100 @@ def get_all_task_gen(self): def post_group(self): """ - Post the task generators from the group indexed by self.cur, used by :py:meth:`waflib.Build.BuildContext.get_build_iterator` + Post task generators from the group indexed by self.current_group; used internally + by :py:meth:`waflib.Build.BuildContext.get_build_iterator` """ + def tgpost(tg): + try: + f = tg.post + except AttributeError: + pass + else: + f() + if self.targets == '*': - for tg in self.groups[self.cur]: - try: - f = tg.post - except AttributeError: - pass - else: - f() + for tg in self.groups[self.current_group]: + tgpost(tg) elif self.targets: - if self.cur < self._min_grp: - for tg in self.groups[self.cur]: - try: - f = tg.post - except AttributeError: - pass - else: - f() + if self.current_group < self._min_grp: + for tg in self.groups[self.current_group]: + tgpost(tg) else: for tg in self._exact_tg: tg.post() else: ln = self.launch_node() if ln.is_child_of(self.bldnode): - Logs.warn('Building from the build directory, forcing --targets=*') + if Logs.verbose > 1: + Logs.warn('Building from the build directory, forcing --targets=*') ln = self.srcnode elif not ln.is_child_of(self.srcnode): - Logs.warn('CWD %s is not under %s, forcing --targets=* (run distclean?)' % (ln.abspath(), self.srcnode.abspath())) + if Logs.verbose > 1: + Logs.warn('CWD %s is not under %s, forcing --targets=* (run distclean?)', ln.abspath(), self.srcnode.abspath()) ln = self.srcnode - for tg in self.groups[self.cur]: + + def is_post(tg, ln): try: - f = tg.post + p = tg.path except AttributeError: pass else: - if tg.path.is_child_of(ln): - f() + if p.is_child_of(ln): + return True + + def is_post_group(): + for i, g in enumerate(self.groups): + if i > self.current_group: + for tg in g: + if is_post(tg, ln): + return True + + if self.post_mode == POST_LAZY and ln != self.srcnode: + # partial folder builds require all targets from a previous build group + if is_post_group(): + ln = self.srcnode + + for tg in self.groups[self.current_group]: + if is_post(tg, ln): + tgpost(tg) def get_tasks_group(self, idx): """ - Return all the tasks for the group of num idx, used by :py:meth:`waflib.Build.BuildContext.get_build_iterator` + Returns all task instances for the build group at position idx, + used internally by :py:meth:`waflib.Build.BuildContext.get_build_iterator` + + :rtype: list of :py:class:`waflib.Task.Task` """ tasks = [] for tg in self.groups[idx]: try: tasks.extend(tg.tasks) - except AttributeError: # not a task generator, can be the case for installation tasks + except AttributeError: # not a task generator tasks.append(tg) return tasks def get_build_iterator(self): """ - Creates a generator object that returns lists of tasks executable in parallel (yield) + Creates a Python generator object that returns lists of tasks that may be processed in parallel. 
- :return: tasks which can be executed immediatly - :rtype: list of :py:class:`waflib.Task.TaskBase` + :return: tasks which can be executed immediately + :rtype: generator returning lists of :py:class:`waflib.Task.Task` """ - self.cur = 0 - if self.targets and self.targets != '*': (self._min_grp, self._exact_tg) = self.get_targets() - global lazy_post if self.post_mode != POST_LAZY: - while self.cur < len(self.groups): + for self.current_group, _ in enumerate(self.groups): self.post_group() - self.cur += 1 - self.cur = 0 - while self.cur < len(self.groups): + for self.current_group, _ in enumerate(self.groups): # first post the task generators for the group if self.post_mode != POST_AT_ONCE: self.post_group() # then extract the tasks - tasks = self.get_tasks_group(self.cur) + tasks = self.get_tasks_group(self.current_group) + # if the constraints are set properly (ext_in/ext_out, before/after) # the call to set_file_constraints may be removed (can be a 15% penalty on no-op rebuilds) # (but leave set_file_constraints for the installation step) @@ -792,148 +833,324 @@ def get_build_iterator(self): Task.set_precedence_constraints(tasks) self.cur_tasks = tasks - self.cur += 1 - if not tasks: # return something else the build will stop - continue - yield tasks + if tasks: + yield tasks + while 1: + # the build stops once there are no tasks to process yield [] -class inst(Task.Task): + def install_files(self, dest, files, **kw): + """ + Creates a task generator to install files on the system:: + + def build(bld): + bld.install_files('${DATADIR}', self.path.find_resource('wscript')) + + :param dest: path representing the destination directory + :type dest: :py:class:`waflib.Node.Node` or string (absolute path) + :param files: input files + :type files: list of strings or list of :py:class:`waflib.Node.Node` + :param env: configuration set to expand *dest* + :type env: :py:class:`waflib.ConfigSet.ConfigSet` + :param relative_trick: preserve the folder hierarchy when installing whole folders + :type relative_trick: bool + :param cwd: parent node for searching srcfile, when srcfile is not an instance of :py:class:`waflib.Node.Node` + :type cwd: :py:class:`waflib.Node.Node` + :param postpone: execute the task immediately to perform the installation (False by default) + :type postpone: bool + """ + assert(dest) + tg = self(features='install_task', install_to=dest, install_from=files, **kw) + tg.dest = tg.install_to + tg.type = 'install_files' + if not kw.get('postpone', True): + tg.post() + return tg + + def install_as(self, dest, srcfile, **kw): + """ + Creates a task generator to install a file on the system with a different name:: + + def build(bld): + bld.install_as('${PREFIX}/bin', 'myapp', chmod=Utils.O755) + + :param dest: destination file + :type dest: :py:class:`waflib.Node.Node` or string (absolute path) + :param srcfile: input file + :type srcfile: string or :py:class:`waflib.Node.Node` + :param cwd: parent node for searching srcfile, when srcfile is not an instance of :py:class:`waflib.Node.Node` + :type cwd: :py:class:`waflib.Node.Node` + :param env: configuration set for performing substitutions in dest + :type env: :py:class:`waflib.ConfigSet.ConfigSet` + :param postpone: execute the task immediately to perform the installation (False by default) + :type postpone: bool + """ + assert(dest) + tg = self(features='install_task', install_to=dest, install_from=srcfile, **kw) + tg.dest = tg.install_to + tg.type = 'install_as' + if not kw.get('postpone', True): + tg.post() + return tg + + def 
symlink_as(self, dest, src, **kw): + """ + Creates a task generator to install a symlink:: + + def build(bld): + bld.symlink_as('${PREFIX}/lib/libfoo.so', 'libfoo.so.1.2.3') + + :param dest: absolute path of the symlink + :type dest: :py:class:`waflib.Node.Node` or string (absolute path) + :param src: link contents, which is a relative or absolute path which may exist or not + :type src: string + :param env: configuration set for performing substitutions in dest + :type env: :py:class:`waflib.ConfigSet.ConfigSet` + :param add: add the task created to a build group - set ``False`` only if the installation task is created after the build has started + :type add: bool + :param postpone: execute the task immediately to perform the installation + :type postpone: bool + :param relative_trick: make the symlink relative (default: ``False``) + :type relative_trick: bool + """ + assert(dest) + tg = self(features='install_task', install_to=dest, install_from=src, **kw) + tg.dest = tg.install_to + tg.type = 'symlink_as' + tg.link = src + # TODO if add: self.add_to_group(tsk) + if not kw.get('postpone', True): + tg.post() + return tg + +@TaskGen.feature('install_task') +@TaskGen.before_method('process_rule', 'process_source') +def process_install_task(self): + """Creates the installation task for the current task generator; uses :py:func:`waflib.Build.add_install_task` internally.""" + self.add_install_task(**self.__dict__) + +@TaskGen.taskgen_method +def add_install_task(self, **kw): + """ + Creates the installation task for the current task generator, and executes it immediately if necessary + + :returns: An installation task + :rtype: :py:class:`waflib.Build.inst` """ - Special task used for installing files and symlinks, it behaves both like a task - and like a task generator + if not self.bld.is_install: + return + if not kw['install_to']: + return + + if kw['type'] == 'symlink_as' and Utils.is_win32: + if kw.get('win32_install'): + kw['type'] = 'install_as' + else: + # just exit + return + + tsk = self.install_task = self.create_task('inst') + tsk.chmod = kw.get('chmod', Utils.O644) + tsk.link = kw.get('link', '') or kw.get('install_from', '') + tsk.relative_trick = kw.get('relative_trick', False) + tsk.type = kw['type'] + tsk.install_to = tsk.dest = kw['install_to'] + tsk.install_from = kw['install_from'] + tsk.relative_base = kw.get('cwd') or kw.get('relative_base', self.path) + tsk.install_user = kw.get('install_user') + tsk.install_group = kw.get('install_group') + tsk.init_files() + if not kw.get('postpone', True): + tsk.run_now() + return tsk + +@TaskGen.taskgen_method +def add_install_files(self, **kw): + """ + Creates an installation task for files + + :returns: An installation task + :rtype: :py:class:`waflib.Build.inst` + """ + kw['type'] = 'install_files' + return self.add_install_task(**kw) + +@TaskGen.taskgen_method +def add_install_as(self, **kw): + """ + Creates an installation task for a single file + + :returns: An installation task + :rtype: :py:class:`waflib.Build.inst` + """ + kw['type'] = 'install_as' + return self.add_install_task(**kw) + +@TaskGen.taskgen_method +def add_symlink_as(self, **kw): """ - color = 'CYAN' + Creates an installation task for a symbolic link + + :returns: An installation task + :rtype: :py:class:`waflib.Build.inst` + """ + kw['type'] = 'symlink_as' + return self.add_install_task(**kw) + +class inst(Task.Task): + """Task that installs files or symlinks; it is typically executed by :py:class:`waflib.Build.InstallContext` and 
:py:class:`waflib.Build.UnInstallContext`""" + def __str__(self): + """Returns an empty string to disable the standard task display""" + return '' def uid(self): - lst = [self.dest, self.path] + self.source - return Utils.h_list(repr(lst)) + """Returns a unique identifier for the task""" + lst = self.inputs + self.outputs + [self.link, self.generator.path.abspath()] + return Utils.h_list(lst) - def post(self): + def init_files(self): """ - Same interface as in :py:meth:`waflib.TaskGen.task_gen.post` + Initializes the task input and output nodes """ - buf = [] - for x in self.source: - if isinstance(x, waflib.Node.Node): - y = x - else: - y = self.path.find_resource(x) - if not y: - if Logs.verbose: - Logs.warn('Could not find %s immediately (may cause broken builds)' % x) - idx = self.generator.bld.get_group_idx(self) - for tg in self.generator.bld.groups[idx]: - if not isinstance(tg, inst) and id(tg) != id(self): - tg.post() - y = self.path.find_resource(x) - if y: - break - else: - raise Errors.WafError('Could not find %r in %r' % (x, self.path)) - buf.append(y) - self.inputs = buf + if self.type == 'symlink_as': + inputs = [] + else: + inputs = self.generator.to_nodes(self.install_from) + if self.type == 'install_as': + assert len(inputs) == 1 + self.set_inputs(inputs) + + dest = self.get_install_path() + outputs = [] + if self.type == 'symlink_as': + if self.relative_trick: + self.link = os.path.relpath(self.link, os.path.dirname(dest)) + outputs.append(self.generator.bld.root.make_node(dest)) + elif self.type == 'install_as': + outputs.append(self.generator.bld.root.make_node(dest)) + else: + for y in inputs: + if self.relative_trick: + destfile = os.path.join(dest, y.path_from(self.relative_base)) + else: + destfile = os.path.join(dest, y.name) + outputs.append(self.generator.bld.root.make_node(destfile)) + self.set_outputs(outputs) def runnable_status(self): """ Installation tasks are always executed, so this method returns either :py:const:`waflib.Task.ASK_LATER` or :py:const:`waflib.Task.RUN_ME`. """ ret = super(inst, self).runnable_status() - if ret == Task.SKIP_ME: + if ret == Task.SKIP_ME and self.generator.bld.is_install: return Task.RUN_ME return ret - def __str__(self): - """Return an empty string to disable the display""" - return '' - - def run(self): - """The attribute 'exec_task' holds the method to execute""" - return self.generator.exec_task() + def post_run(self): + """ + Disables any post-run operations + """ + pass def get_install_path(self, destdir=True): """ - Installation path obtained from ``self.dest`` and prefixed by the destdir. - The variables such as '${PREFIX}/bin' are substituted. + Returns the destination path where files will be installed, pre-pending `destdir`. + + Relative paths will be interpreted relative to `PREFIX` if no `destdir` is given. 
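(Editor's aside, not part of the patch: a tiny worked example of the destination-path rule described above, assuming a Unix-like layout with PREFIX=/usr/local and an invocation such as ``waf install --destdir=/tmp/stage``; both values are illustrative.)

	import os
	dest = os.path.normpath('/usr/local/bin')   # '${PREFIX}/bin' after variable substitution
	destdir = '/tmp/stage'
	# mirrors the destdir join shown in get_install_path() below
	print(destdir.rstrip(os.sep) + os.sep + os.path.splitdrive(dest)[1].lstrip(os.sep))
	# -> /tmp/stage/usr/local/bin; a bare relative value such as 'share/doc' is joined to PREFIX first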
+ + :rtype: string """ - dest = Utils.subst_vars(self.dest, self.env) - dest = dest.replace('/', os.sep) + if isinstance(self.install_to, Node.Node): + dest = self.install_to.abspath() + else: + dest = os.path.normpath(Utils.subst_vars(self.install_to, self.env)) + if not os.path.isabs(dest): + dest = os.path.join(self.env.PREFIX, dest) if destdir and Options.options.destdir: - dest = os.path.join(Options.options.destdir, os.path.splitdrive(dest)[1].lstrip(os.sep)) + dest = Options.options.destdir.rstrip(os.sep) + os.sep + os.path.splitdrive(dest)[1].lstrip(os.sep) return dest - def exec_install_files(self): + def copy_fun(self, src, tgt): """ - Predefined method for installing files + Copies a file from src to tgt, preserving permissions and trying to work + around path limitations on Windows platforms. On Unix-like platforms, + the owner/group of the target file may be set through install_user/install_group + + :param src: absolute path + :type src: string + :param tgt: absolute path + :type tgt: string """ - destpath = self.get_install_path() - if not destpath: - raise Errors.WafError('unknown installation path %r' % self.generator) - for x, y in zip(self.source, self.inputs): - if self.relative_trick: - destfile = os.path.join(destpath, y.path_from(self.path)) - else: - destfile = os.path.join(destpath, y.name) - self.generator.bld.do_install(y.abspath(), destfile, chmod=self.chmod, tsk=self) + # override this if you want to strip executables + # kw['tsk'].source is the task that created the files in the build + if Utils.is_win32 and len(tgt) > 259 and not tgt.startswith('\\\\?\\'): + tgt = '\\\\?\\' + tgt + shutil.copy2(src, tgt) + self.fix_perms(tgt) - def exec_install_as(self): + def rm_empty_dirs(self, tgt): """ - Predefined method for installing one file with a given name + Removes empty folders recursively when uninstalling. + + :param tgt: absolute path + :type tgt: string """ - destfile = self.get_install_path() - self.generator.bld.do_install(self.inputs[0].abspath(), destfile, chmod=self.chmod, tsk=self) + while tgt: + tgt = os.path.dirname(tgt) + try: + os.rmdir(tgt) + except OSError: + break - def exec_symlink_as(self): + def run(self): """ - Predefined method for installing a symlink + Performs file or symlink installation """ - destfile = self.get_install_path() - src = self.link - if self.relative_trick: - src = os.path.relpath(src, os.path.dirname(destfile)) - self.generator.bld.do_link(src, destfile, tsk=self) - -class InstallContext(BuildContext): - '''installs the targets on the system''' - cmd = 'install' - - def __init__(self, **kw): - super(InstallContext, self).__init__(**kw) + is_install = self.generator.bld.is_install + if not is_install: # unnecessary? 
+ return - # list of targets to uninstall for removing the empty folders after uninstalling - self.uninstall = [] - self.is_install = INSTALL + for x in self.outputs: + if is_install == INSTALL: + x.parent.mkdir() + if self.type == 'symlink_as': + fun = is_install == INSTALL and self.do_link or self.do_unlink + fun(self.link, self.outputs[0].abspath()) + else: + fun = is_install == INSTALL and self.do_install or self.do_uninstall + launch_node = self.generator.bld.launch_node() + for x, y in zip(self.inputs, self.outputs): + fun(x.abspath(), y.abspath(), x.path_from(launch_node)) - def copy_fun(self, src, tgt, **kw): - # override this if you want to strip executables - # kw['tsk'].source is the task that created the files in the build - if Utils.is_win32 and len(tgt) > 259 and not tgt.startswith('\\\\?\\'): - tgt = '\\\\?\\' + tgt - shutil.copy2(src, tgt) - os.chmod(tgt, kw.get('chmod', Utils.O644)) + def run_now(self): + """ + Try executing the installation task right now - def do_install(self, src, tgt, **kw): + :raises: :py:class:`waflib.Errors.TaskNotReady` """ - Copy a file from src to tgt with given file permissions. The actual copy is not performed - if the source and target file have the same size and the same timestamps. When the copy occurs, - the file is first removed and then copied (prevent stale inodes). + status = self.runnable_status() + if status not in (Task.RUN_ME, Task.SKIP_ME): + raise Errors.TaskNotReady('Could not process %r: status %r' % (self, status)) + self.run() + self.hasrun = Task.SUCCESS - This method is overridden in :py:meth:`waflib.Build.UninstallContext.do_install` to remove the file. + def do_install(self, src, tgt, lbl, **kw): + """ + Copies a file from src to tgt with given file permissions. The actual copy is only performed + if the source and target file sizes or timestamps differ. When the copy occurs, + the file is always first removed and then copied so as to prevent stale inodes. :param src: file name as absolute path :type src: string :param tgt: file destination, as absolute path :type tgt: string + :param lbl: file source description + :type lbl: string :param chmod: installation mode :type chmod: int + :raises: :py:class:`waflib.Errors.WafError` if the file cannot be written """ - d, _ = os.path.split(tgt) - if not d: - raise Errors.WafError('Invalid installation given %r->%r' % (src, tgt)) - Utils.check_dir(d) - - srclbl = src.replace(self.srcnode.abspath() + os.sep, '') if not Options.options.force: # check if the file is already there to avoid a copy try: @@ -944,12 +1161,20 @@ def do_install(self, src, tgt, **kw): else: # same size and identical timestamps -> make no copy if st1.st_mtime + 2 >= st2.st_mtime and st1.st_size == st2.st_size: - if not self.progress_bar: - Logs.info('- install %s (from %s)' % (tgt, srclbl)) + if not self.generator.bld.progress_bar: + + c1 = Logs.colors.NORMAL + c2 = Logs.colors.BLUE + + Logs.info('%s- install %s%s%s (from %s)', c1, c2, tgt, c1, lbl) return False - if not self.progress_bar: - Logs.info('+ install %s (from %s)' % (tgt, srclbl)) + if not self.generator.bld.progress_bar: + + c1 = Logs.colors.NORMAL + c2 = Logs.colors.BLUE + + Logs.info('%s+ install %s%s%s (from %s)', c1, c2, tgt, c1, lbl) # Give best attempt at making destination overwritable, # like the 'install' utility used by 'make install' does. 
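(Editor's aside, not part of the patch: a minimal standalone sketch of the freshness test performed by ``inst.do_install()`` in the hunk above before copying. The condition is taken verbatim from the diff; the mapping of ``st1`` to the existing target and ``st2`` to the source is an assumption, as the ``os.stat`` calls themselves are outside the visible hunk.)

	import os

	def would_skip_copy(src, tgt):
		# Skip the copy when the installed file already has the same size and a
		# timestamp within ~2 seconds of the source (same check as inst.do_install).
		try:
			st1 = os.stat(tgt)  # existing destination file (assumed mapping)
			st2 = os.stat(src)  # source file in the build tree (assumed mapping)
		except OSError:
			return False        # destination missing or unreadable -> perform the copy
		return st1.st_mtime + 2 >= st2.st_mtime and st1.st_size == st2.st_size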
@@ -965,187 +1190,72 @@ def do_install(self, src, tgt, **kw): pass try: - self.copy_fun(src, tgt, **kw) - except IOError: - try: - os.stat(src) - except EnvironmentError: - Logs.error('File %r does not exist' % src) - raise Errors.WafError('Could not install the file %r' % tgt) + self.copy_fun(src, tgt) + except EnvironmentError as e: + if not os.path.exists(src): + Logs.error('File %r does not exist', src) + elif not os.path.isfile(src): + Logs.error('Input %r is not a file', src) + raise Errors.WafError('Could not install the file %r' % tgt, e) - def do_link(self, src, tgt, **kw): + def fix_perms(self, tgt): """ - Create a symlink from tgt to src. + Change the ownership of the file/folder/link pointed by the given path + This looks up for `install_user` or `install_group` attributes + on the task or on the task generator:: + + def build(bld): + bld.install_as('${PREFIX}/wscript', + 'wscript', + install_user='nobody', install_group='nogroup') + bld.symlink_as('${PREFIX}/wscript_link', + Utils.subst_vars('${PREFIX}/wscript', bld.env), + install_user='nobody', install_group='nogroup') + """ + if not Utils.is_win32: + user = getattr(self, 'install_user', None) or getattr(self.generator, 'install_user', None) + group = getattr(self, 'install_group', None) or getattr(self.generator, 'install_group', None) + if user or group: + Utils.lchown(tgt, user or -1, group or -1) + if not os.path.islink(tgt): + os.chmod(tgt, self.chmod) - This method is overridden in :py:meth:`waflib.Build.UninstallContext.do_link` to remove the symlink. + def do_link(self, src, tgt, **kw): + """ + Creates a symlink from tgt to src. :param src: file name as absolute path :type src: string :param tgt: file destination, as absolute path :type tgt: string """ - d, _ = os.path.split(tgt) - Utils.check_dir(d) - - link = False - if not os.path.islink(tgt): - link = True - elif os.readlink(tgt) != src: - link = True - - if link: - try: os.remove(tgt) - except OSError: pass - if not self.progress_bar: - Logs.info('+ symlink %s (to %s)' % (tgt, src)) - os.symlink(src, tgt) + if os.path.islink(tgt) and os.readlink(tgt) == src: + if not self.generator.bld.progress_bar: + c1 = Logs.colors.NORMAL + c2 = Logs.colors.BLUE + Logs.info('%s- symlink %s%s%s (to %s)', c1, c2, tgt, c1, src) else: - if not self.progress_bar: - Logs.info('- symlink %s (to %s)' % (tgt, src)) - - def run_task_now(self, tsk, postpone): - """ - This method is called by :py:meth:`waflib.Build.InstallContext.install_files`, - :py:meth:`waflib.Build.InstallContext.install_as` and :py:meth:`waflib.Build.InstallContext.symlink_as` immediately - after the installation task is created. Its role is to force the immediate execution if necessary, that is when - ``postpone=False`` was given. 
- """ - tsk.post() - if not postpone: - if tsk.runnable_status() == Task.ASK_LATER: - raise self.WafError('cannot post the task %r' % tsk) - tsk.run() - - def install_files(self, dest, files, env=None, chmod=Utils.O644, relative_trick=False, cwd=None, add=True, postpone=True, task=None): - """ - Create a task to install files on the system:: - - def build(bld): - bld.install_files('${DATADIR}', self.path.find_resource('wscript')) - - :param dest: absolute path of the destination directory - :type dest: string - :param files: input files - :type files: list of strings or list of nodes - :param env: configuration set for performing substitutions in dest - :type env: Configuration set - :param relative_trick: preserve the folder hierarchy when installing whole folders - :type relative_trick: bool - :param cwd: parent node for searching srcfile, when srcfile is not a :py:class:`waflib.Node.Node` - :type cwd: :py:class:`waflib.Node.Node` - :param add: add the task created to a build group - set ``False`` only if the installation task is created after the build has started - :type add: bool - :param postpone: execute the task immediately to perform the installation - :type postpone: bool - """ - tsk = inst(env=env or self.env) - tsk.bld = self - tsk.path = cwd or self.path - tsk.chmod = chmod - tsk.task = task - if isinstance(files, waflib.Node.Node): - tsk.source = [files] - else: - tsk.source = Utils.to_list(files) - tsk.dest = dest - tsk.exec_task = tsk.exec_install_files - tsk.relative_trick = relative_trick - if add: self.add_to_group(tsk) - self.run_task_now(tsk, postpone) - return tsk - - def install_as(self, dest, srcfile, env=None, chmod=Utils.O644, cwd=None, add=True, postpone=True, task=None): - """ - Create a task to install a file on the system with a different name:: - - def build(bld): - bld.install_as('${PREFIX}/bin', 'myapp', chmod=Utils.O755) - - :param dest: absolute path of the destination file - :type dest: string - :param srcfile: input file - :type srcfile: string or node - :param cwd: parent node for searching srcfile, when srcfile is not a :py:class:`waflib.Node.Node` - :type cwd: :py:class:`waflib.Node.Node` - :param env: configuration set for performing substitutions in dest - :type env: Configuration set - :param add: add the task created to a build group - set ``False`` only if the installation task is created after the build has started - :type add: bool - :param postpone: execute the task immediately to perform the installation - :type postpone: bool - """ - tsk = inst(env=env or self.env) - tsk.bld = self - tsk.path = cwd or self.path - tsk.chmod = chmod - tsk.source = [srcfile] - tsk.task = task - tsk.dest = dest - tsk.exec_task = tsk.exec_install_as - if add: self.add_to_group(tsk) - self.run_task_now(tsk, postpone) - return tsk - - def symlink_as(self, dest, src, env=None, cwd=None, add=True, postpone=True, relative_trick=False, task=None): - """ - Create a task to install a symlink:: - - def build(bld): - bld.symlink_as('${PREFIX}/lib/libfoo.so', 'libfoo.so.1.2.3') - - :param dest: absolute path of the symlink - :type dest: string - :param src: absolute or relative path of the link - :type src: string - :param env: configuration set for performing substitutions in dest - :type env: Configuration set - :param add: add the task created to a build group - set ``False`` only if the installation task is created after the build has started - :type add: bool - :param postpone: execute the task immediately to perform the installation - :type postpone: bool - :param 
relative_trick: make the symlink relative (default: ``False``) - :type relative_trick: bool - """ - - if Utils.is_win32: - # symlinks *cannot* work on that platform - return - - tsk = inst(env=env or self.env) - tsk.bld = self - tsk.dest = dest - tsk.path = cwd or self.path - tsk.source = [] - tsk.task = task - tsk.link = src - tsk.relative_trick = relative_trick - tsk.exec_task = tsk.exec_symlink_as - if add: self.add_to_group(tsk) - self.run_task_now(tsk, postpone) - return tsk - -class UninstallContext(InstallContext): - '''removes the targets installed''' - cmd = 'uninstall' - - def __init__(self, **kw): - super(UninstallContext, self).__init__(**kw) - self.is_install = UNINSTALL - - def rm_empty_dirs(self, tgt): - while tgt: - tgt = os.path.dirname(tgt) try: - os.rmdir(tgt) + os.remove(tgt) except OSError: - break + pass + if not self.generator.bld.progress_bar: + c1 = Logs.colors.NORMAL + c2 = Logs.colors.BLUE + Logs.info('%s+ symlink %s%s%s (to %s)', c1, c2, tgt, c1, src) + os.symlink(src, tgt) + self.fix_perms(tgt) - def do_install(self, src, tgt, **kw): - """See :py:meth:`waflib.Build.InstallContext.do_install`""" - if not self.progress_bar: - Logs.info('- remove %s' % tgt) + def do_uninstall(self, src, tgt, lbl, **kw): + """ + See :py:meth:`waflib.Build.inst.do_install` + """ + if not self.generator.bld.progress_bar: + c1 = Logs.colors.NORMAL + c2 = Logs.colors.BLUE + Logs.info('%s- remove %s%s%s', c1, c2, tgt, c1) - self.uninstall.append(tgt) + #self.uninstall.append(tgt) try: os.remove(tgt) except OSError as e: @@ -1154,42 +1264,45 @@ def do_install(self, src, tgt, **kw): self.uninstall_error = True Logs.warn('build: some files could not be uninstalled (retry with -vv to list them)') if Logs.verbose > 1: - Logs.warn('Could not remove %s (error code %r)' % (e.filename, e.errno)) - + Logs.warn('Could not remove %s (error code %r)', e.filename, e.errno) self.rm_empty_dirs(tgt) - def do_link(self, src, tgt, **kw): - """See :py:meth:`waflib.Build.InstallContext.do_link`""" + def do_unlink(self, src, tgt, **kw): + """ + See :py:meth:`waflib.Build.inst.do_link` + """ try: - if not self.progress_bar: - Logs.info('- remove %s' % tgt) + if not self.generator.bld.progress_bar: + c1 = Logs.colors.NORMAL + c2 = Logs.colors.BLUE + Logs.info('%s- remove %s%s%s', c1, c2, tgt, c1) os.remove(tgt) except OSError: pass - self.rm_empty_dirs(tgt) - def execute(self): - """ - See :py:func:`waflib.Context.Context.execute` - """ - try: - # do not execute any tasks - def runnable_status(self): - return Task.SKIP_ME - setattr(Task.Task, 'runnable_status_back', Task.Task.runnable_status) - setattr(Task.Task, 'runnable_status', runnable_status) +class InstallContext(BuildContext): + '''installs the targets on the system''' + cmd = 'install' - super(UninstallContext, self).execute() - finally: - setattr(Task.Task, 'runnable_status', Task.Task.runnable_status_back) + def __init__(self, **kw): + super(InstallContext, self).__init__(**kw) + self.is_install = INSTALL + +class UninstallContext(InstallContext): + '''removes the targets installed''' + cmd = 'uninstall' + + def __init__(self, **kw): + super(UninstallContext, self).__init__(**kw) + self.is_install = UNINSTALL class CleanContext(BuildContext): '''cleans the project''' cmd = 'clean' def execute(self): """ - See :py:func:`waflib.Context.Context.execute` + See :py:func:`waflib.Build.BuildContext.execute`. 
""" self.restore() if not self.all_envs: @@ -1202,30 +1315,50 @@ def execute(self): self.store() def clean(self): - """Remove files from the build directory if possible, and reset the caches""" + """ + Remove most files from the build directory, and reset all caches. + + Custom lists of files to clean can be declared as `bld.clean_files`. + For example, exclude `build/program/myprogram` from getting removed:: + + def build(bld): + bld.clean_files = bld.bldnode.ant_glob('**', + excl='.lock* config.log c4che/* config.h program/myprogram', + quiet=True, generator=True) + """ Logs.debug('build: clean called') - if self.bldnode != self.srcnode: + if hasattr(self, 'clean_files'): + for n in self.clean_files: + n.delete() + elif self.bldnode != self.srcnode: # would lead to a disaster if top == out - lst=[] - for e in self.all_envs.values(): - lst.extend(self.root.find_or_declare(f) for f in e[CFG_FILES]) - for n in self.bldnode.ant_glob('**/*', excl='.lock* *conf_check_*/** config.log c4che/*', quiet=True): + lst = [] + for env in self.all_envs.values(): + lst.extend(self.root.find_or_declare(f) for f in env[CFG_FILES]) + excluded_dirs = '.lock* *conf_check_*/** config.log %s/*' % CACHE_DIR + for n in self.bldnode.ant_glob('**/*', excl=excluded_dirs, quiet=True): if n in lst: continue n.delete() self.root.children = {} - for v in 'node_deps task_sigs raw_deps'.split(): + for v in SAVED_ATTRS: + if v == 'root': + continue setattr(self, v, {}) class ListContext(BuildContext): '''lists the targets to execute''' - cmd = 'list' + def execute(self): """ - See :py:func:`waflib.Context.Context.execute`. + In addition to printing the name of each build target, + a description column will include text for each task + generator which has a "description" field set. + + See :py:func:`waflib.Build.BuildContext.execute`. """ self.restore() if not self.all_envs: @@ -1249,12 +1382,25 @@ def execute(self): try: # force the cache initialization self.get_tgen_by_name('') - except Exception: + except Errors.WafError: pass - lst = list(self.task_gen_cache_names.keys()) - lst.sort() - for k in lst: - Logs.pprint('GREEN', k) + + targets = sorted(self.task_gen_cache_names) + + # figure out how much to left-justify, for largest target name + line_just = max(len(t) for t in targets) if targets else 0 + + for target in targets: + tgen = self.task_gen_cache_names[target] + + # Support displaying the description for the target + # if it was set on the tgen + descript = getattr(tgen, 'description', '') + if descript: + target = target.ljust(line_just) + descript = ': %s' % descript + + Logs.pprint('GREEN', target, label=descript) class StepContext(BuildContext): '''executes tasks in a step-by-step fashion, for debugging''' @@ -1266,7 +1412,8 @@ def __init__(self, **kw): def compile(self): """ - Compile the tasks matching the input/output files given (regular expression matching). 
Derived from :py:meth:`waflib.Build.BuildContext.compile`:: + Overrides :py:meth:`waflib.Build.BuildContext.compile` to perform a partial build + on tasks matching the input/output pattern given (regular expression matching):: $ waf step --files=foo.c,bar.c,in:truc.c,out:bar.o $ waf step --files=in:foo.cpp.1.o # link task only @@ -1277,7 +1424,7 @@ def compile(self): BuildContext.compile(self) return - targets = None + targets = [] if self.targets and self.targets != '*': targets = self.targets.split(',') @@ -1296,25 +1443,32 @@ def compile(self): for pat in self.files.split(','): matcher = self.get_matcher(pat) for tg in g: - if isinstance(tg, Task.TaskBase): + if isinstance(tg, Task.Task): lst = [tg] else: lst = tg.tasks for tsk in lst: do_exec = False - for node in getattr(tsk, 'inputs', []): + for node in tsk.inputs: if matcher(node, output=False): do_exec = True break - for node in getattr(tsk, 'outputs', []): + for node in tsk.outputs: if matcher(node, output=True): do_exec = True break if do_exec: ret = tsk.run() - Logs.info('%s -> exit %r' % (str(tsk), ret)) + Logs.info('%s -> exit %r', tsk, ret) def get_matcher(self, pat): + """ + Converts a step pattern into a function + + :param: pat: pattern of the form in:truc.c,out:bar.o + :returns: Python function that uses Node objects as inputs and returns matches + :rtype: function + """ # this returns a function inn = True out = True @@ -1335,9 +1489,9 @@ def get_matcher(self, pat): pattern = re.compile(pat) def match(node, output): - if output == True and not out: + if output and not out: return False - if output == False and not inn: + if not output and not inn: return False if anode: @@ -1346,3 +1500,15 @@ def match(node, output): return pattern.match(node.abspath()) return match +class EnvContext(BuildContext): + """Subclass EnvContext to create commands that require configuration data in 'env'""" + fun = cmd = None + def execute(self): + """ + See :py:func:`waflib.Build.BuildContext.execute`. + """ + self.restore() + if not self.all_envs: + self.load_envs() + self.recurse([self.run_dir]) + diff --git a/waflib/ConfigSet.py b/waflib/ConfigSet.py index de0c3aeecf..901fba6c06 100644 --- a/waflib/ConfigSet.py +++ b/waflib/ConfigSet.py @@ -1,21 +1,21 @@ #!/usr/bin/env python # encoding: utf-8 -# Thomas Nagy, 2005-2010 (ita) +# Thomas Nagy, 2005-2018 (ita) """ ConfigSet: a special dict -The values put in :py:class:`ConfigSet` must be lists +The values put in :py:class:`ConfigSet` must be serializable (dicts, lists, strings) """ import copy, re, os from waflib import Logs, Utils -re_imp = re.compile('^(#)*?([^#=]*?)\ =\ (.*?)$', re.M) +re_imp = re.compile(r'^(#)*?([^#=]*?)\ =\ (.*?)$', re.M) class ConfigSet(object): """ - A dict that honor serialization and parent relationships. The serialization format + A copy-on-write dict with human-readable serialized format. The serialization format is human-readable (python-like) and performed by using eval() and repr(). For high performance prefer pickle. Do not store functions as they are not serializable. 
@@ -39,17 +39,20 @@ def __init__(self, filename=None): def __contains__(self, key): """ - Enable the *in* syntax:: + Enables the *in* syntax:: if 'foo' in env: print(env['foo']) """ - if key in self.table: return True - try: return self.parent.__contains__(key) - except AttributeError: return False # parent may not exist + if key in self.table: + return True + try: + return self.parent.__contains__(key) + except AttributeError: + return False # parent may not exist def keys(self): - """Dict interface (unknown purpose)""" + """Dict interface""" keys = set() cur = self while cur: @@ -59,6 +62,9 @@ def keys(self): keys.sort() return keys + def __iter__(self): + return iter(self.keys()) + def __str__(self): """Text representation of the ConfigSet (for debugging purposes)""" return "\n".join(["%r %r" % (x, self.__getitem__(x)) for x in self.keys()]) @@ -73,7 +79,7 @@ def configure(conf): """ try: while 1: - x = self.table.get(key, None) + x = self.table.get(key) if not x is None: return x self = self.parent @@ -82,13 +88,13 @@ def configure(conf): def __setitem__(self, key, value): """ - Dictionary interface: get value from key + Dictionary interface: set value from key """ self.table[key] = value def __delitem__(self, key): """ - Dictionary interface: get value from key + Dictionary interface: mark the value as missing """ self[key] = [] @@ -101,7 +107,7 @@ def configure(conf): conf.env['value'] """ if name in self.__slots__: - return object.__getattr__(self, name) + return object.__getattribute__(self, name) else: return self[name] @@ -152,7 +158,7 @@ def derive(self): def detach(self): """ - Detach self from its parent (if existing) + Detaches this instance from its parent (if present) Modifying the parent :py:class:`ConfigSet` will not change the current object Modifying this :py:class:`ConfigSet` will not modify the parent one. @@ -167,21 +173,23 @@ def detach(self): for x in keys: tbl[x] = copy.deepcopy(tbl[x]) self.table = tbl + return self def get_flat(self, key): """ - Return a value as a string. If the input is a list, the value returned is space-separated. + Returns a value as a string. If the input is a list, the value returned is space-separated. :param key: key to use :type key: string """ s = self[key] - if isinstance(s, str): return s + if isinstance(s, str): + return s return ' '.join(s) def _get_list_value_for_modification(self, key): """ - Return a list value for further modification. + Returns a list value for further modification. 
The list may be modified inplace and there is no need to do this afterwards:: @@ -190,16 +198,20 @@ def _get_list_value_for_modification(self, key): try: value = self.table[key] except KeyError: - try: value = self.parent[key] - except AttributeError: value = [] - if isinstance(value, list): - value = value[:] + try: + value = self.parent[key] + except AttributeError: + value = [] else: - value = [value] + if isinstance(value, list): + # force a copy + value = value[:] + else: + value = [value] + self.table[key] = value else: if not isinstance(value, list): - value = [value] - self.table[key] = value + self.table[key] = value = [value] return value def append_value(self, var, val): @@ -231,7 +243,7 @@ def configure(conf): def append_unique(self, var, val): """ - Append a value to the specified item only if it's not already present:: + Appends a value to the specified item only if it's not already present:: def build(bld): bld.env.append_unique('CFLAGS', ['-O2', '-g']) @@ -248,7 +260,7 @@ def build(bld): def get_merged_dict(self): """ - Compute the merged dictionary from the fusion of self and all its parent + Computes the merged dictionary from the fusion of self and all its parent :rtype: a ConfigSet object """ @@ -256,8 +268,10 @@ def get_merged_dict(self): env = self while 1: table_list.insert(0, env.table) - try: env = env.parent - except AttributeError: break + try: + env = env.parent + except AttributeError: + break merged_table = {} for table in table_list: merged_table.update(table) @@ -265,7 +279,7 @@ def get_merged_dict(self): def store(self, filename): """ - Write the :py:class:`ConfigSet` data into a file. See :py:meth:`ConfigSet.load` for reading such files. + Serializes the :py:class:`ConfigSet` data to a file. See :py:meth:`ConfigSet.load` for reading such files. :param filename: file to use :type filename: string @@ -292,31 +306,30 @@ def store(self, filename): def load(self, filename): """ - Retrieve the :py:class:`ConfigSet` data from a file. See :py:meth:`ConfigSet.store` for writing such files + Restores contents from a file (current values are not cleared). Files are written using :py:meth:`ConfigSet.store`. :param filename: file to use :type filename: string """ tbl = self.table - code = Utils.readf(filename, m='rU') + code = Utils.readf(filename, m='r') for m in re_imp.finditer(code): g = m.group tbl[g(2)] = eval(g(3)) - Logs.debug('env: %s' % str(self.table)) + Logs.debug('env: %s', self.table) def update(self, d): """ - Dictionary interface: replace values from another dict + Dictionary interface: replace values with the ones from another dict :param d: object to use the value from :type d: dict-like object """ - for k, v in d.items(): - self[k] = v + self.table.update(d) def stash(self): """ - Store the object state, to provide a kind of transaction support:: + Stores the object state to provide transactionality semantics:: env = ConfigSet() env.stash() @@ -334,6 +347,12 @@ def stash(self): tbl[x] = copy.deepcopy(tbl[x]) self.undo_stack = self.undo_stack + [orig] + def commit(self): + """ + Commits transactional changes. See :py:meth:`ConfigSet.stash` + """ + self.undo_stack.pop(-1) + def revert(self): """ Reverts the object to a previous state. 
See :py:meth:`ConfigSet.stash` diff --git a/waflib/Configure.py b/waflib/Configure.py index 1433336d2d..a8080b6f42 100644 --- a/waflib/Configure.py +++ b/waflib/Configure.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # encoding: utf-8 -# Thomas Nagy, 2005-2010 (ita) +# Thomas Nagy, 2005-2018 (ita) """ Configuration system @@ -12,15 +12,9 @@ * hold configuration routines such as ``find_program``, etc """ -import os, shlex, sys, time, re, shutil +import os, re, shlex, shutil, sys, time, traceback from waflib import ConfigSet, Utils, Options, Logs, Context, Build, Errors -BREAK = 'break' -"""In case of a configuration error, break""" - -CONTINUE = 'continue' -"""In case of a configuration error, continue""" - WAF_CONFIG_LOG = 'config.log' """Name of the configuration log file""" @@ -131,7 +125,7 @@ def init_dirs(self): self.bldnode.mkdir() if not os.path.isdir(self.bldnode.abspath()): - conf.fatal('Could not create the build directory %s' % self.bldnode.abspath()) + self.fatal('Could not create the build directory %s' % self.bldnode.abspath()) def execute(self): """ @@ -151,19 +145,13 @@ def execute(self): if ver: app = "%s (%s)" % (app, ver) - now = time.ctime() - pyver = sys.hexversion - systype = sys.platform - args = " ".join(sys.argv) - wafver = Context.WAFVERSION - abi = Context.ABI - self.to_log(conf_template % vars()) - + params = {'now': time.ctime(), 'pyver': sys.hexversion, 'systype': sys.platform, 'args': " ".join(sys.argv), 'wafver': Context.WAFVERSION, 'abi': Context.ABI, 'app': app} + self.to_log(conf_template % params) self.msg('Setting top to', self.srcnode.abspath()) self.msg('Setting out to', self.bldnode.abspath()) if id(self.srcnode) == id(self.bldnode): - Logs.warn('Setting top == out (remember to use "update_outputs")') + Logs.warn('Setting top == out') elif id(self.path) != id(self.srcnode): if self.srcnode.is_child_of(self.path): Logs.warn('Are you certain that you do not want to set top="." ?') @@ -179,8 +167,9 @@ def execute(self): # consider the current path as the root directory (see prepare_impl). 
# to remove: use 'waf distclean' env = ConfigSet.ConfigSet() - env['argv'] = sys.argv - env['options'] = Options.options.__dict__ + env.argv = sys.argv + env.options = Options.options.__dict__ + env.config_cmd = self.cmd env.run_dir = Context.run_dir env.top_dir = Context.top_dir @@ -188,15 +177,16 @@ def execute(self): # conf.hash & conf.files hold wscript files paths and hash # (used only by Configure.autoconfig) - env['hash'] = self.hash - env['files'] = self.files - env['environ'] = dict(self.environ) + env.hash = self.hash + env.files = self.files + env.environ = dict(self.environ) + env.launch_dir = Context.launch_dir - if not self.env.NO_LOCK_IN_RUN: + if not (self.env.NO_LOCK_IN_RUN or env.environ.get('NO_LOCK_IN_RUN') or getattr(Options.options, 'no_lock_in_run')): env.store(os.path.join(Context.run_dir, Options.lockfile)) - if not self.env.NO_LOCK_IN_TOP: + if not (self.env.NO_LOCK_IN_TOP or env.environ.get('NO_LOCK_IN_TOP') or getattr(Options.options, 'no_lock_in_top')): env.store(os.path.join(Context.top_dir, Options.lockfile)) - if not self.env.NO_LOCK_IN_OUT: + if not (self.env.NO_LOCK_IN_OUT or env.environ.get('NO_LOCK_IN_OUT') or getattr(Options.options, 'no_lock_in_out')): env.store(os.path.join(Context.out_dir, Options.lockfile)) def prepare_env(self, env): @@ -208,17 +198,17 @@ def prepare_env(self, env): """ if not env.PREFIX: if Options.options.prefix or Utils.is_win32: - env.PREFIX = os.path.abspath(os.path.expanduser(Options.options.prefix)) + env.PREFIX = Options.options.prefix else: - env.PREFIX = '' + env.PREFIX = '/' if not env.BINDIR: if Options.options.bindir: - env.BINDIR = os.path.abspath(os.path.expanduser(Options.options.bindir)) + env.BINDIR = Options.options.bindir else: env.BINDIR = Utils.subst_vars('${PREFIX}/bin', env) if not env.LIBDIR: if Options.options.libdir: - env.LIBDIR = os.path.abspath(os.path.expanduser(Options.options.libdir)) + env.LIBDIR = Options.options.libdir else: env.LIBDIR = Utils.subst_vars('${PREFIX}/lib%s' % Utils.lib64(), env) @@ -234,38 +224,42 @@ def store(self): tmpenv = self.all_envs[key] tmpenv.store(os.path.join(self.cachedir.abspath(), key + Build.CACHE_SUFFIX)) - def load(self, input, tooldir=None, funs=None): + def load(self, tool_list, tooldir=None, funs=None, with_sys_path=True, cache=False): """ Load Waf tools, which will be imported whenever a build is started. 
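(Editor's aside, not part of the patch: a minimal wscript-style sketch of how projects typically call the tool-loading method documented here; the tool names and the local ``tools`` directory are illustrative.)

	def options(opt):
		opt.load('compiler_c')

	def configure(conf):
		conf.load('compiler_c')                  # imports the tool and runs its configure() function
		conf.load('my_tool', tooldir='tools')    # hypothetical project-local tool looked up in ./tools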
- :param input: waf tools to import - :type input: list of string + :param tool_list: waf tools to import + :type tool_list: list of string :param tooldir: paths for the imports :type tooldir: list of string :param funs: functions to execute from the waf tools :type funs: list of string + :param cache: whether to prevent the tool from running twice + :type cache: bool """ - tools = Utils.to_list(input) - if tooldir: tooldir = Utils.to_list(tooldir) + tools = Utils.to_list(tool_list) + if tooldir: + tooldir = Utils.to_list(tooldir) for tool in tools: # avoid loading the same tool more than once with the same functions # used by composite projects - mag = (tool, id(self.env), funs) - if mag in self.tool_cache: - self.to_log('(tool %s is already loaded, skipping)' % tool) - continue - self.tool_cache.append(mag) + if cache: + mag = (tool, id(self.env), tooldir, funs) + if mag in self.tool_cache: + self.to_log('(tool %s is already loaded, skipping)' % tool) + continue + self.tool_cache.append(mag) module = None try: - module = Context.load_tool(tool, tooldir, ctx=self) + module = Context.load_tool(tool, tooldir, ctx=self, with_sys_path=with_sys_path) except ImportError as e: - self.fatal('Could not load the Waf tool %r from %r\n%s' % (tool, sys.path, e)) + self.fatal('Could not load the Waf tool %r from %r\n%s' % (tool, getattr(e, 'waf_sys_path', sys.path), e)) except Exception as e: self.to_log('imp %r (%r & %r)' % (tool, tooldir, funs)) - self.to_log(Utils.ex_stack()) + self.to_log(traceback.format_exc()) raise if funs is not None: @@ -273,8 +267,10 @@ def load(self, input, tooldir=None, funs=None): else: func = getattr(module, 'configure', None) if func: - if type(func) is type(Utils.readf): func(self) - else: self.eval_rules(func) + if type(func) is type(Utils.readf): + func(self) + else: + self.eval_rules(func) self.tools.append({'tool':tool, 'tooldir':tooldir, 'funs':funs}) @@ -291,8 +287,7 @@ def post_recurse(self, node): def eval_rules(self, rules): """ - Execute the configuration tests. The method :py:meth:`waflib.Configure.ConfigurationContext.err_handler` - is used to process the eventual exceptions + Execute configuration tests provided as list of functions to run :param rules: list of configuration method names :type rules: list of string @@ -300,28 +295,9 @@ def eval_rules(self, rules): self.rules = Utils.to_list(rules) for x in self.rules: f = getattr(self, x) - if not f: self.fatal("No such method '%s'." 
% x) - try: - f() - except Exception as e: - ret = self.err_handler(x, e) - if ret == BREAK: - break - elif ret == CONTINUE: - continue - else: - raise - - def err_handler(self, fun, error): - """ - Error handler for the configuration tests, the default is to let the exception raise - - :param fun: configuration test - :type fun: method - :param error: exception - :type error: exception - """ - pass + if not f: + self.fatal('No such configuration function %r' % x) + f() def conf(f): """ @@ -336,23 +312,20 @@ def configure(conf): :type f: function """ def fun(*k, **kw): - mandatory = True - if 'mandatory' in kw: - mandatory = kw['mandatory'] - del kw['mandatory'] - + mandatory = kw.pop('mandatory', True) try: return f(*k, **kw) except Errors.ConfigurationError: if mandatory: raise + fun.__name__ = f.__name__ setattr(ConfigurationContext, f.__name__, fun) setattr(Build.BuildContext, f.__name__, fun) return f @conf -def add_os_flags(self, var, dest=None): +def add_os_flags(self, var, dest=None, dup=False): """ Import operating system environment values into ``conf.env`` dict:: @@ -363,10 +336,15 @@ def configure(conf): :type var: string :param dest: destination variable, by default the same as var :type dest: string + :param dup: add the same set of flags again + :type dup: bool """ - # do not use 'get' to make certain the variable is not defined - try: self.env.append_value(dest or var, shlex.split(self.environ[var])) - except KeyError: pass + try: + flags = shlex.split(self.environ[var]) + except KeyError: + return + if dup or ''.join(flags) not in ''.join(Utils.to_list(self.env[dest or var])): + self.env.append_value(dest or var, flags) @conf def cmd_to_list(self, cmd): @@ -376,21 +354,26 @@ def cmd_to_list(self, cmd): :param cmd: command :type cmd: a string or a list of string """ - if isinstance(cmd, str) and cmd.find(' '): - try: - os.stat(cmd) - except OSError: + if isinstance(cmd, str): + if os.path.isfile(cmd): + # do not take any risk + return [cmd] + if os.sep == '/': return shlex.split(cmd) else: - return [cmd] + try: + return shlex.split(cmd, posix=False) + except TypeError: + # Python 2.5 on windows? 
+ return shlex.split(cmd) return cmd @conf -def check_waf_version(self, mini='1.7.99', maxi='1.9.0', **kw): +def check_waf_version(self, mini='1.9.99', maxi='2.1.0', **kw): """ Raise a Configuration error if the Waf version does not strictly match the given bounds:: - conf.check_waf_version(mini='1.8.0', maxi='1.9.0') + conf.check_waf_version(mini='1.9.99', maxi='2.1.0') :type mini: number, tuple or string :param mini: Minimum required version @@ -412,11 +395,11 @@ def find_file(self, filename, path_list=[]): :param filename: name of the file to search for :param path_list: list of directories to search - :return: the first occurrence filename or '' if filename could not be found + :return: the first matching filename; else a configuration exception is raised """ for n in Utils.to_list(filename): for d in Utils.to_list(path_list): - p = os.path.join(d, n) + p = os.path.expanduser(os.path.join(d, n)) if os.path.exists(p): return p self.fatal('Could not find %r' % filename) @@ -432,14 +415,17 @@ def find_program(self, filename, **kw): :param path_list: paths to use for searching :type param_list: list of string - :param var: store the result to conf.env[var], by default use filename.upper() + :param var: store the result to conf.env[var] where var defaults to filename.upper() if not provided; the result is stored as a list of strings :type var: string - :param ext: list of extensions for the binary (do not add an extension for portability) - :type ext: list of string + :param value: obtain the program from the value passed exclusively + :type value: list or string (list is preferred) + :param exts: list of extensions for the binary (do not add an extension for portability) + :type exts: list of string :param msg: name to display in the log, by default filename is used :type msg: string :param interpreter: interpreter for the program :type interpreter: ConfigSet variable key + :raises: :py:class:`waflib.Errors.ConfigurationError` """ exts = kw.get('exts', Utils.is_win32 and '.exe,.com,.bat,.cmd' or ',.sh,.pl,.py') @@ -453,7 +439,7 @@ def find_program(self, filename, **kw): var = kw.get('var', '') if not var: - var = re.sub(r'[-.]', '_', filename[0].upper()) + var = re.sub(r'\W', '_', filename[0].upper()) path_list = kw.get('path_list', '') if path_list: @@ -461,18 +447,15 @@ def find_program(self, filename, **kw): else: path_list = environ.get('PATH', '').split(os.pathsep) - if var in environ: - filename = environ[var] - if os.path.isfile(filename): - # typical CC=/usr/bin/gcc waf configure build - ret = [filename] - else: - # case CC='ccache gcc' waf configure build - ret = self.cmd_to_list(filename) + if kw.get('value'): + # user-provided in command-line options and passed to find_program + ret = self.cmd_to_list(kw['value']) + elif environ.get(var): + # user-provided in the os environment + ret = self.cmd_to_list(environ[var]) elif self.env[var]: - # set by the user in the wscript file - ret = self.env[var] - ret = self.cmd_to_list(ret) + # a default option in the wscript file + ret = self.cmd_to_list(self.env[var]) else: if not ret: ret = self.find_binary(filename, exts.split(','), path_list) @@ -482,7 +465,6 @@ def find_program(self, filename, **kw): ret = Utils.get_registry_app_path(Utils.winreg.HKEY_LOCAL_MACHINE, filename) ret = self.cmd_to_list(ret) - if ret: if len(ret) == 1: retmsg = ret[0] @@ -491,14 +473,14 @@ def find_program(self, filename, **kw): else: retmsg = False - self.msg("Checking for program '%s'" % msg, retmsg, **kw) - if not kw.get('quiet', None): + 
self.msg('Checking for program %r' % msg, retmsg, **kw) + if not kw.get('quiet'): self.to_log('find program=%r paths=%r var=%r -> %r' % (filename, path_list, var, ret)) if not ret: self.fatal(kw.get('errmsg', '') or 'Could not find the program %r' % filename) - interpreter = kw.get('interpreter', None) + interpreter = kw.get('interpreter') if interpreter is None: if not Utils.check_exe(ret[0], env=environ): self.fatal('Program %r is not executable' % ret) @@ -514,36 +496,39 @@ def find_binary(self, filenames, exts, paths): for ext in exts: exe_name = f + ext if os.path.isabs(exe_name): - if os.path.isfile(exe_name): + if os.path.isfile(exe_name) and os.access(exe_name, os.X_OK): return exe_name else: for path in paths: x = os.path.expanduser(os.path.join(path, exe_name)) - if os.path.isfile(x): + if os.path.isfile(x) and os.access(x, os.X_OK): return x return None - @conf def run_build(self, *k, **kw): """ - Create a temporary build context to execute a build. A reference to that build - context is kept on self.test_bld for debugging purposes, and you should not rely - on it too much (read the note on the cache below). - The parameters given in the arguments to this function are passed as arguments for - a single task generator created in the build. Only three parameters are obligatory: + Create a temporary build context to execute a build. A temporary reference to that build + context is kept on self.test_bld for debugging purposes. + The arguments to this function are passed to a single task generator for that build. + Only three parameters are mandatory: :param features: features to pass to a task generator created in the build :type features: list of string :param compile_filename: file to create for the compilation (default: *test.c*) :type compile_filename: string - :param code: code to write in the filename to compile + :param code: input file contents :type code: string - Though this function returns *0* by default, the build may set an attribute named *retval* on the + Though this function returns *0* by default, the build may bind attribute named *retval* on the build context object to return a particular value. See :py:func:`waflib.Tools.c_config.test_exec_fun` for example. - This function also provides a limited cache. To use it, provide the following option:: + The temporary builds creates a temporary folder; the name of that folder is calculated + by hashing input arguments to this function, with the exception of :py:class:`waflib.ConfigSet.ConfigSet` + objects which are used for both reading and writing values. + + This function also features a cache which is disabled by default; that cache relies + on the hash value calculated as indicated above:: def options(opt): opt.add_option('--confcache', dest='confcache', default=0, @@ -554,11 +539,24 @@ def options(opt): $ waf configure --confcache """ - - lst = [str(v) for (p, v) in kw.items() if p != 'env'] - h = Utils.h_list(lst) + buf = [] + for key in sorted(kw.keys()): + v = kw[key] + if isinstance(v, ConfigSet.ConfigSet): + # values are being written to, so they are excluded from contributing to the hash + continue + elif hasattr(v, '__call__'): + buf.append(Utils.h_fun(v)) + else: + buf.append(str(v)) + h = Utils.h_list(buf) dir = self.bldnode.abspath() + os.sep + (not Utils.is_win32 and '.' 
or '') + 'conf_check_' + Utils.to_hex(h) + cachemode = kw.get('confcache', getattr(Options.options, 'confcache', None)) + + if not cachemode and os.path.exists(dir): + shutil.rmtree(dir) + try: os.makedirs(dir) except OSError: @@ -569,13 +567,10 @@ def options(opt): except OSError: self.fatal('cannot use the configuration test folder %r' % dir) - cachemode = getattr(Options.options, 'confcache', None) if cachemode == 1: try: proj = ConfigSet.ConfigSet(os.path.join(dir, 'cache_run_build')) - except OSError: - pass - except IOError: + except EnvironmentError: pass else: ret = proj['cache_run_build'] @@ -588,7 +583,8 @@ def options(opt): if not os.path.exists(bdir): os.makedirs(bdir) - self.test_bld = bld = Build.BuildContext(top_dir=dir, out_dir=bdir) + cls_name = kw.get('run_build_cls') or getattr(self, 'run_build_cls', 'build') + self.test_bld = bld = Context.create_context(cls_name, top_dir=dir, out_dir=bdir) bld.init_dirs() bld.progress_bar = 0 bld.targets = '*' @@ -597,29 +593,26 @@ def options(opt): bld.all_envs.update(self.all_envs) # not really necessary bld.env = kw['env'] - # OMG huge hack bld.kw = kw bld.conf = self kw['build_fun'](bld) - ret = -1 try: try: bld.compile() except Errors.WafError: - ret = 'Test does not build: %s' % Utils.ex_stack() + ret = 'Test does not build: %s' % traceback.format_exc() self.fatal(ret) else: ret = getattr(bld, 'retval', 0) finally: - if cachemode == 1: + if cachemode: # cache the results each time proj = ConfigSet.ConfigSet() proj['cache_run_build'] = ret proj.store(os.path.join(dir, 'cache_run_build')) else: shutil.rmtree(dir) - return ret @conf @@ -635,7 +628,7 @@ def test(self, *k, **kw): kw['env'] = self.env.derive() # validate_c for example - if kw.get('validate', None): + if kw.get('validate'): kw['validate'](kw) self.start_msg(kw['msg'], **kw) @@ -651,7 +644,7 @@ def test(self, *k, **kw): else: kw['success'] = ret - if kw.get('post_check', None): + if kw.get('post_check'): ret = kw['post_check'](kw) if ret: @@ -661,5 +654,3 @@ def test(self, *k, **kw): self.end_msg(self.ret_msg(kw['okmsg'], kw), **kw) return ret - - diff --git a/waflib/Context.py b/waflib/Context.py old mode 100644 new mode 100755 index 974df0c7a7..971a9e6b5c --- a/waflib/Context.py +++ b/waflib/Context.py @@ -1,26 +1,36 @@ #!/usr/bin/env python # encoding: utf-8 -# Thomas Nagy, 2010 (ita) +# Thomas Nagy, 2010-2018 (ita) """ -Classes and functions required for waf commands +Classes and functions enabling the command system """ -import os, re, imp, sys +import os, re, sys from waflib import Utils, Errors, Logs import waflib.Node +if sys.hexversion > 0x3040000: + import types + class imp(object): + new_module = lambda x: types.ModuleType(x) +else: + import imp + # the following 3 constants are updated on each new release (do not touch) -HEXVERSION=0x1080800 +HEXVERSION=0x2001b00 """Constant updated on new releases""" -WAFVERSION="1.8.8" +WAFVERSION="2.0.27" """Constant updated on new releases""" -WAFREVISION="cca30e91f4e1796b6a9c204b1604ac52315fad56" +WAFREVISION="c3e645e395505cb5faa115172b1fc9abdaeaf146" """Git revision when the waf version is updated""" -ABI = 98 +WAFNAME="waf" +"""Application name displayed on --help""" + +ABI = 20 """Version of the build data cache file format (used in :py:const:`waflib.Context.DBFILE`)""" DBFILE = '.wafpickle-%s-%d-%d' % (sys.platform, sys.hexversion, ABI) @@ -41,7 +51,6 @@ WSCRIPT_FILE = 'wscript' """Name of the waf script files""" - launch_dir = '' """Directory from which waf has been called""" run_dir = '' @@ -53,23 +62,12 @@ waf_dir 
= '' """Directory containing the waf modules""" -local_repo = '' -"""Local repository containing additional Waf tools (plugins)""" -remote_repo = 'http://waf.googlecode.com/git/' -""" -Remote directory containing downloadable waf tools. The missing tools can be downloaded by using:: - - $ waf configure --download -""" - -remote_locs = ['waflib/extras', 'waflib/Tools'] -""" -Remote directories for use with :py:const:`waflib.Context.remote_repo` -""" +default_encoding = Utils.console_encoding() +"""Encoding to use when reading outputs from other processes""" g_module = None """ -Module representing the main wscript file (see :py:const:`waflib.Context.run_dir`) +Module representing the top-level wscript file (see :py:const:`waflib.Context.run_dir`) """ STDOUT = 1 @@ -82,20 +80,20 @@ are added automatically by a metaclass. """ - def create_context(cmd_name, *k, **kw): """ - Create a new :py:class:`waflib.Context.Context` instance corresponding to the given command. + Returns a new :py:class:`waflib.Context.Context` instance corresponding to the given command. Used in particular by :py:func:`waflib.Scripting.run_command` - :param cmd_name: command + :param cmd_name: command name :type cmd_name: string :param k: arguments to give to the context class initializer :type k: list :param k: keyword arguments to give to the context class initializer :type k: dict + :return: Context object + :rtype: :py:class:`waflib.Context.Context` """ - global classes for x in classes: if x.cmd == cmd_name: return x(*k, **kw) @@ -105,14 +103,15 @@ def create_context(cmd_name, *k, **kw): class store_context(type): """ - Metaclass for storing the command classes into the list :py:const:`waflib.Context.classes` - Context classes must provide an attribute 'cmd' representing the command to execute + Metaclass that registers command classes into the list :py:const:`waflib.Context.classes` + Context classes must provide an attribute 'cmd' representing the command name, and a function + attribute 'fun' representing the function name that the command uses. """ - def __init__(cls, name, bases, dict): - super(store_context, cls).__init__(name, bases, dict) + def __init__(cls, name, bases, dct): + super(store_context, cls).__init__(name, bases, dct) name = cls.__name__ - if name == 'ctx' or name == 'Context': + if name in ('ctx', 'Context'): return try: @@ -123,11 +122,10 @@ def __init__(cls, name, bases, dict): if not getattr(cls, 'fun', None): cls.fun = cls.cmd - global classes classes.insert(0, cls) ctx = store_context('ctx', (object,), {}) -"""Base class for the :py:class:`waflib.Context.Context` classes""" +"""Base class for all :py:class:`waflib.Context.Context` classes""" class Context(ctx): """ @@ -138,7 +136,7 @@ class Context(ctx): def foo(ctx): print(ctx.__class__.__name__) # waflib.Context.Context - Subclasses must define the attribute 'cmd': + Subclasses must define the class attributes 'cmd' and 'fun': :param cmd: command to execute as in ``waf cmd`` :type cmd: string @@ -146,7 +144,7 @@ def foo(ctx): :type fun: string .. 
inheritance-diagram:: waflib.Context.Context waflib.Build.BuildContext waflib.Build.InstallContext waflib.Build.UninstallContext waflib.Build.StepContext waflib.Build.ListContext waflib.Configure.ConfigurationContext waflib.Scripting.Dist waflib.Scripting.DistCheck waflib.Build.CleanContext - + :top-classes: waflib.Context.Context """ errors = Errors @@ -156,19 +154,18 @@ def foo(ctx): tools = {} """ - A cache for modules (wscript files) read by :py:meth:`Context.Context.load` + A module cache for wscript files; see :py:meth:`Context.Context.load` """ def __init__(self, **kw): try: rd = kw['run_dir'] except KeyError: - global run_dir rd = run_dir # binds the context to the nodes in use to avoid a context singleton - self.node_class = type("Nod3", (waflib.Node.Node,), {}) - self.node_class.__module__ = "waflib.Node" + self.node_class = type('Nod3', (waflib.Node.Node,), {}) + self.node_class.__module__ = 'waflib.Node' self.node_class.ctx = self self.root = self.node_class('', None) @@ -179,18 +176,9 @@ def __init__(self, **kw): self.exec_dict = {'ctx':self, 'conf':self, 'bld':self, 'opt':self} self.logger = None - def __hash__(self): - """ - Return a hash value for storing context objects in dicts or sets. The value is not persistent. - - :return: hash value - :rtype: int - """ - return id(self) - def finalize(self): """ - Use to free resources such as open files potentially held by the logger + Called to free resources such as logger files """ try: logger = self.logger @@ -202,32 +190,34 @@ def finalize(self): def load(self, tool_list, *k, **kw): """ - Load a Waf tool as a module, and try calling the function named :py:const:`waflib.Context.Context.fun` from it. - A ``tooldir`` value may be provided as a list of module paths. + Loads a Waf tool as a module, and try calling the function named :py:const:`waflib.Context.Context.fun` + from it. A ``tooldir`` argument may be provided as a list of module paths. + :param tool_list: list of Waf tool names to load :type tool_list: list of string or space-separated string - :param tool_list: list of Waf tools to use """ tools = Utils.to_list(tool_list) path = Utils.to_list(kw.get('tooldir', '')) + with_sys_path = kw.get('with_sys_path', True) for t in tools: - module = load_tool(t, path) + module = load_tool(t, path, with_sys_path=with_sys_path) fun = getattr(module, kw.get('name', self.fun), None) if fun: fun(self) def execute(self): """ - Execute the command. Redefine this method in subclasses. + Here, it calls the function name in the top-level wscript file. Most subclasses + redefine this method to provide additional functionality. """ - global g_module self.recurse([os.path.dirname(g_module.root_path)]) def pre_recurse(self, node): """ - Method executed immediately before a folder is read by :py:meth:`waflib.Context.Context.recurse`. The node given is set - as an attribute ``self.cur_script``, and as the current path ``self.path`` + Method executed immediately before a folder is read by :py:meth:`waflib.Context.Context.recurse`. + The current script is bound as a Node object on ``self.cur_script``, and the current path + is bound to ``self.path`` :param node: script :type node: :py:class:`waflib.Node.Node` @@ -239,7 +229,7 @@ def pre_recurse(self, node): def post_recurse(self, node): """ - Restore ``self.cur_script`` and ``self.path`` right after :py:meth:`waflib.Context.Context.recurse` terminates. + Restores ``self.cur_script`` and ``self.path`` right after :py:meth:`waflib.Context.Context.recurse` terminates. 
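# Editor's note: illustrative sketch, not part of the patch. The store_context metaclass
# described above registers every Context subclass providing a 'cmd' attribute, which is
# how wscript files can add new top-level commands; 'package' below is a hypothetical name.
from waflib.Context import Context

class package_context(Context):
	cmd = 'package'	# run as 'waf package'
	fun = 'package'	# name of the wscript function to execute

def package(ctx):
	print('packaging from %s' % ctx.path.abspath())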
:param node: script :type node: :py:class:`waflib.Node.Node` @@ -250,10 +240,13 @@ def post_recurse(self, node): def recurse(self, dirs, name=None, mandatory=True, once=True, encoding=None): """ - Run user code from the supplied list of directories. + Runs user-provided functions from the supplied list of directories. The directories can be either absolute, or relative to the directory - of the wscript file. The methods :py:meth:`waflib.Context.Context.pre_recurse` and :py:meth:`waflib.Context.Context.post_recurse` - are called immediately before and after a script has been executed. + of the wscript file + + The methods :py:meth:`waflib.Context.Context.pre_recurse` and + :py:meth:`waflib.Context.Context.post_recurse` are called immediately before + and after a script has been executed. :param dirs: List of directories to visit :type dirs: list of string or space-separated string @@ -283,7 +276,7 @@ def recurse(self, dirs, name=None, mandatory=True, once=True, encoding=None): cache[node] = True self.pre_recurse(node) try: - function_code = node.read('rU', encoding) + function_code = node.read('r', encoding) exec(compile(function_code, node.abspath(), 'exec'), self.exec_dict) finally: self.post_recurse(node) @@ -299,34 +292,52 @@ def recurse(self, dirs, name=None, mandatory=True, once=True, encoding=None): if not user_function: if not mandatory: continue - raise Errors.WafError('No function %s defined in %s' % (name or self.fun, node.abspath())) + raise Errors.WafError('No function %r defined in %s' % (name or self.fun, node.abspath())) user_function(self) finally: self.post_recurse(node) elif not node: if not mandatory: continue + try: + os.listdir(d) + except OSError: + raise Errors.WafError('Cannot read the folder %r' % d) raise Errors.WafError('No wscript file in directory %s' % d) + def log_command(self, cmd, kw): + if Logs.verbose: + fmt = os.environ.get('WAF_CMD_FORMAT') + if fmt == 'string': + if not isinstance(cmd, str): + cmd = Utils.shell_escape(cmd) + Logs.debug('runner: %r', cmd) + Logs.debug('runner_env: kw=%s', kw) + def exec_command(self, cmd, **kw): """ - Execute a command and return the exit status. If the context has the attribute 'log', - capture and log the process stderr/stdout for logging purposes:: + Runs an external process and returns the exit status:: def run(tsk): ret = tsk.generator.bld.exec_command('touch foo.txt') return ret - This method captures the standard/error outputs (Issue 1101), but it does not return the values - unlike :py:meth:`waflib.Context.Context.cmd_and_log` + If the context has the attribute 'log', then captures and logs the process stderr/stdout. + Unlike :py:meth:`waflib.Context.Context.cmd_and_log`, this method does not return the + stdout/stderr values captured. :param cmd: command argument for subprocess.Popen - :param kw: keyword arguments for subprocess.Popen + :type cmd: string or list + :param kw: keyword arguments for subprocess.Popen. The parameters input/timeout will be passed to wait/communicate. 
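# Editor's note: illustrative sketch, not part of the patch. Typical recurse() usage as
# described above; the sub-directories are assumed to contain their own wscript files.
def build(bld):
	bld.recurse('src tests')                # space-separated string or list
	bld.recurse(['docs'], mandatory=False)  # tolerate a missing docs/wscript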
+ :type kw: dict + :returns: process exit status + :rtype: integer + :raises: :py:class:`waflib.Errors.WafError` if an invalid executable is specified for a non-shell process + :raises: :py:class:`waflib.Errors.WafError` in case of execution failure """ subprocess = Utils.subprocess kw['shell'] = isinstance(cmd, str) - Logs.debug('runner: %r' % cmd) - Logs.debug('runner_env: kw=%s' % kw) + self.log_command(cmd, kw) if self.logger: self.logger.info(cmd) @@ -336,30 +347,46 @@ def run(tsk): if 'stderr' not in kw: kw['stderr'] = subprocess.PIPE - if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]): - raise Errors.WafError("Program %s not found!" % cmd[0]) + if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0], env=kw.get('env', os.environ)): + # This call isn't a shell command, and if the specified exe doesn't exist, check for a relative path being set + # with cwd and if so assume the caller knows what they're doing and don't pre-emptively fail + if not (cmd[0][0] == '.' and 'cwd' in kw): + raise Errors.WafError('Program %s not found!' % cmd[0]) + + cargs = {} + if 'timeout' in kw: + if sys.hexversion >= 0x3030000: + cargs['timeout'] = kw['timeout'] + if not 'start_new_session' in kw: + kw['start_new_session'] = True + del kw['timeout'] + if 'input' in kw: + if kw['input']: + cargs['input'] = kw['input'] + kw['stdin'] = subprocess.PIPE + del kw['input'] + + if 'cwd' in kw: + if not isinstance(kw['cwd'], str): + kw['cwd'] = kw['cwd'].abspath() + + encoding = kw.pop('decode_as', default_encoding) try: - if kw['stdout'] or kw['stderr']: - p = subprocess.Popen(cmd, **kw) - (out, err) = p.communicate() - ret = p.returncode - else: - out, err = (None, None) - ret = subprocess.Popen(cmd, **kw).wait() + ret, out, err = Utils.run_process(cmd, kw, cargs) except Exception as e: raise Errors.WafError('Execution failure: %s' % str(e), ex=e) if out: if not isinstance(out, str): - out = out.decode(sys.stdout.encoding or 'iso8859-1') + out = out.decode(encoding, errors='replace') if self.logger: - self.logger.debug('out: %s' % out) + self.logger.debug('out: %s', out) else: Logs.info(out, extra={'stream':sys.stdout, 'c1': ''}) if err: if not isinstance(err, str): - err = err.decode(sys.stdout.encoding or 'iso8859-1') + err = err.decode(encoding, errors='replace') if self.logger: self.logger.error('err: %s' % err) else: @@ -369,62 +396,82 @@ def run(tsk): def cmd_and_log(self, cmd, **kw): """ - Execute a command and return stdout if the execution is successful. + Executes a process and returns stdout/stderr if the execution is successful. An exception is thrown when the exit status is non-0. In that case, both stderr and stdout - will be bound to the WafError object:: + will be bound to the WafError object (configuration tests):: def configure(conf): out = conf.cmd_and_log(['echo', 'hello'], output=waflib.Context.STDOUT, quiet=waflib.Context.BOTH) (out, err) = conf.cmd_and_log(['echo', 'hello'], output=waflib.Context.BOTH) + (out, err) = conf.cmd_and_log(cmd, input='\\n'.encode(), output=waflib.Context.STDOUT) try: conf.cmd_and_log(['which', 'someapp'], output=waflib.Context.BOTH) - except Exception as e: + except Errors.WafError as e: print(e.stdout, e.stderr) :param cmd: args for subprocess.Popen - :param kw: keyword arguments for subprocess.Popen + :type cmd: list or string + :param kw: keyword arguments for subprocess.Popen. The parameters input/timeout will be passed to wait/communicate. 
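# Editor's note: illustrative sketch, not part of the patch. The 'timeout' and 'input'
# keywords handled above are forwarded to communicate(); 'timeout' only takes effect on
# Python >= 3.3, as per the hexversion check.
from waflib import Context

def configure(conf):
	out = conf.cmd_and_log(['echo', 'hello'], output=Context.STDOUT, quiet=Context.BOTH, timeout=10)
	ret = conf.exec_command(['cat'], input='piped data\n'.encode(), timeout=10)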
+ :type kw: dict + :returns: a tuple containing the contents of stdout and stderr + :rtype: string + :raises: :py:class:`waflib.Errors.WafError` if an invalid executable is specified for a non-shell process + :raises: :py:class:`waflib.Errors.WafError` in case of execution failure; stdout/stderr/returncode are bound to the exception object """ subprocess = Utils.subprocess kw['shell'] = isinstance(cmd, str) - Logs.debug('runner: %r' % cmd) + self.log_command(cmd, kw) - if 'quiet' in kw: - quiet = kw['quiet'] - del kw['quiet'] - else: - quiet = None + quiet = kw.pop('quiet', None) + to_ret = kw.pop('output', STDOUT) - if 'output' in kw: - to_ret = kw['output'] - del kw['output'] - else: - to_ret = STDOUT - - if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]): - raise Errors.WafError("Program %s not found!" % cmd[0]) + if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0], env=kw.get('env', os.environ)): + # This call isn't a shell command, and if the specified exe doesn't exist, check for a relative path being set + # with cwd and if so assume the caller knows what they're doing and don't pre-emptively fail + if not (cmd[0][0] == '.' and 'cwd' in kw): + raise Errors.WafError('Program %s not found!' % cmd[0]) kw['stdout'] = kw['stderr'] = subprocess.PIPE if quiet is None: self.to_log(cmd) + + cargs = {} + if 'timeout' in kw: + if sys.hexversion >= 0x3030000: + cargs['timeout'] = kw['timeout'] + if not 'start_new_session' in kw: + kw['start_new_session'] = True + del kw['timeout'] + if 'input' in kw: + if kw['input']: + cargs['input'] = kw['input'] + kw['stdin'] = subprocess.PIPE + del kw['input'] + + if 'cwd' in kw: + if not isinstance(kw['cwd'], str): + kw['cwd'] = kw['cwd'].abspath() + + encoding = kw.pop('decode_as', default_encoding) + try: - p = subprocess.Popen(cmd, **kw) - (out, err) = p.communicate() + ret, out, err = Utils.run_process(cmd, kw, cargs) except Exception as e: raise Errors.WafError('Execution failure: %s' % str(e), ex=e) if not isinstance(out, str): - out = out.decode(sys.stdout.encoding or 'iso8859-1') + out = out.decode(encoding, errors='replace') if not isinstance(err, str): - err = err.decode(sys.stdout.encoding or 'iso8859-1') + err = err.decode(encoding, errors='replace') if out and quiet != STDOUT and quiet != BOTH: self.to_log('out: %s' % out) if err and quiet != STDERR and quiet != BOTH: self.to_log('err: %s' % err) - if p.returncode: - e = Errors.WafError('Command %r returned %r' % (cmd, p.returncode)) - e.returncode = p.returncode + if ret: + e = Errors.WafError('Command %r returned %r' % (cmd, ret)) + e.returncode = ret e.stderr = err e.stdout = out raise e @@ -437,7 +484,8 @@ def configure(conf): def fatal(self, msg, ex=None): """ - Raise a configuration error to interrupt the execution immediately:: + Prints an error message in red and stops command execution; this is + usually used in the configuration section:: def configure(conf): conf.fatal('a requirement is missing') @@ -446,24 +494,31 @@ def configure(conf): :type msg: string :param ex: optional exception object :type ex: exception + :raises: :py:class:`waflib.Errors.ConfigurationError` """ if self.logger: self.logger.info('from %s: %s' % (self.path.abspath(), msg)) try: - msg = '%s\n(complete log in %s)' % (msg, self.logger.handlers[0].baseFilename) - except Exception: + logfile = self.logger.handlers[0].baseFilename + except AttributeError: pass + else: + if os.environ.get('WAF_PRINT_FAILURE_LOG'): + # see #1930 + msg = 'Log from (%s):\n%s\n' % (logfile, 
Utils.readf(logfile)) + else: + msg = '%s\n(complete log in %s)' % (msg, logfile) raise self.errors.ConfigurationError(msg, ex=ex) def to_log(self, msg): """ - Log some information to the logger (if present), or to stderr. If the message is empty, - it is not printed:: + Logs information to the logger (if present), or to stderr. + Empty messages are not printed:: def build(bld): bld.to_log('starting the build') - When in doubt, override this method, or provide a logger on the context class. + Provide a logger on the context class or override this method if necessary. :param msg: message :type msg: string @@ -479,9 +534,9 @@ def build(bld): def msg(self, *k, **kw): """ - Print a configuration message of the form ``msg: result``. + Prints a configuration message of the form ``msg: result``. The second part of the message will be in colors. The output - can be disabled easly by setting ``in_msg`` to a positive value:: + can be disabled easily by setting ``in_msg`` to a positive value:: def configure(conf): self.in_msg = 1 @@ -507,7 +562,7 @@ def configure(conf): except KeyError: result = k[1] - color = kw.get('color', None) + color = kw.get('color') if not isinstance(color, str): color = result and 'GREEN' or 'YELLOW' @@ -515,12 +570,12 @@ def configure(conf): def start_msg(self, *k, **kw): """ - Print the beginning of a 'Checking for xxx' message. See :py:meth:`waflib.Context.Context.msg` + Prints the beginning of a 'Checking for xxx' message. See :py:meth:`waflib.Context.Context.msg` """ - if kw.get('quiet', None): + if kw.get('quiet'): return - msg = kw.get('msg', None) or k[0] + msg = kw.get('msg') or k[0] try: if self.in_msg: self.in_msg += 1 @@ -538,19 +593,19 @@ def start_msg(self, *k, **kw): Logs.pprint('NORMAL', "%s :" % msg.ljust(self.line_just), sep='') def end_msg(self, *k, **kw): - """Print the end of a 'Checking for' message. See :py:meth:`waflib.Context.Context.msg`""" - if kw.get('quiet', None): + """Prints the end of a 'Checking for' message. See :py:meth:`waflib.Context.Context.msg`""" + if kw.get('quiet'): return self.in_msg -= 1 if self.in_msg: return - result = kw.get('result', None) or k[0] + result = kw.get('result') or k[0] defcolor = 'GREEN' - if result == True: + if result is True: msg = 'ok' - elif result == False: + elif not result: msg = 'not found' defcolor = 'YELLOW' else: @@ -568,7 +623,17 @@ def end_msg(self, *k, **kw): Logs.pprint(color, msg) def load_special_tools(self, var, ban=[]): - global waf_dir + """ + Loads third-party extensions modules for certain programming languages + by trying to list certain files in the extras/ directory. 
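# Editor's note: illustrative sketch, not part of the patch. The start_msg/end_msg pair
# documented above frames custom configuration checks; end_msg maps True to 'ok' and a
# false value to 'not found' as shown in the code. FROBNICATOR is a hypothetical variable.
def configure(conf):
	conf.start_msg('Checking for the frobnicator')
	found = bool(conf.env.FROBNICATOR)
	conf.end_msg(found)
	conf.msg('Frobnication level', '3', color='CYAN')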
This method + is typically called once for a programming language group, see for + example :py:mod:`waflib.Tools.compiler_c` + + :param var: glob expression, for example 'cxx\\_\\*.py' + :type var: string + :param ban: list of exact file names to exclude + :type ban: list of string + """ if os.path.isdir(waf_dir): lst = self.root.find_node(waf_dir).find_node('waflib/extras').ant_glob(var) for x in lst: @@ -579,13 +644,13 @@ def load_special_tools(self, var, ban=[]): waflibs = PyZipFile(waf_dir) lst = waflibs.namelist() for x in lst: - if not re.match("waflib/extras/%s" % var.replace("*", ".*"), var): + if not re.match('waflib/extras/%s' % var.replace('*', '.*'), var): continue f = os.path.basename(x) doban = False for b in ban: - r = b.replace("*", ".*") - if re.match(b, f): + r = b.replace('*', '.*') + if re.match(r, f): doban = True if not doban: f = f.replace('.py', '') @@ -593,13 +658,13 @@ def load_special_tools(self, var, ban=[]): cache_modules = {} """ -Dictionary holding already loaded modules, keyed by their absolute path. +Dictionary holding already loaded modules (wscript), indexed by their absolute path. The modules are added automatically by :py:func:`waflib.Context.load_module` """ def load_module(path, encoding=None): """ - Load a source file as a python module. + Loads a wscript file as a python module. This method caches results in :py:attr:`waflib.Context.cache_modules` :param path: file path :type path: string @@ -613,55 +678,76 @@ def load_module(path, encoding=None): module = imp.new_module(WSCRIPT_FILE) try: - code = Utils.readf(path, m='rU', encoding=encoding) + code = Utils.readf(path, m='r', encoding=encoding) except EnvironmentError: raise Errors.WafError('Could not read the file %r' % path) module_dir = os.path.dirname(path) sys.path.insert(0, module_dir) - - exec(compile(code, path, 'exec'), module.__dict__) - sys.path.remove(module_dir) + try: + exec(compile(code, path, 'exec'), module.__dict__) + finally: + sys.path.remove(module_dir) cache_modules[path] = module - return module -def load_tool(tool, tooldir=None, ctx=None): +def load_tool(tool, tooldir=None, ctx=None, with_sys_path=True): """ - Import a Waf tool (python module), and store it in the dict :py:const:`waflib.Context.Context.tools` + Imports a Waf tool as a python module, and stores it in the dict :py:const:`waflib.Context.Context.tools` :type tool: string :param tool: Name of the tool :type tooldir: list :param tooldir: List of directories to search for the tool module + :type with_sys_path: boolean + :param with_sys_path: whether or not to search the regular sys.path, besides waf_dir and potentially given tooldirs """ if tool == 'java': tool = 'javaw' # jython else: tool = tool.replace('++', 'xx') - if tooldir: - assert isinstance(tooldir, list) - sys.path = tooldir + sys.path - try: - __import__(tool) + if not with_sys_path: + back_path = sys.path + sys.path = [] + try: + if tooldir: + assert isinstance(tooldir, list) + sys.path = tooldir + sys.path + try: + __import__(tool) + except ImportError as e: + e.waf_sys_path = list(sys.path) + raise + finally: + for d in tooldir: + sys.path.remove(d) ret = sys.modules[tool] Context.tools[tool] = ret return ret - finally: - for d in tooldir: - sys.path.remove(d) - else: - for x in ('waflib.Tools.%s', 'waflib.extras.%s', 'waflib.%s', '%s'): + else: + if not with_sys_path: + sys.path.insert(0, waf_dir) try: - __import__(x % tool) - break - except ImportError: - x = None - if x is None: # raise an exception - __import__(tool) - ret = sys.modules[x % tool] 
- Context.tools[tool] = ret - return ret + for x in ('waflib.Tools.%s', 'waflib.extras.%s', 'waflib.%s', '%s'): + try: + __import__(x % tool) + break + except ImportError: + x = None + else: # raise an exception + __import__(tool) + except ImportError as e: + e.waf_sys_path = list(sys.path) + raise + finally: + if not with_sys_path: + sys.path.remove(waf_dir) + ret = sys.modules[x % tool] + Context.tools[tool] = ret + return ret + finally: + if not with_sys_path: + sys.path += back_path diff --git a/waflib/Errors.py b/waflib/Errors.py index 104f7d8215..bf75c1b661 100644 --- a/waflib/Errors.py +++ b/waflib/Errors.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # encoding: utf-8 -# Thomas Nagy, 2010 (ita) +# Thomas Nagy, 2010-2018 (ita) """ Exceptions used in the Waf code @@ -17,6 +17,7 @@ def __init__(self, msg='', ex=None): :param ex: exception causing this error (optional) :type ex: exception """ + Exception.__init__(self) self.msg = msg assert not isinstance(msg, Exception) @@ -35,9 +36,7 @@ def __str__(self): return str(self.msg) class BuildError(WafError): - """ - Errors raised during the build and install phases - """ + """Error raised during the build and install phases""" def __init__(self, error_tasks=[]): """ :param error_tasks: tasks that could not complete normally @@ -47,24 +46,23 @@ def __init__(self, error_tasks=[]): WafError.__init__(self, self.format_error()) def format_error(self): - """format the error messages from the tasks that failed""" + """Formats the error messages from the tasks that failed""" lst = ['Build failed'] for tsk in self.tasks: txt = tsk.format_error() - if txt: lst.append(txt) + if txt: + lst.append(txt) return '\n'.join(lst) class ConfigurationError(WafError): - """ - Configuration exception raised in particular by :py:meth:`waflib.Context.Context.fatal` - """ + """Configuration exception raised in particular by :py:meth:`waflib.Context.Context.fatal`""" pass class TaskRescan(WafError): - """task-specific exception type, trigger a signature recomputation""" + """Task-specific exception type signalling required signature recalculations""" pass class TaskNotReady(WafError): - """task-specific exception type, raised when the task signature cannot be computed""" + """Task-specific exception type signalling that task signatures cannot be computed""" pass diff --git a/waflib/Logs.py b/waflib/Logs.py index a880719913..298411db51 100644 --- a/waflib/Logs.py +++ b/waflib/Logs.py @@ -1,12 +1,12 @@ #!/usr/bin/env python # encoding: utf-8 -# Thomas Nagy, 2005-2010 (ita) +# Thomas Nagy, 2005-2018 (ita) """ logging, colors, terminal width and pretty-print """ -import os, re, traceback, sys, types +import os, re, traceback, sys from waflib import Utils, ansiterm if not os.environ.get('NOSYNC', False): @@ -20,11 +20,18 @@ # in case someone uses the root logger import logging -LOG_FORMAT = "%(asctime)s %(c1)s%(zone)s%(c2)s %(message)s" -HOUR_FORMAT = "%H:%M:%S" +LOG_FORMAT = os.environ.get('WAF_LOG_FORMAT', '%(asctime)s %(c1)s%(zone)s%(c2)s %(message)s') +HOUR_FORMAT = os.environ.get('WAF_HOUR_FORMAT', '%H:%M:%S') + +zones = [] +""" +See :py:class:`waflib.Logs.log_filter` +""" -zones = '' verbose = 0 +""" +Global verbosity level, see :py:func:`waflib.Logs.debug` and :py:func:`waflib.Logs.error` +""" colors_lst = { 'USE' : True, @@ -35,6 +42,7 @@ 'PINK' :'\x1b[35m', 'BLUE' :'\x1b[01;34m', 'CYAN' :'\x1b[36m', +'GREY' :'\x1b[37m', 'NORMAL':'\x1b[0m', 'cursor_on' :'\x1b[?25h', 'cursor_off' :'\x1b[?25l', @@ -42,7 +50,21 @@ indicator = '\r\x1b[K%s%s%s' +try: + unicode +except 
NameError: + unicode = None + def enable_colors(use): + """ + If *1* is given, then the system will perform a few verifications + before enabling colors, such as checking whether the interpreter + is running in a terminal. A value of zero will disable colors, + and a value above *1* will force colors. + + :param use: whether to enable colors or not + :type use: integer + """ if use == 1: if not (sys.stderr.isatty() or sys.stdout.isatty()): use = 0 @@ -68,15 +90,23 @@ def get_term_cols(): return 80 get_term_cols.__doc__ = """ - Get the console width in characters. + Returns the console width in characters. :return: the number of characters per line :rtype: int """ def get_color(cl): - if not colors_lst['USE']: return '' - return colors_lst.get(cl, '') + """ + Returns the ansi sequence corresponding to the given color name. + An empty string is returned when coloring is globally disabled. + + :param cl: color name in capital letters + :type cl: string + """ + if colors_lst['USE']: + return colors_lst.get(cl, '') + return '' class color_dict(object): """attribute-based color access, eg: colors.PINK""" @@ -90,7 +120,7 @@ def __call__(self, a): re_log = re.compile(r'(\w+): (.*)', re.M) class log_filter(logging.Filter): """ - The waf logs are of the form 'name: message', and can be filtered by 'waf --zones=name'. + Waf logs are of the form 'name: message', and can be filtered by 'waf --zones=name'. For example, the following:: from waflib import Logs @@ -100,17 +130,14 @@ class log_filter(logging.Filter): $ waf --zones=test """ - def __init__(self, name=None): - pass + def __init__(self, name=''): + logging.Filter.__init__(self, name) def filter(self, rec): """ - filter a record, adding the colors automatically - - * error: red - * warning: yellow + Filters log records by zone and by logging level - :param rec: message to record + :param rec: log entry """ rec.zone = rec.module if rec.levelno >= logging.INFO: @@ -130,6 +157,9 @@ def filter(self, rec): class log_handler(logging.StreamHandler): """Dispatches messages to stderr/stdout depending on the severity level""" def emit(self, record): + """ + Delegates the functionality to :py:meth:`waflib.Log.log_handler.emit_override` + """ # default implementation try: try: @@ -147,9 +177,12 @@ def emit(self, record): self.handleError(record) def emit_override(self, record, **kw): + """ + Writes the log record to the desired stream (stderr/stdout) + """ self.terminator = getattr(record, 'terminator', '\n') stream = self.stream - if hasattr(types, "UnicodeType"): + if unicode: # python2 msg = self.formatter.format(record) fs = '%s' + self.terminator @@ -163,7 +196,7 @@ def emit_override(self, record, **kw): else: stream.write(fs % msg) except UnicodeError: - stream.write((fs % msg).encode("UTF-8")) + stream.write((fs % msg).encode('utf-8')) else: logging.StreamHandler.emit(self, record) @@ -173,7 +206,10 @@ def __init__(self): logging.Formatter.__init__(self, LOG_FORMAT, HOUR_FORMAT) def format(self, rec): - """Messages in warning, error or info mode are displayed in color by default""" + """ + Formats records and adds colors as needed. The records do not get + a leading hour format if the logging level is above *INFO*. 
+ """ try: msg = rec.msg.decode('utf-8') except Exception: @@ -194,10 +230,17 @@ def format(self, rec): c2 = getattr(rec, 'c2', colors.NORMAL) msg = '%s%s%s' % (c1, msg, c2) else: - msg = msg.replace('\r', '\n') - msg = re.sub(r'\x1B\[(K|.*?(m|h|l))', '', msg) + # remove single \r that make long lines in text files + # and other terminal commands + msg = re.sub(r'\r(?!\n)|\x1B\[(K|.*?(m|h|l))', '', msg) - if rec.levelno >= logging.INFO: # ?? + if rec.levelno >= logging.INFO: + # the goal of this is to format without the leading "Logs, hour" prefix + if rec.args: + try: + return msg % rec.args + except UnicodeDecodeError: + return msg.encode('utf-8') % rec.args return msg rec.msg = msg @@ -210,19 +253,17 @@ def format(self, rec): def debug(*k, **kw): """ - Wrap logging.debug, the output is filtered for performance reasons + Wraps logging.debug and discards messages if the verbosity level :py:attr:`waflib.Logs.verbose` ≤ 0 """ if verbose: k = list(k) k[0] = k[0].replace('\n', ' ') - global log log.debug(*k, **kw) def error(*k, **kw): """ - Wrap logging.errors, display the origin of the message when '-vv' is set + Wrap logging.errors, adds the stack trace when the verbosity level :py:attr:`waflib.Logs.verbose` ≥ 2 """ - global log log.error(*k, **kw) if verbose > 2: st = traceback.extract_stack() @@ -230,28 +271,27 @@ def error(*k, **kw): st = st[:-1] buf = [] for filename, lineno, name, line in st: - buf.append(' File "%s", line %d, in %s' % (filename, lineno, name)) + buf.append(' File %r, line %d, in %s' % (filename, lineno, name)) if line: buf.append(' %s' % line.strip()) - if buf: log.error("\n".join(buf)) + if buf: + log.error('\n'.join(buf)) def warn(*k, **kw): """ - Wrap logging.warn + Wraps logging.warning """ - global log - log.warn(*k, **kw) + log.warning(*k, **kw) def info(*k, **kw): """ - Wrap logging.info + Wraps logging.info """ - global log log.info(*k, **kw) def init_log(): """ - Initialize the loggers globally + Initializes the logger :py:attr:`waflib.Logs.log` """ global log log = logging.getLogger('waflib') @@ -265,7 +305,7 @@ def init_log(): def make_logger(path, name): """ - Create a simple logger, which is often used to redirect the context command output:: + Creates a simple logger, which is often used to redirect the context command output:: from waflib import Logs bld.logger = Logs.make_logger('test.log', 'build') @@ -285,7 +325,11 @@ def make_logger(path, name): :type name: string """ logger = logging.getLogger(name) - hdlr = logging.FileHandler(path, 'w') + if sys.hexversion > 0x3000000: + encoding = sys.stdout.encoding + else: + encoding = None + hdlr = logging.FileHandler(path, 'w', encoding=encoding) formatter = logging.Formatter('%(message)s') hdlr.setFormatter(formatter) logger.addHandler(hdlr) @@ -294,7 +338,7 @@ def make_logger(path, name): def make_mem_logger(name, to_log, size=8192): """ - Create a memory logger to avoid writing concurrently to the main logger + Creates a memory logger to avoid writing concurrently to the main logger """ from logging.handlers import MemoryHandler logger = logging.getLogger(name) @@ -308,19 +352,19 @@ def make_mem_logger(name, to_log, size=8192): def free_logger(logger): """ - Free the resources held by the loggers created through make_logger or make_mem_logger. + Frees the resources held by the loggers created through make_logger or make_mem_logger. This is used for file cleanup and for handler removal (logger objects are re-used). 
""" try: for x in logger.handlers: x.close() logger.removeHandler(x) - except Exception as e: + except Exception: pass def pprint(col, msg, label='', sep='\n'): """ - Print messages in color immediately on stderr:: + Prints messages in color immediately on stderr:: from waflib import Logs Logs.pprint('RED', 'Something bad just happened') @@ -334,5 +378,5 @@ def pprint(col, msg, label='', sep='\n'): :param sep: a string to append at the end (line separator) :type sep: string """ - info("%s%s%s %s" % (colors(col), msg, colors.NORMAL, label), extra={'terminator':sep}) + info('%s%s%s %s', colors(col), msg, colors.NORMAL, label, extra={'terminator':sep}) diff --git a/waflib/Node.py b/waflib/Node.py index b998ff2674..9a9ed0a62a 100644 --- a/waflib/Node.py +++ b/waflib/Node.py @@ -1,9 +1,9 @@ #!/usr/bin/env python # encoding: utf-8 -# Thomas Nagy, 2005-2010 (ita) +# Thomas Nagy, 2005-2018 (ita) """ -Node: filesystem structure, contains lists of nodes +Node: filesystem structure #. Each file/folder is represented by exactly one node. @@ -11,13 +11,14 @@ Unused class members can increase the `.wafpickle` file size sensibly. #. Node objects should never be created directly, use - the methods :py:func:`Node.make_node` or :py:func:`Node.find_node` + the methods :py:func:`Node.make_node` or :py:func:`Node.find_node` for the low-level operations -#. The methods :py:func:`Node.find_resource`, :py:func:`Node.find_dir` :py:func:`Node.find_or_declare` should be +#. The methods :py:func:`Node.find_resource`, :py:func:`Node.find_dir` :py:func:`Node.find_or_declare` must be used when a build context is present -#. Each instance of :py:class:`waflib.Context.Context` has a unique :py:class:`Node` subclass. - (:py:class:`waflib.Node.Nod3`, see the :py:class:`waflib.Context.Context` initializer). A reference to the context owning a node is held as self.ctx +#. Each instance of :py:class:`waflib.Context.Context` has a unique :py:class:`Node` subclass required for serialization. + (:py:class:`waflib.Node.Nod3`, see the :py:class:`waflib.Context.Context` initializer). 
A reference to the context + owning a node is held as *self.ctx* """ import os, re, sys, shutil @@ -29,6 +30,7 @@ **/.#* **/%*% **/._* +**/*.swp **/CVS **/CVS/** **/.cvsignore @@ -59,133 +61,273 @@ recursive traversal in :py:meth:`waflib.Node.Node.ant_glob` """ -# TODO waf 1.9 -split_path = Utils.split_path_unix -split_path_cygwin = Utils.split_path_cygwin -split_path_win32 = Utils.split_path_win32 -if sys.platform == 'cygwin': - split_path = split_path_cygwin -elif Utils.is_win32: - split_path = split_path_win32 +def ant_matcher(s, ignorecase): + reflags = re.I if ignorecase else 0 + ret = [] + for x in Utils.to_list(s): + x = x.replace('\\', '/').replace('//', '/') + if x.endswith('/'): + x += '**' + accu = [] + for k in x.split('/'): + if k == '**': + accu.append(k) + else: + k = k.replace('.', '[.]').replace('*', '.*').replace('?', '.').replace('+', '\\+') + k = '^%s$' % k + try: + exp = re.compile(k, flags=reflags) + except Exception as e: + raise Errors.WafError('Invalid pattern: %s' % k, e) + else: + accu.append(exp) + ret.append(accu) + return ret + +def ant_sub_filter(name, nn): + ret = [] + for lst in nn: + if not lst: + pass + elif lst[0] == '**': + ret.append(lst) + if len(lst) > 1: + if lst[1].match(name): + ret.append(lst[2:]) + else: + ret.append([]) + elif lst[0].match(name): + ret.append(lst[1:]) + return ret + +def ant_sub_matcher(name, pats): + nacc = ant_sub_filter(name, pats[0]) + nrej = ant_sub_filter(name, pats[1]) + if [] in nrej: + nacc = [] + return [nacc, nrej] class Node(object): """ - This class is organized in two parts + This class is organized in two parts: * The basic methods meant for filesystem access (compute paths, create folders, etc) * The methods bound to a :py:class:`waflib.Build.BuildContext` (require ``bld.srcnode`` and ``bld.bldnode``) - - The Node objects are not thread safe in any way. """ + dict_class = dict - __slots__ = ('name', 'sig', 'children', 'parent', 'cache_abspath', 'cache_isdir', 'cache_sig') + """ + Subclasses can provide a dict class to enable case insensitivity for example. + """ + + __slots__ = ('name', 'parent', 'children', 'cache_abspath', 'cache_isdir') def __init__(self, name, parent): + """ + .. note:: Use :py:func:`Node.make_node` or :py:func:`Node.find_node` instead of calling this constructor + """ self.name = name self.parent = parent - if parent: if name in parent.children: raise Errors.WafError('node %s exists in the parent files %r already' % (name, parent)) parent.children[name] = self def __setstate__(self, data): - "Deserializes from data" + "Deserializes node information, used for persistence" self.name = data[0] self.parent = data[1] if data[2] is not None: # Issue 1480 self.children = self.dict_class(data[2]) - if data[3] is not None: - self.sig = data[3] def __getstate__(self): - "Serialize the node info" - return (self.name, self.parent, getattr(self, 'children', None), getattr(self, 'sig', None)) + "Serializes node information, used for persistence" + return (self.name, self.parent, getattr(self, 'children', None)) def __str__(self): - "String representation (name), for debugging purposes" - return self.name + """ + String representation (abspath), for debugging purposes - def __repr__(self): - "String representation (abspath), for debugging purposes" + :rtype: string + """ return self.abspath() - def __hash__(self): - "Node hash, used for storage in dicts. This hash is not persistent." 
- return id(self) + def __repr__(self): + """ + String representation (abspath), for debugging purposes - def __eq__(self, node): - "Node comparison, based on the IDs" - return id(self) == id(node) + :rtype: string + """ + return self.abspath() def __copy__(self): - "Implemented to prevent nodes from being copied (raises an exception)" + """ + Provided to prevent nodes from being copied + + :raises: :py:class:`waflib.Errors.WafError` + """ raise Errors.WafError('nodes are not supposed to be copied') - def read(self, flags='r', encoding='ISO8859-1'): + def read(self, flags='r', encoding='latin-1'): """ - Return the contents of the file represented by this node:: + Reads and returns the contents of the file represented by this node, see :py:func:`waflib.Utils.readf`:: def build(bld): bld.path.find_node('wscript').read() - :type fname: string - :param fname: Path to file - :type m: string - :param m: Open mode - :rtype: string + :param flags: Open mode + :type flags: string + :param encoding: encoding value for Python3 + :type encoding: string + :rtype: string or bytes :return: File contents """ return Utils.readf(self.abspath(), flags, encoding) - def write(self, data, flags='w', encoding='ISO8859-1'): + def write(self, data, flags='w', encoding='latin-1'): """ - Write some text to the physical file represented by this node:: + Writes data to the file represented by this node, see :py:func:`waflib.Utils.writef`:: def build(bld): bld.path.make_node('foo.txt').write('Hello, world!') - :type data: string :param data: data to write - :type flags: string + :type data: string :param flags: Write mode + :type flags: string + :param encoding: encoding value for Python3 + :type encoding: string """ Utils.writef(self.abspath(), data, flags, encoding) + def read_json(self, convert=True, encoding='utf-8'): + """ + Reads and parses the contents of this node as JSON (Python ≥ 2.6):: + + def build(bld): + bld.path.find_node('abc.json').read_json() + + Note that this by default automatically decodes unicode strings on Python2, unlike what the Python JSON module does. + + :type convert: boolean + :param convert: Prevents decoding of unicode strings on Python2 + :type encoding: string + :param encoding: The encoding of the file to read. 
This default to UTF8 as per the JSON standard + :rtype: object + :return: Parsed file contents + """ + import json # Python 2.6 and up + object_pairs_hook = None + if convert and sys.hexversion < 0x3000000: + try: + _type = unicode + except NameError: + _type = str + + def convert(value): + if isinstance(value, list): + return [convert(element) for element in value] + elif isinstance(value, _type): + return str(value) + else: + return value + + def object_pairs(pairs): + return dict((str(pair[0]), convert(pair[1])) for pair in pairs) + + object_pairs_hook = object_pairs + + return json.loads(self.read(encoding=encoding), object_pairs_hook=object_pairs_hook) + + def write_json(self, data, pretty=True): + """ + Writes a python object as JSON to disk (Python ≥ 2.6) as UTF-8 data (JSON standard):: + + def build(bld): + bld.path.find_node('xyz.json').write_json(199) + + :type data: object + :param data: The data to write to disk + :type pretty: boolean + :param pretty: Determines if the JSON will be nicely space separated + """ + import json # Python 2.6 and up + indent = 2 + separators = (',', ': ') + sort_keys = pretty + newline = os.linesep + if not pretty: + indent = None + separators = (',', ':') + newline = '' + output = json.dumps(data, indent=indent, separators=separators, sort_keys=sort_keys) + newline + self.write(output, encoding='utf-8') + + def exists(self): + """ + Returns whether the Node is present on the filesystem + + :rtype: bool + """ + return os.path.exists(self.abspath()) + + def isdir(self): + """ + Returns whether the Node represents a folder + + :rtype: bool + """ + return os.path.isdir(self.abspath()) + def chmod(self, val): """ - Change file/dir permissions:: + Changes the file/dir permissions:: def build(bld): bld.path.chmod(493) # 0755 """ os.chmod(self.abspath(), val) - def delete(self): - """Delete the file/folder, and remove this node from the tree. Do not use this object after calling this method.""" + def delete(self, evict=True): + """ + Removes the file/folder from the filesystem (equivalent to `rm -rf`), and remove this object from the Node tree. + Do not use this object after calling this method. 
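# Editor's note: illustrative sketch, not part of the patch. Round-tripping data through the
# write_json/read_json helpers documented above; the file is written as UTF-8 and the keys
# are sorted when pretty printing is enabled (the default).
def build(bld):
	node = bld.path.make_node('settings.json')
	node.write_json({'name': 'demo', 'version': 1})
	data = node.read_json()
	assert data['version'] == 1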
+ """ try: try: - if hasattr(self, 'children'): + if os.path.isdir(self.abspath()): shutil.rmtree(self.abspath()) else: os.remove(self.abspath()) - except OSError as e: + except OSError: if os.path.exists(self.abspath()): - raise e + raise finally: - self.evict() + if evict: + self.evict() def evict(self): - """Internal - called when a node is removed""" + """ + Removes this node from the Node tree + """ del self.parent.children[self.name] def suffix(self): - """Return the file extension""" + """ + Returns the file rightmost extension, for example `a.b.c.d → .d` + + :rtype: string + """ k = max(0, self.name.rfind('.')) return self.name[k:] def height(self): - """Depth in the folder hierarchy from the filesystem root or from all the file drives""" + """ + Returns the depth in the folder hierarchy from the filesystem root or from all the file drives + + :returns: filesystem depth + :rtype: integer + """ d = self val = -1 while d: @@ -194,17 +336,23 @@ def height(self): return val def listdir(self): - """List the folder contents""" + """ + Lists the folder contents + + :returns: list of file/folder names ordered alphabetically + :rtype: list of string + """ lst = Utils.listdir(self.abspath()) lst.sort() return lst def mkdir(self): """ - Create a folder represented by this node, creating intermediate nodes as needed - An exception will be raised only when the folder cannot possibly exist there + Creates a folder represented by this node. Intermediate folders are created as needed. + + :raises: :py:class:`waflib.Errors.WafError` when the folder is missing """ - if getattr(self, 'cache_isdir', None): + if self.isdir(): return try: @@ -218,26 +366,31 @@ def mkdir(self): except OSError: pass - if not os.path.isdir(self.abspath()): - raise Errors.WafError('Could not create the directory %s' % self.abspath()) + if not self.isdir(): + raise Errors.WafError('Could not create the directory %r' % self) try: self.children except AttributeError: self.children = self.dict_class() - self.cache_isdir = True - def find_node(self, lst): """ - Find a node on the file system (files or folders), create intermediate nodes as needed + Finds a node on the file system (files or folders), and creates the corresponding Node objects if it exists - :param lst: path + :param lst: relative path :type lst: string or list of string + :returns: The corresponding Node object or None if no entry was found on the filesystem + :rtype: :py:class:´waflib.Node.Node´ """ if isinstance(lst, str): - lst = [x for x in split_path(lst) if x and x != '.'] + lst = [x for x in Utils.split_path(lst) if x and x != '.'] + + if lst and lst[0].startswith('\\\\') and not self.parent: + node = self.ctx.root.make_node(lst[0]) + node.cache_isdir = True + return node.find_node(lst[1:]) cur = self for x in lst: @@ -251,45 +404,33 @@ def find_node(self, lst): cur.children = self.dict_class() else: try: - cur = cur.children[x] + cur = ch[x] continue except KeyError: pass # optimistic: create the node first then look if it was correct to do so cur = self.__class__(x, cur) - try: - os.stat(cur.abspath()) - except OSError: + if not cur.exists(): cur.evict() return None - ret = cur - - try: - os.stat(ret.abspath()) - except OSError: - ret.evict() + if not cur.exists(): + cur.evict() return None - try: - while not getattr(cur.parent, 'cache_isdir', None): - cur = cur.parent - cur.cache_isdir = True - except AttributeError: - pass - - return ret + return cur def make_node(self, lst): """ - Find or create a node without looking on the filesystem + Returns or 
creates a Node object corresponding to the input path without considering the filesystem. - :param lst: path + :param lst: relative path :type lst: string or list of string + :rtype: :py:class:´waflib.Node.Node´ """ if isinstance(lst, str): - lst = [x for x in split_path(lst) if x and x != '.'] + lst = [x for x in Utils.split_path(lst) if x and x != '.'] cur = self for x in lst: @@ -297,24 +438,27 @@ def make_node(self, lst): cur = cur.parent or cur continue - if getattr(cur, 'children', {}): - if x in cur.children: - cur = cur.children[x] - continue - else: + try: + cur = cur.children[x] + except AttributeError: cur.children = self.dict_class() + except KeyError: + pass + else: + continue cur = self.__class__(x, cur) return cur def search_node(self, lst): """ - Search for a node without looking on the filesystem + Returns a Node previously defined in the data structure. The filesystem is not considered. - :param lst: path + :param lst: relative path :type lst: string or list of string + :rtype: :py:class:´waflib.Node.Node´ or None if there is no entry in the Node datastructure """ if isinstance(lst, str): - lst = [x for x in split_path(lst) if x and x != '.'] + lst = [x for x in Utils.split_path(lst) if x and x != '.'] cur = self for x in lst: @@ -338,8 +482,9 @@ def build(bld): :param node: path to use as a reference :type node: :py:class:`waflib.Node.Node` + :returns: a relative path or an absolute one if that is better + :rtype: string """ - c1 = self c2 = node @@ -359,7 +504,7 @@ def build(bld): c2 = c2.parent c2h -= 1 - while id(c1) != id(c2): + while not c1 is c2: lst.append(c1.name) up += 1 @@ -367,17 +512,17 @@ def build(bld): c2 = c2.parent if c1.parent: - for i in range(up): - lst.append('..') + lst.extend(['..'] * up) + lst.reverse() + return os.sep.join(lst) or '.' else: - if lst and not Utils.is_win32: - lst.append('') - lst.reverse() - return os.sep.join(lst) or '.' + return self.abspath() def abspath(self): """ - Absolute path. A cache is kept in the context as ``cache_node_abspath`` + Returns the absolute path. A cache is kept in the context as ``cache_node_abspath`` + + :rtype: string """ try: return self.cache_abspath @@ -409,9 +554,20 @@ def abspath(self): self.cache_abspath = val return val + def relpath(self): + """ + Returns the relative path. This is used in place of abspath() to keep paths short + for environments like cygwin where path lengths to file operations are severely limited + (for example, when cross-compiling for arm-none-eabi on cygwin) + + :rtype: string + """ + return os.path.relpath(self.abspath()) + + def is_child_of(self, node): """ - Does this node belong to the subtree node?:: + Returns whether the object belongs to a subtree of the input node:: def build(bld): node = bld.path.find_node('wscript') @@ -419,17 +575,18 @@ def build(bld): :param node: path to use as a reference :type node: :py:class:`waflib.Node.Node` + :rtype: bool """ p = self diff = self.height() - node.height() while diff > 0: diff -= 1 p = p.parent - return id(p) == id(node) + return p is node - def ant_iter(self, accept=None, maxdepth=25, pats=[], dir=False, src=True, remove=True): + def ant_iter(self, accept=None, maxdepth=25, pats=[], dir=False, src=True, remove=True, quiet=False): """ - Semi-private and recursive method used by ant_glob. + Recursive method used by :py:meth:`waflib.Node.ant_glob`. 
:param accept: function used for accepting/rejecting a node, returns the patterns that can still be accepted in recursion :type accept: function @@ -443,9 +600,12 @@ def ant_iter(self, accept=None, maxdepth=25, pats=[], dir=False, src=True, remov :type src: bool :param remove: remove files/folders that do not exist (True by default) :type remove: bool + :param quiet: disable build directory traversal warnings (verbose mode) + :type quiet: bool + :returns: A generator object to iterate from + :rtype: iterator """ dircont = self.listdir() - dircont.sort() try: lst = set(self.children.keys()) @@ -463,25 +623,23 @@ def ant_iter(self, accept=None, maxdepth=25, pats=[], dir=False, src=True, remov node = self.make_node([name]) - isdir = os.path.isdir(node.abspath()) + isdir = node.isdir() if accepted: if isdir: if dir: yield node - else: - if src: - yield node + elif src: + yield node - if getattr(node, 'cache_isdir', None) or isdir: + if isdir: node.cache_isdir = True if maxdepth: - for k in node.ant_iter(accept=accept, maxdepth=maxdepth - 1, pats=npats, dir=dir, src=src, remove=remove): + for k in node.ant_iter(accept=accept, maxdepth=maxdepth - 1, pats=npats, dir=dir, src=src, remove=remove, quiet=quiet): yield k - raise StopIteration def ant_glob(self, *k, **kw): """ - This method is used for finding files across folders. It behaves like ant patterns: + Finds files across folders and returns Node objects: * ``**/*`` find all files recursively * ``**/*.class`` find all files ending in .class @@ -490,14 +648,51 @@ def ant_glob(self, *k, **kw): For example:: def configure(cfg): - cfg.path.ant_glob('**/*.cpp') # find all .cpp files - cfg.root.ant_glob('etc/*.txt') # using the filesystem root can be slow - cfg.path.ant_glob('*.cpp', excl=['*.c'], src=True, dir=False) + # find all .cpp files + cfg.path.ant_glob('**/*.cpp') + # find particular files from the root filesystem (can be slow) + cfg.root.ant_glob('etc/*.txt') + # simple exclusion rule example + cfg.path.ant_glob('*.c*', excl=['*.c'], src=True, dir=False) - For more information see http://ant.apache.org/manual/dirtasks.html + For more information about the patterns, consult http://ant.apache.org/manual/dirtasks.html + Please remember that the '..' sequence does not represent the parent directory:: + + def configure(cfg): + cfg.path.ant_glob('../*.h') # incorrect + cfg.path.parent.ant_glob('*.h') # correct + + The Node structure is itself a filesystem cache, so certain precautions must + be taken while matching files in the build or installation phases. + Node objects that do not have a corresponding file or folder are garbage-collected by default. + This garbage collection is usually required to prevent returning files that do not + exist anymore. Yet, this may also remove Node objects of files that are yet-to-be built. + + This typically happens when trying to match files in the build directory, + but there are also cases when files are created in the source directory. + Run ``waf -v`` to display any warnings, and consider passing ``remove=False`` + when matching files in the build directory. + + Since ant_glob can traverse both source and build folders, it is a best practice + to call this method only from the most specific build node:: + + def build(bld): + # traverses the build directory, may need ``remove=False``: + bld.path.ant_glob('project/dir/**/*.h') + # better, no accidental build directory traversal: + bld.path.find_node('project/dir').ant_glob('**/*.h') # best + + In addition, files and folders are listed immediately.
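For the build-directory case described above, a minimal sketch (the paths are hypothetical) that keeps the Node objects of files which are not built yet::

    def build(bld):
        # generated headers may not exist yet, so do not evict their nodes
        hdrs = bld.path.get_bld().ant_glob('gen/**/*.h', remove=False, quiet=True)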
When matching files in the + build folders, consider passing ``generator=True`` so that the generator object + returned can defer computation to a later stage. For example:: + + def build(bld): + bld(rule='tar xvf ${SRC}', source='arch.tar') + bld.add_group() + gen = bld.bldnode.ant_glob("*.h", generator=True, remove=True) + # files will be listed only after the arch.tar is unpacked + bld(rule='ls ${SRC}', source=gen, name='XYZ') - The nodes that correspond to files and folders that do not exist will be removed. To prevent this - behaviour, pass 'remove=False' :param incl: ant patterns or list of patterns to include :type incl: string or list of strings @@ -507,126 +702,90 @@ def configure(cfg): :type dir: bool :param src: return files (True by default) :type src: bool - :param remove: remove files/folders that do not exist (True by default) - :type remove: bool :param maxdepth: maximum depth of recursion :type maxdepth: int :param ignorecase: ignore case while matching (False by default) :type ignorecase: bool + :param generator: Whether to evaluate the Nodes lazily + :type generator: bool + :param remove: remove files/folders that do not exist (True by default) + :type remove: bool + :param quiet: disable build directory traversal warnings (verbose mode) + :type quiet: bool + :returns: The corresponding Node objects as a list or as a generator object (generator=True) + :rtype: by default, list of :py:class:`waflib.Node.Node` instances """ - src = kw.get('src', True) - dir = kw.get('dir', False) - + dir = kw.get('dir') excl = kw.get('excl', exclude_regs) incl = k and k[0] or kw.get('incl', '**') - reflags = kw.get('ignorecase', 0) and re.I - - def to_pat(s): - lst = Utils.to_list(s) - ret = [] - for x in lst: - x = x.replace('\\', '/').replace('//', '/') - if x.endswith('/'): - x += '**' - lst2 = x.split('/') - accu = [] - for k in lst2: - if k == '**': - accu.append(k) - else: - k = k.replace('.', '[.]').replace('*','.*').replace('?', '.').replace('+', '\\+') - k = '^%s$' % k - try: - #print "pattern", k - accu.append(re.compile(k, flags=reflags)) - except Exception as e: - raise Errors.WafError("Invalid pattern: %s" % k, e) - ret.append(accu) - return ret - - def filtre(name, nn): - ret = [] - for lst in nn: - if not lst: - pass - elif lst[0] == '**': - ret.append(lst) - if len(lst) > 1: - if lst[1].match(name): - ret.append(lst[2:]) - else: - ret.append([]) - elif lst[0].match(name): - ret.append(lst[1:]) - return ret - - def accept(name, pats): - nacc = filtre(name, pats[0]) - nrej = filtre(name, pats[1]) - if [] in nrej: - nacc = [] - return [nacc, nrej] - - ret = [x for x in self.ant_iter(accept=accept, pats=[to_pat(incl), to_pat(excl)], maxdepth=kw.get('maxdepth', 25), dir=dir, src=src, remove=kw.get('remove', True))] - if kw.get('flat', False): - return ' '.join([x.path_from(self) for x in ret]) + remove = kw.get('remove', True) + maxdepth = kw.get('maxdepth', 25) + ignorecase = kw.get('ignorecase', False) + quiet = kw.get('quiet', False) + pats = (ant_matcher(incl, ignorecase), ant_matcher(excl, ignorecase)) - return ret + if kw.get('generator'): + return Utils.lazy_generator(self.ant_iter, (ant_sub_matcher, maxdepth, pats, dir, src, remove, quiet)) + + it = self.ant_iter(ant_sub_matcher, maxdepth, pats, dir, src, remove, quiet) + if kw.get('flat'): + # returns relative paths as a space-delimited string + # prefer Node objects whenever possible + return ' '.join(x.path_from(self) for x in it) + return list(it) - # 
-------------------------------------------------------------------------------- - # the following methods require the source/build folders (bld.srcnode/bld.bldnode) - # using a subclass is a possibility, but is that really necessary? - # -------------------------------------------------------------------------------- + # ---------------------------------------------------------------------------- + # the methods below require the source/build folders (bld.srcnode/bld.bldnode) def is_src(self): """ - True if the node is below the source directory - note: !is_src does not imply is_bld() + Returns True if the node is below the source directory. Note that ``!is_src() ≠ is_bld()`` :rtype: bool """ cur = self - x = id(self.ctx.srcnode) - y = id(self.ctx.bldnode) + x = self.ctx.srcnode + y = self.ctx.bldnode while cur.parent: - if id(cur) == y: + if cur is y: return False - if id(cur) == x: + if cur is x: return True cur = cur.parent return False def is_bld(self): """ - True if the node is below the build directory - note: !is_bld does not imply is_src + Returns True if the node is below the build directory. Note that ``!is_bld() ≠ is_src()`` :rtype: bool """ cur = self - y = id(self.ctx.bldnode) + y = self.ctx.bldnode while cur.parent: - if id(cur) == y: + if cur is y: return True cur = cur.parent return False def get_src(self): """ - Return the equivalent src node (or self if not possible) + Returns the corresponding Node object in the source directory (or self if already + under the source directory). Use this method only if the purpose is to create + a Node object (this is common with folders but not with files, see ticket 1937) :rtype: :py:class:`waflib.Node.Node` """ cur = self - x = id(self.ctx.srcnode) - y = id(self.ctx.bldnode) + x = self.ctx.srcnode + y = self.ctx.bldnode lst = [] while cur.parent: - if id(cur) == y: + if cur is y: lst.reverse() - return self.ctx.srcnode.make_node(lst) - if id(cur) == x: + return x.make_node(lst) + if cur is x: return self lst.append(cur.name) cur = cur.parent @@ -634,18 +793,20 @@ def get_src(self): def get_bld(self): """ - Return the equivalent bld node (or self if not possible) + Return the corresponding Node object in the build directory (or self if already + under the build directory). Use this method only if the purpose is to create + a Node object (this is common with folders but not with files, see ticket 1937) :rtype: :py:class:`waflib.Node.Node` """ cur = self - x = id(self.ctx.srcnode) - y = id(self.ctx.bldnode) + x = self.ctx.srcnode + y = self.ctx.bldnode lst = [] while cur.parent: - if id(cur) == y: + if cur is y: return self - if id(cur) == x: + if cur is x: lst.reverse() return self.ctx.bldnode.make_node(lst) lst.append(cur.name) @@ -658,75 +819,66 @@ def get_bld(self): def find_resource(self, lst): """ - Try to find a declared build node or a source file + Use this method in the build phase to find source files corresponding to the relative path given. - :param lst: path + First it looks up the Node data structure to find any declared Node object in the build directory. + If None is found, it then considers the filesystem in the source directory. 
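A typical build-phase lookup, as a short sketch (the file name is hypothetical)::

    def build(bld):
        node = bld.path.find_resource('src/main.c')
        if not node:
            bld.fatal('missing file: src/main.c')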
+ + :param lst: relative path :type lst: string or list of string + :returns: the corresponding Node object or None + :rtype: :py:class:`waflib.Node.Node` """ if isinstance(lst, str): - lst = [x for x in split_path(lst) if x and x != '.'] + lst = [x for x in Utils.split_path(lst) if x and x != '.'] node = self.get_bld().search_node(lst) if not node: - self = self.get_src() - node = self.find_node(lst) - if node: - if os.path.isdir(node.abspath()): - return None + node = self.get_src().find_node(lst) + if node and node.isdir(): + return None return node def find_or_declare(self, lst): """ - if 'self' is in build directory, try to return an existing node - if no node is found, go to the source directory - try to find an existing node in the source directory - if no node is found, create it in the build directory + Use this method in the build phase to declare output files which + are meant to be written in the build directory. + + This method creates the Node object and its parent folder + as needed. - :param lst: path + :param lst: relative path :type lst: string or list of string """ - if isinstance(lst, str): - lst = [x for x in split_path(lst) if x and x != '.'] - - node = self.get_bld().search_node(lst) - if node: - if not os.path.isfile(node.abspath()): - node.sig = None - node.parent.mkdir() - return node - self = self.get_src() - node = self.find_node(lst) - if node: - if not os.path.isfile(node.abspath()): - node.sig = None - node.parent.mkdir() - return node - node = self.get_bld().make_node(lst) + if isinstance(lst, str) and os.path.isabs(lst): + node = self.ctx.root.make_node(lst) + else: + node = self.get_bld().make_node(lst) node.parent.mkdir() return node def find_dir(self, lst): """ - Search for a folder in the filesystem + Searches for a folder on the filesystem (see :py:meth:`waflib.Node.Node.find_node`) - :param lst: path + :param lst: relative path :type lst: string or list of string + :returns: The corresponding Node object or None if there is no such folder + :rtype: :py:class:`waflib.Node.Node` """ if isinstance(lst, str): - lst = [x for x in split_path(lst) if x and x != '.'] + lst = [x for x in Utils.split_path(lst) if x and x != '.'] node = self.find_node(lst) - try: - if not os.path.isdir(node.abspath()): - return None - except (OSError, AttributeError): - # the node might be None, and raise an AttributeError + if node and not node.isdir(): return None return node # helpers for building things def change_ext(self, ext, ext_in=None): """ + Declares a build node with a distinct extension; this is uses :py:meth:`waflib.Node.Node.find_or_declare` + :return: A build node of the same path, but with a different extension :rtype: :py:class:`waflib.Node.Node` """ @@ -743,39 +895,79 @@ def change_ext(self, ext, ext_in=None): return self.parent.find_or_declare([name]) def bldpath(self): - "Path seen from the build directory default/src/foo.cpp" + """ + Returns the relative path seen from the build directory ``src/foo.cpp`` + + :rtype: string + """ return self.path_from(self.ctx.bldnode) def srcpath(self): - "Path seen from the source directory ../src/foo.cpp" + """ + Returns the relative path seen from the source directory ``../src/foo.cpp`` + + :rtype: string + """ return self.path_from(self.ctx.srcnode) def relpath(self): - "If a file in the build directory, bldpath, else srcpath" + """ + If a file in the build directory, returns :py:meth:`waflib.Node.Node.bldpath`, + else returns :py:meth:`waflib.Node.Node.srcpath` + + :rtype: string + """ cur = self - x = 
id(self.ctx.bldnode) + x = self.ctx.bldnode while cur.parent: - if id(cur) == x: + if cur is x: return self.bldpath() cur = cur.parent return self.srcpath() def bld_dir(self): - "Build path without the file name" + """ + Equivalent to self.parent.bldpath() + + :rtype: string + """ return self.parent.bldpath() + def h_file(self): + """ + See :py:func:`waflib.Utils.h_file` + + :return: a hash representing the file contents + :rtype: string or bytes + """ + return Utils.h_file(self.abspath()) + def get_bld_sig(self): """ - Node signature, assuming the file is in the build directory + Returns a signature (see :py:meth:`waflib.Node.Node.h_file`) for the purpose + of build dependency calculation. This method uses a per-context cache. + + :return: a hash representing the object contents + :rtype: string or bytes """ + # previous behaviour can be set by returning self.ctx.node_sigs[self] when a build node try: - return self.cache_sig + cache = self.ctx.cache_sig except AttributeError: - pass - - if not self.is_bld() or self.ctx.bldnode is self.ctx.srcnode: - self.sig = Utils.h_file(self.abspath()) - self.cache_sig = ret = self.sig + cache = self.ctx.cache_sig = {} + try: + ret = cache[self] + except KeyError: + p = self.abspath() + try: + ret = cache[self] = self.h_file() + except EnvironmentError: + if self.isdir(): + # allow folders as build nodes, do not use the creation time + st = os.stat(p) + ret = cache[self] = Utils.h_list([p, st.st_ino, st.st_mode]) + return ret + raise return ret pickle_lock = Utils.threading.Lock() diff --git a/waflib/Options.py b/waflib/Options.py index ad6a44ff3d..c1ee381a5e 100644 --- a/waflib/Options.py +++ b/waflib/Options.py @@ -1,66 +1,90 @@ #!/usr/bin/env python # encoding: utf-8 # Scott Newton, 2005 (scottn) -# Thomas Nagy, 2006-2010 (ita) +# Thomas Nagy, 2006-2018 (ita) """ Support for waf command-line options -Provides default command-line options, -as well as custom ones, used by the ``options`` wscript function. - +Provides default and command-line options, as well the command +that reads the ``options`` wscript function. """ import os, tempfile, optparse, sys, re -from waflib import Logs, Utils, Context +from waflib import Logs, Utils, Context, Errors -cmds = 'distclean configure build install clean uninstall check dist distcheck'.split() +options = optparse.Values() """ -Constant representing the default waf commands displayed in:: - - $ waf --help - -""" - -options = {} -""" -A dictionary representing the command-line options:: +A global dictionary representing user-provided command-line options:: $ waf --foo=bar - """ commands = [] """ -List of commands to execute extracted from the command-line. This list is consumed during the execution, see :py:func:`waflib.Scripting.run_commands`. +List of commands to execute extracted from the command-line. This list +is consumed during the execution by :py:func:`waflib.Scripting.run_commands`. """ envvars = [] """ List of environment variable declarations placed after the Waf executable name. -These are detected by searching for "=" in the rest arguments. +These are detected by searching for "=" in the remaining arguments. +You probably do not want to use this. """ lockfile = os.environ.get('WAFLOCK', '.lock-waf_%s_build' % sys.platform) -platform = Utils.unversioned_sys_platform() - +""" +Name of the lock file that marks a project as configured +""" class opt_parser(optparse.OptionParser): """ Command-line options parser. 
""" - def __init__(self, ctx): - optparse.OptionParser.__init__(self, conflict_handler="resolve", version='waf %s (%s)' % (Context.WAFVERSION, Context.WAFREVISION)) - + def __init__(self, ctx, allow_unknown=False): + optparse.OptionParser.__init__(self, conflict_handler='resolve', add_help_option=False, + version='%s %s (%s)' % (Context.WAFNAME, Context.WAFVERSION, Context.WAFREVISION)) self.formatter.width = Logs.get_term_cols() self.ctx = ctx + self.allow_unknown = allow_unknown + + def _process_args(self, largs, rargs, values): + """ + Custom _process_args to allow unknown options according to the allow_unknown status + """ + while rargs: + try: + optparse.OptionParser._process_args(self,largs,rargs,values) + except (optparse.BadOptionError, optparse.AmbiguousOptionError) as e: + if self.allow_unknown: + largs.append(e.opt_str) + else: + self.error(str(e)) + + def _process_long_opt(self, rargs, values): + # --custom-option=-ftxyz is interpreted as -f -t... see #2280 + if self.allow_unknown: + back = [] + rargs + try: + optparse.OptionParser._process_long_opt(self, rargs, values) + except optparse.BadOptionError: + while rargs: + rargs.pop() + rargs.extend(back) + rargs.pop(0) + raise + else: + optparse.OptionParser._process_long_opt(self, rargs, values) def print_usage(self, file=None): return self.print_help(file) def get_usage(self): """ - Return the message to print on ``waf --help`` + Builds the message to print on ``waf --help`` + + :rtype: string """ cmds_str = {} for cls in Context.classes: @@ -87,19 +111,18 @@ def get_usage(self): lst.sort() ret = '\n'.join(lst) - return '''waf [commands] [options] + return '''%s [commands] [options] -Main commands (example: ./waf build -j4) +Main commands (example: ./%s build -j4) %s -''' % ret +''' % (Context.WAFNAME, Context.WAFNAME, ret) class OptionsContext(Context.Context): """ - Collect custom options from wscript files and parses the command line. - Set the global :py:const:`waflib.Options.commands` and :py:const:`waflib.Options.options` values. + Collects custom options from wscript files and parses the command line. + Sets the global :py:const:`waflib.Options.commands` and :py:const:`waflib.Options.options` values. 
""" - cmd = 'options' fun = 'options' @@ -114,11 +137,18 @@ def __init__(self, **kw): jobs = self.jobs() p = self.add_option color = os.environ.get('NOCOLOR', '') and 'no' or 'auto' + if os.environ.get('CLICOLOR', '') == '0': + color = 'no' + elif os.environ.get('CLICOLOR_FORCE', '') == '1': + color = 'yes' p('-c', '--color', dest='colors', default=color, action='store', help='whether to use colors (yes/no/auto) [default: auto]', choices=('yes', 'no', 'auto')) - p('-j', '--jobs', dest='jobs', default=jobs, type='int', help='amount of parallel jobs (%r)' % jobs) + p('-j', '--jobs', dest='jobs', default=jobs, type='int', help='amount of parallel jobs (%r)' % jobs) p('-k', '--keep', dest='keep', default=0, action='count', help='continue despite errors (-kk to try harder)') p('-v', '--verbose', dest='verbose', default=0, action='count', help='verbosity level -v -vv or -vvv [default: 0]') p('--zones', dest='zones', default='', action='store', help='debugging zones (task_gen, deps, tasks, etc)') + p('--profile', dest='profile', default=0, action='store_true', help=optparse.SUPPRESS_HELP) + p('--pdb', dest='pdb', default=0, action='store_true', help=optparse.SUPPRESS_HELP) + p('-h', '--help', dest='whelp', default=0, action='store_true', help="show this help message and exit") gr = self.add_option_group('Configuration options') self.option_groups['configure options'] = gr @@ -126,9 +156,13 @@ def __init__(self, **kw): gr.add_option('-o', '--out', action='store', default='', help='build dir for the project', dest='out') gr.add_option('-t', '--top', action='store', default='', help='src dir for the project', dest='top') + gr.add_option('--no-lock-in-run', action='store_true', default=os.environ.get('NO_LOCK_IN_RUN', ''), help=optparse.SUPPRESS_HELP, dest='no_lock_in_run') + gr.add_option('--no-lock-in-out', action='store_true', default=os.environ.get('NO_LOCK_IN_OUT', ''), help=optparse.SUPPRESS_HELP, dest='no_lock_in_out') + gr.add_option('--no-lock-in-top', action='store_true', default=os.environ.get('NO_LOCK_IN_TOP', ''), help=optparse.SUPPRESS_HELP, dest='no_lock_in_top') + default_prefix = getattr(Context.g_module, 'default_prefix', os.environ.get('PREFIX')) if not default_prefix: - if platform == 'win32': + if Utils.unversioned_sys_platform() == 'win32': d = tempfile.gettempdir() default_prefix = d[0].upper() + d[1:] # win32 preserves the case, but gettempdir does not @@ -157,8 +191,8 @@ def __init__(self, **kw): def jobs(self): """ - Find the amount of cpu cores to set the default amount of tasks executed in parallel. At - runtime the options can be obtained from :py:const:`waflib.Options.options` :: + Finds the optimal amount of cpu cores to use for parallel jobs. 
+ At runtime the options can be obtained from :py:const:`waflib.Options.options` :: from waflib.Options import options njobs = options.jobs @@ -181,7 +215,7 @@ def jobs(self): if not count and os.name not in ('nt', 'java'): try: tmp = self.cmd_and_log(['sysctl', '-n', 'hw.ncpu'], quiet=0) - except Exception: + except Errors.WafError: pass else: if re.match('^[0-9]+$', tmp): @@ -194,21 +228,25 @@ def jobs(self): def add_option(self, *k, **kw): """ - Wrapper for optparse.add_option:: + Wraps ``optparse.add_option``:: def options(ctx): - ctx.add_option('-u', '--use', dest='use', default=False, action='store_true', - help='a boolean option') + ctx.add_option('-u', '--use', dest='use', default=False, + action='store_true', help='a boolean option') + + :rtype: optparse option object """ return self.parser.add_option(*k, **kw) def add_option_group(self, *k, **kw): """ - Wrapper for optparse.add_option_group:: + Wraps ``optparse.add_option_group``:: def options(ctx): gr = ctx.add_option_group('some options') gr.add_option('-u', '--use', dest='use', default=False, action='store_true') + + :rtype: optparse option group object """ try: gr = self.option_groups[k[0]] @@ -219,13 +257,14 @@ def options(ctx): def get_option_group(self, opt_str): """ - Wrapper for optparse.get_option_group:: + Wraps ``optparse.get_option_group``:: def options(ctx): gr = ctx.get_option_group('configure options') gr.add_option('-o', '--out', action='store', default='', help='build dir for the project', dest='out') + :rtype: optparse option group object """ try: return self.option_groups[opt_str] @@ -235,35 +274,85 @@ def options(ctx): return group return None - def parse_args(self, _args=None): - """ - Parse arguments from a list (not bound to the command-line). + def sanitize_path(self, path, cwd=None): + if not cwd: + cwd = Context.launch_dir + p = os.path.expanduser(path) + p = os.path.join(cwd, p) + p = os.path.normpath(p) + p = os.path.abspath(p) + return p - :param _args: arguments - :type _args: list of strings + def parse_cmd_args(self, _args=None, cwd=None, allow_unknown=False): """ - global options, commands, envvars + Just parse the arguments + """ + self.parser.allow_unknown = allow_unknown (options, leftover_args) = self.parser.parse_args(args=_args) - + envvars = [] + commands = [] for arg in leftover_args: if '=' in arg: envvars.append(arg) - else: + elif arg != 'options': commands.append(arg) - if options.destdir: - options.destdir = os.path.abspath(os.path.expanduser(options.destdir)) - + if options.jobs < 1: + options.jobs = 1 + for name in 'top out destdir prefix bindir libdir'.split(): + # those paths are usually expanded from Context.launch_dir + if getattr(options, name, None): + path = self.sanitize_path(getattr(options, name), cwd) + setattr(options, name, path) + return options, commands, envvars + + def init_module_vars(self, arg_options, arg_commands, arg_envvars): + options.__dict__.clear() + del commands[:] + del envvars[:] + + options.__dict__.update(arg_options.__dict__) + commands.extend(arg_commands) + envvars.extend(arg_envvars) + + for var in envvars: + (name, value) = var.split('=', 1) + os.environ[name.strip()] = value + + def init_logs(self, options, commands, envvars): + Logs.verbose = options.verbose if options.verbose >= 1: self.load('errcheck') colors = {'yes' : 2, 'auto' : 1, 'no' : 0}[options.colors] Logs.enable_colors(colors) + if options.zones: + Logs.zones = options.zones.split(',') + if not Logs.verbose: + Logs.verbose = 1 + elif Logs.verbose > 0: + Logs.zones = ['runner'] + if 
Logs.verbose > 2: + Logs.zones = ['*'] + + def parse_args(self, _args=None): + """ + Parses arguments from a list which is not necessarily the command-line. + Initializes the module variables options, commands and envvars + If help is requested, prints it and exit the application + + :param _args: arguments + :type _args: list of strings + """ + options, commands, envvars = self.parse_cmd_args(_args) + self.init_logs(options, commands, envvars) + self.init_module_vars(options, commands, envvars) + def execute(self): """ See :py:func:`waflib.Context.Context.execute` """ super(OptionsContext, self).execute() self.parse_args() - + Utils.alloc_process_pool(options.jobs) diff --git a/waflib/Runner.py b/waflib/Runner.py index db3c3ed333..350c86a22c 100644 --- a/waflib/Runner.py +++ b/waflib/Runner.py @@ -1,98 +1,127 @@ #!/usr/bin/env python # encoding: utf-8 -# Thomas Nagy, 2005-2010 (ita) +# Thomas Nagy, 2005-2018 (ita) """ Runner.py: Task scheduling and execution - """ -import random, atexit +import heapq, traceback try: - from queue import Queue + from queue import Queue, PriorityQueue except ImportError: from Queue import Queue + try: + from Queue import PriorityQueue + except ImportError: + class PriorityQueue(Queue): + def _init(self, maxsize): + self.maxsize = maxsize + self.queue = [] + def _put(self, item): + heapq.heappush(self.queue, item) + def _get(self): + return heapq.heappop(self.queue) + from waflib import Utils, Task, Errors, Logs -GAP = 10 +GAP = 5 """ -Wait for free tasks if there are at least ``GAP * njobs`` in queue +Wait for at least ``GAP * njobs`` before trying to enqueue more tasks to run """ -class TaskConsumer(Utils.threading.Thread): - """ - Task consumers belong to a pool of workers +class PriorityTasks(object): + def __init__(self): + self.lst = [] + def __len__(self): + return len(self.lst) + def __iter__(self): + return iter(self.lst) + def __str__(self): + return 'PriorityTasks: [%s]' % '\n '.join(str(x) for x in self.lst) + def clear(self): + self.lst = [] + def append(self, task): + heapq.heappush(self.lst, task) + def appendleft(self, task): + "Deprecated, do not use" + heapq.heappush(self.lst, task) + def pop(self): + return heapq.heappop(self.lst) + def extend(self, lst): + if self.lst: + for x in lst: + self.append(x) + else: + if isinstance(lst, list): + self.lst = lst + heapq.heapify(lst) + else: + self.lst = lst.lst - They wait for tasks in the queue and then use ``task.process(...)`` +class Consumer(Utils.threading.Thread): """ - def __init__(self): + Daemon thread object that executes a task. It shares a semaphore with + the coordinator :py:class:`waflib.Runner.Spawner`. There is one + instance per task to consume. + """ + def __init__(self, spawner, task): Utils.threading.Thread.__init__(self) - self.ready = Queue() + self.task = task + """Task to execute""" + self.spawner = spawner + """Coordinator object""" + self.daemon = True + self.start() + def run(self): """ - Obtain :py:class:`waflib.Task.TaskBase` instances from this queue. + Processes a single task """ - self.setDaemon(1) + try: + if not self.spawner.master.stop: + self.spawner.master.process_task(self.task) + finally: + self.spawner.sem.release() + self.spawner.master.out.put(self.task) + self.task = None + self.spawner = None + +class Spawner(Utils.threading.Thread): + """ + Daemon thread that consumes tasks from :py:class:`waflib.Runner.Parallel` producer and + spawns a consuming thread :py:class:`waflib.Runner.Consumer` for each + :py:class:`waflib.Task.Task` instance. 
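The coordination pattern, reduced to a standalone sketch (the names below are illustrative, not the actual Waf classes)::

    import threading

    def spawner(ready, out, njobs):
        sem = threading.Semaphore(njobs) # caps the number of live consumers
        while True:
            task = ready.get() # 'ready' is a Queue-like object filled by the producer
            if task is None: # sentinel: the producer has no more work
                return
            sem.acquire()
            def consume(t=task):
                try:
                    t() # "process" the task
                finally:
                    sem.release() # allow another consumer to start
                    out.put(t) # hand the result back to the producer
            threading.Thread(target=consume).start()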
+ """ + def __init__(self, master): + Utils.threading.Thread.__init__(self) + self.master = master + """:py:class:`waflib.Runner.Parallel` producer instance""" + self.sem = Utils.threading.Semaphore(master.numjobs) + """Bounded semaphore that prevents spawning more than *n* concurrent consumers""" + self.daemon = True self.start() - def run(self): """ - Loop over the tasks to execute + Spawns new consumers to execute tasks by delegating to :py:meth:`waflib.Runner.Spawner.loop` """ try: self.loop() except Exception: + # Python 2 prints unnecessary messages when shutting down + # we also want to stop the thread properly pass - def loop(self): """ - Obtain tasks from :py:attr:`waflib.Runner.TaskConsumer.ready` and call - :py:meth:`waflib.Task.TaskBase.process`. If the object is a function, execute it. + Consumes task objects from the producer; ends when the producer has no more + task to provide. """ + master = self.master while 1: - tsk = self.ready.get() - if not isinstance(tsk, Task.TaskBase): - tsk(self) - else: - tsk.process() - -pool = Queue() -""" -Pool of task consumer objects -""" - -def get_pool(): - """ - Obtain a task consumer from :py:attr:`waflib.Runner.pool`. - Do not forget to put it back by using :py:func:`waflib.Runner.put_pool` - and reset properly (original waiting queue). - - :rtype: :py:class:`waflib.Runner.TaskConsumer` - """ - try: - return pool.get(False) - except Exception: - return TaskConsumer() - -def put_pool(x): - """ - Return a task consumer to the thread pool :py:attr:`waflib.Runner.pool` - - :param x: task consumer object - :type x: :py:class:`waflib.Runner.TaskConsumer` - """ - pool.put(x) - -def _free_resources(): - global pool - lst = [] - while pool.qsize(): - lst.append(pool.get()) - for x in lst: - x.ready.put(None) - for x in lst: - x.join() - pool = None -atexit.register(_free_resources) + task = master.ready.get() + self.sem.acquire() + if not master.stop: + task.log_display(task.generator.bld) + Consumer(self, task) class Parallel(object): """ @@ -106,7 +135,7 @@ def __init__(self, bld, j=2): self.numjobs = j """ - Number of consumers in the pool + Amount of parallel consumers to use """ self.bld = bld @@ -114,19 +143,25 @@ def __init__(self, bld, j=2): Instance of :py:class:`waflib.Build.BuildContext` """ - self.outstanding = [] - """List of :py:class:`waflib.Task.TaskBase` that may be ready to be executed""" + self.outstanding = PriorityTasks() + """Heap of :py:class:`waflib.Task.Task` that may be ready to be executed""" - self.frozen = [] - """List of :py:class:`waflib.Task.TaskBase` that cannot be executed immediately""" + self.postponed = PriorityTasks() + """Heap of :py:class:`waflib.Task.Task` which are not ready to run for non-DAG reasons""" + + self.incomplete = set() + """List of :py:class:`waflib.Task.Task` waiting for dependent tasks to complete (DAG)""" + + self.ready = PriorityQueue(0) + """List of :py:class:`waflib.Task.Task` ready to be executed by consumers""" self.out = Queue(0) - """List of :py:class:`waflib.Task.TaskBase` returned by the task consumers""" + """List of :py:class:`waflib.Task.Task` returned by the task consumers""" self.count = 0 """Amount of tasks that may be processed by :py:class:`waflib.Runner.TaskConsumer`""" - self.processed = 1 + self.processed = 0 """Amount of tasks processed""" self.stop = False @@ -139,33 +174,46 @@ def __init__(self, bld, j=2): """Task iterator which must give groups of parallelizable tasks when calling ``next()``""" self.dirty = False - """Flag to indicate that tasks have been executed, 
and that the build cache must be saved (call :py:meth:`waflib.Build.BuildContext.store`)""" + """ + Flag that indicates that the build cache must be saved when a task was executed + (calls :py:meth:`waflib.Build.BuildContext.store`)""" + + self.revdeps = Utils.defaultdict(set) + """ + The reverse dependency graph of dependencies obtained from Task.run_after + """ + + self.spawner = None + """ + Coordinating daemon thread that spawns thread consumers + """ + if self.numjobs > 1: + self.spawner = Spawner(self) def get_next_task(self): """ - Obtain the next task to execute. + Obtains the next Task instance to run - :rtype: :py:class:`waflib.Task.TaskBase` + :rtype: :py:class:`waflib.Task.Task` """ if not self.outstanding: return None - return self.outstanding.pop(0) + return self.outstanding.pop() def postpone(self, tsk): """ - A task cannot be executed at this point, put it in the list :py:attr:`waflib.Runner.Parallel.frozen`. + Adds the task to the list :py:attr:`waflib.Runner.Parallel.postponed`. + The order is scrambled so as to consume as many tasks in parallel as possible. - :param tsk: task - :type tsk: :py:class:`waflib.Task.TaskBase` + :param tsk: task instance + :type tsk: :py:class:`waflib.Task.Task` """ - if random.randint(0, 1): - self.frozen.insert(0, tsk) - else: - self.frozen.append(tsk) + self.postponed.append(tsk) def refill_task_list(self): """ - Put the next group of tasks to execute in :py:attr:`waflib.Runner.Parallel.outstanding`. + Pulls a next group of tasks to execute in :py:attr:`waflib.Runner.Parallel.outstanding`. + Ensures that all tasks in the current build group are complete before processing the next one. """ while self.count > self.numjobs * GAP: self.get_out() @@ -173,125 +221,231 @@ def refill_task_list(self): while not self.outstanding: if self.count: self.get_out() - elif self.frozen: + if self.outstanding: + break + elif self.postponed: try: cond = self.deadlock == self.processed except AttributeError: pass else: if cond: - msg = 'check the build order for the tasks' - for tsk in self.frozen: - if not tsk.run_after: - msg = 'check the methods runnable_status' - break + # The most common reason is conflicting build order declaration + # for example: "X run_after Y" and "Y run_after X" + # Another can be changing "run_after" dependencies while the build is running + # for example: updating "tsk.run_after" in the "runnable_status" method lst = [] - for tsk in self.frozen: - lst.append('%s\t-> %r' % (repr(tsk), [id(x) for x in tsk.run_after])) - raise Errors.WafError('Deadlock detected: %s%s' % (msg, ''.join(lst))) + for tsk in self.postponed: + deps = [id(x) for x in tsk.run_after if not x.hasrun] + lst.append('%s\t-> %r' % (repr(tsk), deps)) + if not deps: + lst.append('\n task %r dependencies are done, check its *runnable_status*?' 
% id(tsk)) + raise Errors.WafError('Deadlock detected: check the task build order%s' % ''.join(lst)) self.deadlock = self.processed - if self.frozen: - self.outstanding += self.frozen - self.frozen = [] + if self.postponed: + self.outstanding.extend(self.postponed) + self.postponed.clear() elif not self.count: - self.outstanding.extend(next(self.biter)) - self.total = self.bld.total() - break + if self.incomplete: + for x in self.incomplete: + for k in x.run_after: + if not k.hasrun: + break + else: + # dependency added after the build started without updating revdeps + self.incomplete.remove(x) + self.outstanding.append(x) + break + else: + if self.stop or self.error: + break + raise Errors.WafError('Broken revdeps detected on %r' % self.incomplete) + else: + tasks = next(self.biter) + ready, waiting = self.prio_and_split(tasks) + self.outstanding.extend(ready) + self.incomplete.update(waiting) + self.total = self.bld.total() + break def add_more_tasks(self, tsk): """ - Tasks may be added dynamically during the build by binding them to the task :py:attr:`waflib.Task.TaskBase.more_tasks` + If a task provides :py:attr:`waflib.Task.Task.more_tasks`, then the tasks contained + in that list are added to the current build and will be processed before the next build group. + + The priorities for dependent tasks are not re-calculated globally - :param tsk: task - :type tsk: :py:attr:`waflib.Task.TaskBase` + :param tsk: task instance + :type tsk: :py:attr:`waflib.Task.Task` """ if getattr(tsk, 'more_tasks', None): - self.outstanding += tsk.more_tasks + more = set(tsk.more_tasks) + groups_done = set() + def iteri(a, b): + for x in a: + yield x + for x in b: + yield x + + # Update the dependency tree + # this assumes that task.run_after values were updated + for x in iteri(self.outstanding, self.incomplete): + for k in x.run_after: + if isinstance(k, Task.TaskGroup): + if k not in groups_done: + groups_done.add(k) + for j in k.prev & more: + self.revdeps[j].add(k) + elif k in more: + self.revdeps[k].add(x) + + ready, waiting = self.prio_and_split(tsk.more_tasks) + self.outstanding.extend(ready) + self.incomplete.update(waiting) self.total += len(tsk.more_tasks) + def mark_finished(self, tsk): + def try_unfreeze(x): + # DAG ancestors are likely to be in the incomplete set + # This assumes that the run_after contents have not changed + # after the build starts, else a deadlock may occur + if x in self.incomplete: + # TODO remove dependencies to free some memory? + # x.run_after.remove(tsk) + for k in x.run_after: + if not k.hasrun: + break + else: + self.incomplete.remove(x) + self.outstanding.append(x) + + if tsk in self.revdeps: + for x in self.revdeps[tsk]: + if isinstance(x, Task.TaskGroup): + x.prev.remove(tsk) + if not x.prev: + for k in x.next: + # TODO necessary optimization? + k.run_after.remove(x) + try_unfreeze(k) + # TODO necessary optimization? + x.next = [] + else: + try_unfreeze(x) + del self.revdeps[tsk] + + if hasattr(tsk, 'semaphore'): + sem = tsk.semaphore + try: + sem.release(tsk) + except KeyError: + # TODO + pass + else: + while sem.waiting and not sem.is_locked(): + # take a frozen task, make it ready to run + x = sem.waiting.pop() + self._add_task(x) + def get_out(self): """ - Obtain one task returned from the task consumers, and update the task count. Add more tasks if necessary through - :py:attr:`waflib.Runner.Parallel.add_more_tasks`. + Waits for a Task that task consumers add to :py:attr:`waflib.Runner.Parallel.out` after execution. 
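For reference, a schematic task that feeds extra work back to the scheduler through ``more_tasks`` (the task class names and the file name are hypothetical)::

    from waflib import Task

    class unpack_headers(Task.Task):
        def run(self):
            # pretend this step discovered a file that now needs post-processing
            out = self.outputs[0].parent.find_or_declare('discovered.h')
            self.more_tasks = [self.generator.create_task('process_header', [], [out])]
            return 0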
+ Adds more Tasks if necessary through :py:attr:`waflib.Runner.Parallel.add_more_tasks`. - :rtype: :py:attr:`waflib.Task.TaskBase` + :rtype: :py:attr:`waflib.Task.Task` """ tsk = self.out.get() if not self.stop: self.add_more_tasks(tsk) + self.mark_finished(tsk) + self.count -= 1 self.dirty = True return tsk def add_task(self, tsk): """ - Pass a task to a consumer. + Enqueue a Task to :py:attr:`waflib.Runner.Parallel.ready` so that consumers can run them. - :param tsk: task - :type tsk: :py:attr:`waflib.Task.TaskBase` + :param tsk: task instance + :type tsk: :py:attr:`waflib.Task.Task` """ - try: - self.pool - except AttributeError: - self.init_task_pool() + # TODO change in waf 2.1 self.ready.put(tsk) - def init_task_pool(self): - # lazy creation, and set a common pool for all task consumers - pool = self.pool = [get_pool() for i in range(self.numjobs)] - self.ready = Queue(0) - def setq(consumer): - consumer.ready = self.ready - for x in pool: - x.ready.put(setq) - return pool - - def free_task_pool(self): - # return the consumers, setting a different queue for each of them - def setq(consumer): - consumer.ready = Queue(0) - self.out.put(self) - try: - pool = self.pool - except AttributeError: - pass + def _add_task(self, tsk): + if hasattr(tsk, 'semaphore'): + sem = tsk.semaphore + try: + sem.acquire(tsk) + except IndexError: + sem.waiting.add(tsk) + return + + self.count += 1 + self.processed += 1 + if self.numjobs == 1: + tsk.log_display(tsk.generator.bld) + try: + self.process_task(tsk) + finally: + self.out.put(tsk) else: - for x in pool: - self.ready.put(setq) - for x in pool: - self.get_out() - for x in pool: - put_pool(x) - self.pool = [] + self.add_task(tsk) + + def process_task(self, tsk): + """ + Processes a task and attempts to stop the build in case of errors + """ + tsk.process() + if tsk.hasrun != Task.SUCCESS: + self.error_handler(tsk) def skip(self, tsk): + """ + Mark a task as skipped/up-to-date + """ tsk.hasrun = Task.SKIPPED + self.mark_finished(tsk) + + def cancel(self, tsk): + """ + Mark a task as failed because of unsatisfiable dependencies + """ + tsk.hasrun = Task.CANCELED + self.mark_finished(tsk) def error_handler(self, tsk): """ - Called when a task cannot be executed. The flag :py:attr:`waflib.Runner.Parallel.stop` is set, unless - the build is executed with:: + Called when a task cannot be executed. The flag :py:attr:`waflib.Runner.Parallel.stop` is set, + unless the build is executed with:: $ waf build -k - :param tsk: task - :type tsk: :py:attr:`waflib.Task.TaskBase` + :param tsk: task instance + :type tsk: :py:attr:`waflib.Task.Task` """ if not self.bld.keep: self.stop = True self.error.append(tsk) def task_status(self, tsk): + """ + Obtains the task status to decide whether to run it immediately or not. 
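The status protocol can be seen in a schematic ``runnable_status`` override (illustrative only, not part of this module)::

    from waflib import Task

    class careful(Task.Task):
        def runnable_status(self):
            for t in self.run_after:
                if not t.hasrun:
                    return Task.ASK_LATER # retry once the dependencies are done
            return super(careful, self).runnable_status()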
+ + :return: the exit status, for example :py:attr:`waflib.Task.ASK_LATER` + :rtype: integer + """ try: return tsk.runnable_status() except Exception: self.processed += 1 - tsk.err_msg = Utils.ex_stack() + tsk.err_msg = traceback.format_exc() if not self.stop and self.bld.keep: self.skip(tsk) if self.bld.keep == 1: - # if -k stop at the first exception, if -kk try to go as far as possible + # if -k stop on the first exception, if -kk try to go as far as possible if Logs.verbose > 1 or not self.error: self.error.append(tsk) self.stop = True @@ -299,17 +453,20 @@ def task_status(self, tsk): if Logs.verbose > 1: self.error.append(tsk) return Task.EXCEPTION - tsk.hasrun = Task.EXCEPTION + tsk.hasrun = Task.EXCEPTION self.error_handler(tsk) + return Task.EXCEPTION def start(self): """ - Give tasks to :py:class:`waflib.Runner.TaskConsumer` instances until the build finishes or the ``stop`` flag is set. - If only one job is used, then execute the tasks one by one, without consumers. + Obtains Task instances from the BuildContext instance and adds the ones that need to be executed to + :py:class:`waflib.Runner.Parallel.ready` so that the :py:class:`waflib.Runner.Spawner` consumer thread + has them executed. Obtains the executed Tasks back from :py:class:`waflib.Runner.Parallel.out` + and marks the build as failed by setting the ``stop`` flag. + If only one job is used, then executes the tasks one by one, without consumers. """ - self.total = self.bld.total() while not self.stop: @@ -331,36 +488,135 @@ def start(self): self.processed += 1 continue - if self.stop: # stop immediately after a failure was detected + if self.stop: # stop immediately after a failure is detected break - st = self.task_status(tsk) if st == Task.RUN_ME: - tsk.position = (self.processed, self.total) - self.count += 1 - tsk.master = self - self.processed += 1 - - if self.numjobs == 1: - tsk.process() - else: - self.add_task(tsk) - if st == Task.ASK_LATER: + self._add_task(tsk) + elif st == Task.ASK_LATER: self.postpone(tsk) elif st == Task.SKIP_ME: self.processed += 1 self.skip(tsk) self.add_more_tasks(tsk) + elif st == Task.CANCEL_ME: + # A dependency problem has occurred, and the + # build is most likely run with `waf -k` + if Logs.verbose > 1: + self.error.append(tsk) + self.processed += 1 + self.cancel(tsk) # self.count represents the tasks that have been made available to the consumer threads # collect all the tasks after an error else the message may be incomplete while self.error and self.count: self.get_out() - #print loop - assert (self.count == 0 or self.stop) + self.ready.put(None) + if not self.stop: + assert not self.count + assert not self.postponed + assert not self.incomplete + + def prio_and_split(self, tasks): + """ + Label input tasks with priority values, and return a pair containing + the tasks that are ready to run and the tasks that are necessarily + waiting for other tasks to complete. + + The priority system is really meant as an optional layer for optimization: + dependency cycles are found quickly, and builds should be more efficient. + A high priority number means that a task is processed first. 
+ + This method can be overridden to disable the priority system:: + + def prio_and_split(self, tasks): + return tasks, [] - # free the task pool, if any - self.free_task_pool() + :return: A pair of task lists + :rtype: tuple + """ + # to disable: + #return tasks, [] + for x in tasks: + x.visited = 0 + + reverse = self.revdeps + + groups_done = set() + for x in tasks: + for k in x.run_after: + if isinstance(k, Task.TaskGroup): + if k not in groups_done: + groups_done.add(k) + for j in k.prev: + reverse[j].add(k) + else: + reverse[k].add(x) + + # the priority number is not the tree depth + def visit(n): + if isinstance(n, Task.TaskGroup): + return sum(visit(k) for k in n.next) + + if n.visited == 0: + n.visited = 1 + + if n in reverse: + rev = reverse[n] + n.prio_order = n.tree_weight + len(rev) + sum(visit(k) for k in rev) + else: + n.prio_order = n.tree_weight + + n.visited = 2 + elif n.visited == 1: + raise Errors.WafError('Dependency cycle found!') + return n.prio_order + + for x in tasks: + if x.visited != 0: + # must visit all to detect cycles + continue + try: + visit(x) + except Errors.WafError: + self.debug_cycles(tasks, reverse) + + ready = [] + waiting = [] + for x in tasks: + for k in x.run_after: + if not k.hasrun: + waiting.append(x) + break + else: + ready.append(x) + return (ready, waiting) + + def debug_cycles(self, tasks, reverse): + tmp = {} + for x in tasks: + tmp[x] = 0 + + def visit(n, acc): + if isinstance(n, Task.TaskGroup): + for k in n.next: + visit(k, acc) + return + if tmp[n] == 0: + tmp[n] = 1 + for k in reverse.get(n, []): + visit(k, [n] + acc) + tmp[n] = 2 + elif tmp[n] == 1: + lst = [] + for tsk in acc: + lst.append(repr(tsk)) + if tsk is n: + # exclude prior nodes, we want the minimum cycle + break + raise Errors.WafError('Task dependency cycle in "run_after" constraints: %s' % ''.join(lst)) + for x in tasks: + visit(x, []) diff --git a/waflib/Scripting.py b/waflib/Scripting.py index 4a78557c57..a80cb36786 100644 --- a/waflib/Scripting.py +++ b/waflib/Scripting.py @@ -1,9 +1,11 @@ #!/usr/bin/env python # encoding: utf-8 -# Thomas Nagy, 2005-2010 (ita) +# Thomas Nagy, 2005-2018 (ita) "Module called for configuring, compiling and installing targets" +from __future__ import with_statement + import os, shlex, shutil, traceback, errno, sys, stat from waflib import Utils, Configure, Logs, Options, ConfigSet, Context, Errors, Build, Node @@ -24,58 +26,66 @@ def waf_entry_point(current_directory, version, wafdir): :param wafdir: absolute path representing the directory of the waf library :type wafdir: string """ - Logs.init_log() if Context.WAFVERSION != version: - Logs.error('Waf script %r and library %r do not match (directory %r)' % (version, Context.WAFVERSION, wafdir)) + Logs.error('Waf script %r and library %r do not match (directory %r)', version, Context.WAFVERSION, wafdir) sys.exit(1) - if '--version' in sys.argv: - Context.run_dir = current_directory - ctx = Context.create_context('options') - ctx.curdir = current_directory - ctx.parse_args() - sys.exit(0) + # Store current directory before any chdir + Context.waf_dir = wafdir + Context.run_dir = Context.launch_dir = current_directory + start_dir = current_directory + no_climb = os.environ.get('NOCLIMB') if len(sys.argv) > 1: - # os.path.join handles absolute paths in sys.argv[1] accordingly (it discards the previous ones) + # os.path.join handles absolute paths # if sys.argv[1] is not an absolute path, then it is relative to the current working directory potential_wscript = os.path.join(current_directory, 
sys.argv[1]) - # maybe check if the file is executable - # perhaps extract 'wscript' as a constant - if os.path.basename(potential_wscript) == 'wscript' and os.path.isfile(potential_wscript): + if os.path.basename(potential_wscript) == Context.WSCRIPT_FILE and os.path.isfile(potential_wscript): # need to explicitly normalize the path, as it may contain extra '/.' - current_directory = os.path.normpath(os.path.dirname(potential_wscript)) + path = os.path.normpath(os.path.dirname(potential_wscript)) + start_dir = os.path.abspath(path) + no_climb = True sys.argv.pop(1) - Context.waf_dir = wafdir - Context.launch_dir = current_directory + ctx = Context.create_context('options') + (options, commands, env) = ctx.parse_cmd_args(allow_unknown=True) + if options.top: + start_dir = Context.run_dir = Context.top_dir = options.top + no_climb = True + if options.out: + Context.out_dir = options.out # if 'configure' is in the commands, do not search any further - no_climb = os.environ.get('NOCLIMB', None) if not no_climb: for k in no_climb_commands: - for y in sys.argv: + for y in commands: if y.startswith(k): no_climb = True break # try to find a lock file (if the project was configured) # at the same time, store the first wscript file seen - cur = current_directory + cur = start_dir while cur: - lst = os.listdir(cur) + try: + lst = os.listdir(cur) + except OSError: + lst = [] + Logs.error('Directory %r is unreadable!', cur) if Options.lockfile in lst: env = ConfigSet.ConfigSet() try: env.load(os.path.join(cur, Options.lockfile)) ino = os.stat(cur)[stat.ST_INO] - except Exception: + except EnvironmentError: pass else: # check if the folder was not moved for x in (env.run_dir, env.top_dir, env.out_dir): + if not x: + continue if Utils.is_win32: if cur == x: load = True @@ -91,7 +101,7 @@ def waf_entry_point(current_directory, version, wafdir): load = True break else: - Logs.warn('invalid lock file in %s' % cur) + Logs.warn('invalid lock file in %s', cur) load = False if load: @@ -112,56 +122,62 @@ def waf_entry_point(current_directory, version, wafdir): if no_climb: break - if not Context.run_dir: - if '-h' in sys.argv or '--help' in sys.argv: - Logs.warn('No wscript file found: the help message may be incomplete') - Context.run_dir = current_directory - ctx = Context.create_context('options') - ctx.curdir = current_directory - ctx.parse_args() + wscript = os.path.normpath(os.path.join(Context.run_dir, Context.WSCRIPT_FILE)) + if not os.path.exists(wscript): + if options.whelp: + Logs.warn('These are the generic options (no wscript/project found)') + ctx.parser.print_help() sys.exit(0) - Logs.error('Waf: Run from a directory containing a file named %r' % Context.WSCRIPT_FILE) + Logs.error('Waf: Run from a folder containing a %r file (or try -h for the generic options)', Context.WSCRIPT_FILE) sys.exit(1) try: os.chdir(Context.run_dir) except OSError: - Logs.error('Waf: The folder %r is unreadable' % Context.run_dir) + Logs.error('Waf: The folder %r is unreadable', Context.run_dir) sys.exit(1) try: - set_main_module(os.path.join(Context.run_dir, Context.WSCRIPT_FILE)) + set_main_module(wscript) except Errors.WafError as e: Logs.pprint('RED', e.verbose_msg) Logs.error(str(e)) sys.exit(1) except Exception as e: - Logs.error('Waf: The wscript in %r is unreadable' % Context.run_dir, e) + Logs.error('Waf: The wscript in %r is unreadable', Context.run_dir) traceback.print_exc(file=sys.stdout) sys.exit(2) - """ - import cProfile, pstats - cProfile.runctx("from waflib import Scripting; Scripting.run_commands()", 
{}, {}, 'profi.txt') - p = pstats.Stats('profi.txt') - p.sort_stats('time').print_stats(75) # or 'cumulative' - """ - try: - run_commands() - except Errors.WafError as e: - if Logs.verbose > 1: - Logs.pprint('RED', e.verbose_msg) - Logs.error(e.msg) - sys.exit(1) - except SystemExit: - raise - except Exception as e: - traceback.print_exc(file=sys.stdout) - sys.exit(2) - except KeyboardInterrupt: - Logs.pprint('RED', 'Interrupted') - sys.exit(68) - #""" + if options.profile: + import cProfile, pstats + cProfile.runctx('from waflib import Scripting; Scripting.run_commands()', {}, {}, 'profi.txt') + p = pstats.Stats('profi.txt') + p.sort_stats('time').print_stats(75) # or 'cumulative' + else: + try: + try: + run_commands() + except: + if options.pdb: + import pdb + type, value, tb = sys.exc_info() + traceback.print_exc() + pdb.post_mortem(tb) + else: + raise + except Errors.WafError as e: + if Logs.verbose > 1: + Logs.pprint('RED', e.verbose_msg) + Logs.error(e.msg) + sys.exit(1) + except SystemExit: + raise + except Exception as e: + traceback.print_exc(file=sys.stdout) + sys.exit(2) + except KeyboardInterrupt: + Logs.pprint('RED', 'Interrupted') + sys.exit(68) def set_main_module(file_path): """ @@ -182,7 +198,7 @@ def set_def(obj): name = obj.__name__ if not name in Context.g_module.__dict__: setattr(Context.g_module, name, obj) - for k in (update, dist, distclean, distcheck, update): + for k in (dist, distclean, distcheck): set_def(k) # add dummy init and shutdown functions if they're not defined if not 'init' in Context.g_module.__dict__: @@ -194,36 +210,23 @@ def set_def(obj): def parse_options(): """ - Parse the command-line options and initialize the logging system. + Parses the command-line options and initialize the logging system. Called by :py:func:`waflib.Scripting.waf_entry_point` during the initialization. """ - Context.create_context('options').execute() - - for var in Options.envvars: - (name, value) = var.split('=', 1) - os.environ[name.strip()] = value - + ctx = Context.create_context('options') + ctx.execute() if not Options.commands: - Options.commands = [default_cmd] - Options.commands = [x for x in Options.commands if x != 'options'] # issue 1076 - - # process some internal Waf options - Logs.verbose = Options.options.verbose - #Logs.init_log() - - if Options.options.zones: - Logs.zones = Options.options.zones.split(',') - if not Logs.verbose: - Logs.verbose = 1 - elif Logs.verbose > 0: - Logs.zones = ['runner'] - - if Logs.verbose > 2: - Logs.zones = ['*'] + if isinstance(default_cmd, list): + Options.commands.extend(default_cmd) + else: + Options.commands.append(default_cmd) + if Options.options.whelp: + ctx.parser.print_help() + sys.exit(0) def run_command(cmd_name): """ - Execute a single command. Called by :py:func:`waflib.Scripting.run_commands`. + Executes a single Waf command. Called by :py:func:`waflib.Scripting.run_commands`. :param cmd_name: command to execute, like ``build`` :type cmd_name: string @@ -241,7 +244,7 @@ def run_command(cmd_name): def run_commands(): """ - Execute the commands that were given on the command-line, and the other options + Execute the Waf commands that were given on the command-line, and the other options Called by :py:func:`waflib.Scripting.waf_entry_point` during the initialization, and executed after :py:func:`waflib.Scripting.parse_options`. 
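Command names are resolved against the project wscript, so a minimal custom command is a plain function (the command name below is hypothetical)::

    # wscript
    def hello(ctx):
        print('hello from a custom command') # invoked with: ./waf hello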
""" @@ -250,18 +253,11 @@ def run_commands(): while Options.commands: cmd_name = Options.commands.pop(0) ctx = run_command(cmd_name) - Logs.info('%r finished successfully (%s)' % (cmd_name, str(ctx.log_timer))) + Logs.info('%r finished successfully (%s)', cmd_name, ctx.log_timer) run_command('shutdown') ########################################################################################### -def _can_distclean(name): - # WARNING: this method may disappear anytime - for k in '.o .moc .exe'.split(): - if name.endswith(k): - return True - return False - def distclean_dir(dirname): """ Distclean function called in the particular case when:: @@ -273,12 +269,12 @@ def distclean_dir(dirname): """ for (root, dirs, files) in os.walk(dirname): for f in files: - if _can_distclean(f): + if f.endswith(('.o', '.moc', '.exe')): fname = os.path.join(root, f) try: os.remove(fname) except OSError: - Logs.warn('Could not remove %r' % fname) + Logs.warn('Could not remove %r', fname) for x in (Context.DBFILE, 'config.log'): try: @@ -287,45 +283,63 @@ def distclean_dir(dirname): pass try: - shutil.rmtree('c4che') + shutil.rmtree(Build.CACHE_DIR) except OSError: pass def distclean(ctx): - '''removes the build directory''' - lst = os.listdir('.') - for f in lst: - if f == Options.lockfile: - try: - proj = ConfigSet.ConfigSet(f) - except IOError: - Logs.warn('Could not read %r' % f) - continue + '''removes build folders and data''' - if proj['out_dir'] != proj['top_dir']: - try: - shutil.rmtree(proj['out_dir']) - except IOError: - pass - except OSError as e: - if e.errno != errno.ENOENT: - Logs.warn('Could not remove %r' % proj['out_dir']) - else: - distclean_dir(proj['out_dir']) + def remove_and_log(k, fun): + try: + fun(k) + except EnvironmentError as e: + if e.errno != errno.ENOENT: + Logs.warn('Could not remove %r', k) - for k in (proj['out_dir'], proj['top_dir'], proj['run_dir']): - p = os.path.join(k, Options.lockfile) - try: - os.remove(p) - except OSError as e: - if e.errno != errno.ENOENT: - Logs.warn('Could not remove %r' % p) + # remove waf cache folders on the top-level + if not Options.commands: + for k in os.listdir('.'): + for x in '.waf-2 waf-2 .waf3-2 waf3-2'.split(): + if k.startswith(x): + remove_and_log(k, shutil.rmtree) - # remove local waf cache folders - if not Options.commands: - for x in '.waf-1. waf-1. .waf3-1. waf3-1.'.split(): - if f.startswith(x): - shutil.rmtree(f, ignore_errors=True) + # remove a build folder, if any + cur = '.' 
+ if os.environ.get('NO_LOCK_IN_TOP') or ctx.options.no_lock_in_top: + cur = ctx.options.out + + try: + lst = os.listdir(cur) + except OSError: + Logs.warn('Could not read %r', cur) + return + + if Options.lockfile in lst: + f = os.path.join(cur, Options.lockfile) + try: + env = ConfigSet.ConfigSet(f) + except EnvironmentError: + Logs.warn('Could not read %r', f) + return + + if not env.out_dir or not env.top_dir: + Logs.warn('Invalid lock file %r', f) + return + + if env.out_dir == env.top_dir: + distclean_dir(env.out_dir) + else: + remove_and_log(env.out_dir, shutil.rmtree) + + env_dirs = [env.out_dir] + if not (os.environ.get('NO_LOCK_IN_TOP') or ctx.options.no_lock_in_top): + env_dirs.append(env.top_dir) + if not (os.environ.get('NO_LOCK_IN_RUN') or ctx.options.no_lock_in_run): + env_dirs.append(env.run_dir) + for k in env_dirs: + p = os.path.join(k, Options.lockfile) + remove_and_log(p, os.remove) class Dist(Context.Context): '''creates an archive containing the project source code''' @@ -343,7 +357,7 @@ def execute(self): def archive(self): """ - Create the archive. + Creates the source archive. """ import tarfile @@ -363,43 +377,51 @@ def archive(self): files = self.get_files() if self.algo.startswith('tar.'): - tar = tarfile.open(arch_name, 'w:' + self.algo.replace('tar.', '')) + tar = tarfile.open(node.abspath(), 'w:' + self.algo.replace('tar.', '')) for x in files: self.add_tar_file(x, tar) tar.close() elif self.algo == 'zip': import zipfile - zip = zipfile.ZipFile(arch_name, 'w', compression=zipfile.ZIP_DEFLATED) + zip = zipfile.ZipFile(node.abspath(), 'w', compression=zipfile.ZIP_DEFLATED) for x in files: archive_name = self.get_base_name() + '/' + x.path_from(self.base_path) - zip.write(x.abspath(), archive_name, zipfile.ZIP_DEFLATED) + if os.environ.get('SOURCE_DATE_EPOCH'): + # TODO: parse that timestamp + zip.writestr(zipfile.ZipInfo(archive_name), x.read(), zipfile.ZIP_DEFLATED) + else: + zip.write(x.abspath(), archive_name, zipfile.ZIP_DEFLATED) zip.close() else: self.fatal('Valid algo types are tar.bz2, tar.gz, tar.xz or zip') try: - from hashlib import sha1 as sha + from hashlib import sha256 except ImportError: - from sha import sha - try: - digest = " (sha=%r)" % sha(node.read()).hexdigest() - except Exception: digest = '' + else: + digest = ' (sha256=%r)' % sha256(node.read(flags='rb')).hexdigest() - Logs.info('New archive created: %s%s' % (self.arch_name, digest)) + Logs.info('New archive created: %s%s', self.arch_name, digest) def get_tar_path(self, node): """ - return the path to use for a node in the tar archive, the purpose of this + Return the path to use for a node in the tar archive, the purpose of this is to let subclases resolve symbolic links or to change file names + + :return: absolute path + :rtype: string """ return node.abspath() def add_tar_file(self, x, tar): """ - Add a file to the tar archive. Transform symlinks into files if the files lie out of the project tree. + Adds a file to the tar archive. Symlinks are not verified. 
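The sha256 digest logged by archive() above can be recomputed outside of waf to check a published tarball, and with SOURCE_DATE_EPOCH set the archives become easier to reproduce bit-for-bit; a minimal sketch, with a hypothetical archive name:

    import hashlib

    with open('myproject-1.0.tar.bz2', 'rb') as f:  # hypothetical archive name
        print(hashlib.sha256(f.read()).hexdigest())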
+ + :param x: file path + :param tar: tar file object """ p = self.get_tar_path(x) tinfo = tar.gettarinfo(name=p, arcname=self.get_tar_prefix() + '/' + x.path_from(self.base_path)) @@ -407,16 +429,21 @@ def add_tar_file(self, x, tar): tinfo.gid = 0 tinfo.uname = 'root' tinfo.gname = 'root' + if os.environ.get('SOURCE_DATE_EPOCH'): + tinfo.mtime = int(os.environ.get('SOURCE_DATE_EPOCH')) - fu = None - try: - fu = open(p, 'rb') - tar.addfile(tinfo, fileobj=fu) - finally: - if fu: - fu.close() + if os.path.isfile(p): + with open(p, 'rb') as f: + tar.addfile(tinfo, fileobj=f) + else: + tar.addfile(tinfo) def get_tar_prefix(self): + """ + Returns the base path for files added into the archive tar file + + :rtype: string + """ try: return self.tar_prefix except AttributeError: @@ -424,7 +451,8 @@ def get_tar_prefix(self): def get_arch_name(self): """ - Return the name of the archive to create. Change the default value by setting *arch_name*:: + Returns the archive file name. + Set the attribute *arch_name* to change the default value:: def dist(ctx): ctx.arch_name = 'ctx.tar.bz2' @@ -439,7 +467,7 @@ def dist(ctx): def get_base_name(self): """ - Return the default name of the main directory in the archive, which is set to *appname-version*. + Returns the default name of the main directory in the archive, which is set to *appname-version*. Set the attribute *base_name* to change the default value:: def dist(ctx): @@ -457,8 +485,8 @@ def dist(ctx): def get_excl(self): """ - Return the patterns to exclude for finding the files in the top-level directory. Set the attribute *excl* - to change the default value:: + Returns the patterns to exclude for finding the files in the top-level directory. + Set the attribute *excl* to change the default value:: def dist(ctx): ctx.excl = 'build **/*.o **/*.class' @@ -468,7 +496,7 @@ def dist(ctx): try: return self.excl except AttributeError: - self.excl = Node.exclude_regs + ' **/waf-1.8.* **/.waf-1.8* **/waf3-1.8.* **/.waf3-1.8* **/*~ **/*.rej **/*.orig **/*.pyc **/*.pyo **/*.bak **/*.swp **/.lock-w*' + self.excl = Node.exclude_regs + ' **/waf-2.* **/.waf-2.* **/waf3-2.* **/.waf3-2.* **/*~ **/*.rej **/*.orig **/*.pyc **/*.pyo **/*.bak **/*.swp **/.lock-w*' if Context.out_dir: nd = self.root.find_node(Context.out_dir) if nd: @@ -477,13 +505,13 @@ def dist(ctx): def get_files(self): """ - The files to package are searched automatically by :py:func:`waflib.Node.Node.ant_glob`. Set - *files* to prevent this behaviour:: + Files to package are searched automatically by :py:func:`waflib.Node.Node.ant_glob`. 
+ Set *files* to prevent this behaviour:: def dist(ctx): ctx.files = ctx.path.find_node('wscript') - The files are searched from the directory 'base_path', to change it, set:: + Files are also searched from the directory 'base_path', to change it, set:: def dist(ctx): ctx.base_path = path @@ -496,18 +524,12 @@ def dist(ctx): files = self.base_path.ant_glob('**/*', excl=self.get_excl()) return files - def dist(ctx): '''makes a tarball for redistributing the sources''' pass class DistCheck(Dist): - """ - Create an archive of the project, and try to build the project in a temporary directory:: - - $ waf distcheck - """ - + """creates an archive with dist, then tries to build it""" fun = 'distcheck' cmd = 'distcheck' @@ -519,32 +541,30 @@ def execute(self): self.archive() self.check() + def make_distcheck_cmd(self, tmpdir): + cfg = [] + if Options.options.distcheck_args: + cfg = shlex.split(Options.options.distcheck_args) + else: + cfg = [x for x in sys.argv if x.startswith('-')] + cmd = [sys.executable, sys.argv[0], 'configure', 'build', 'install', 'uninstall', '--destdir=' + tmpdir] + cfg + return cmd + def check(self): """ - Create the archive, uncompress it and try to build the project + Creates the archive, uncompresses it and tries to build the project """ import tempfile, tarfile - t = None - try: - t = tarfile.open(self.get_arch_name()) + with tarfile.open(self.get_arch_name()) as t: for x in t: t.extract(x) - finally: - if t: - t.close() - - cfg = [] - - if Options.options.distcheck_args: - cfg = shlex.split(Options.options.distcheck_args) - else: - cfg = [x for x in sys.argv if x.startswith('-')] instdir = tempfile.mkdtemp('.inst', self.get_base_name()) - ret = Utils.subprocess.Popen([sys.executable, sys.argv[0], 'configure', 'install', 'uninstall', '--destdir=' + instdir] + cfg, cwd=self.get_base_name()).wait() + cmd = self.make_distcheck_cmd(instdir) + ret = Utils.subprocess.Popen(cmd, cwd=self.get_base_name()).wait() if ret: - raise Errors.WafError('distcheck failed with code %i' % ret) + raise Errors.WafError('distcheck failed with code %r' % ret) if os.path.exists(instdir): raise Errors.WafError('distcheck succeeded, but files were left in %s' % instdir) @@ -556,23 +576,14 @@ def distcheck(ctx): '''checks if the project compiles (tarball from 'dist')''' pass -def update(ctx): - '''updates the plugins from the *waflib/extras* directory''' - lst = Options.options.files.split(',') - if not lst: - lst = [x for x in Utils.listdir(Context.waf_dir + '/waflib/extras') if x.endswith('.py')] - for x in lst: - tool = x.replace('.py', '') - try: - Configure.download_tool(tool, force=True, ctx=ctx) - except Errors.WafError: - Logs.error('Could not find the tool %s in the remote repository' % x) - def autoconfigure(execute_method): """ - Decorator used to set the commands that can be configured automatically + Decorator that enables context commands to run *configure* as needed. 
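The decorator only has an effect when autoconfiguration is enabled, which a project typically requests at the top of its wscript (a minimal sketch):

    from waflib import Configure

    Configure.autoconfig = True        # re-run 'configure' automatically when the configuration is stale
    #Configure.autoconfig = 'clobber'  # same, but restore the original configure options and launch directory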
""" def execute(self): + """ + Wraps :py:func:`waflib.Context.Context.execute` on the context class + """ if not Configure.autoconfig: return execute_method(self) @@ -580,7 +591,7 @@ def execute(self): do_config = False try: env.load(os.path.join(Context.top_dir, Options.lockfile)) - except Exception: + except EnvironmentError: Logs.warn('Configuring the project') do_config = True else: @@ -588,18 +599,33 @@ def execute(self): do_config = True else: h = 0 - for f in env['files']: - h = Utils.h_list((h, Utils.readf(f, 'rb'))) - do_config = h != env.hash + for f in env.files: + try: + h = Utils.h_list((h, Utils.readf(f, 'rb'))) + except EnvironmentError: + do_config = True + break + else: + do_config = h != env.hash if do_config: - Options.commands.insert(0, self.cmd) - Options.commands.insert(0, 'configure') + cmd = env.config_cmd or 'configure' if Configure.autoconfig == 'clobber': - Options.options.__dict__ = env.options - return - - return execute_method(self) + tmp = Options.options.__dict__ + launch_dir_tmp = Context.launch_dir + if env.options: + Options.options.__dict__ = env.options + Context.launch_dir = env.launch_dir + try: + run_command(cmd) + finally: + Options.options.__dict__ = tmp + Context.launch_dir = launch_dir_tmp + else: + run_command(cmd) + run_command(self.cmd) + else: + return execute_method(self) return execute Build.BuildContext.execute = autoconfigure(Build.BuildContext.execute) diff --git a/waflib/Task.py b/waflib/Task.py index b70532e5fb..c2d7c69503 100644 --- a/waflib/Task.py +++ b/waflib/Task.py @@ -1,12 +1,12 @@ #!/usr/bin/env python # encoding: utf-8 -# Thomas Nagy, 2005-2010 (ita) +# Thomas Nagy, 2005-2018 (ita) """ Tasks represent atomic operations such as processes. """ -import os, re, sys +import os, re, sys, tempfile, traceback from waflib import Utils, Logs, Errors # task states @@ -20,7 +20,10 @@ """The task execution returned a non-zero exit status""" EXCEPTION = 3 -"""An exception occured in the task execution""" +"""An exception occurred in the task execution""" + +CANCELED = 4 +"""A dependency for the task is missing so it was cancelled""" SKIPPED = 8 """The task did not have to be executed""" @@ -37,15 +40,21 @@ RUN_ME = -3 """The task must be executed""" +CANCEL_ME = -4 +"""The task cannot be executed because of a dependency problem""" + COMPILE_TEMPLATE_SHELL = ''' def f(tsk): env = tsk.env gen = tsk.generator bld = gen.bld - wd = getattr(tsk, 'cwd', None) + cwdx = tsk.get_cwd() p = env.get_flat + def to_list(xx): + if isinstance(xx, str): return [xx] + return xx tsk.last_cmd = cmd = \'\'\' %s \'\'\' % s - return tsk.exec_command(cmd, cwd=wd, env=env.env or None) + return tsk.exec_command(cmd, cwd=cwdx, env=env.env or None) ''' COMPILE_TEMPLATE_NOSHELL = ''' @@ -53,51 +62,72 @@ def f(tsk): env = tsk.env gen = tsk.generator bld = gen.bld - wd = getattr(tsk, 'cwd', None) + cwdx = tsk.get_cwd() def to_list(xx): if isinstance(xx, str): return [xx] return xx - tsk.last_cmd = lst = [] + def merge(lst1, lst2): + if lst1 and lst2: + return lst1[:-1] + [lst1[-1] + lst2[0]] + lst2[1:] + return lst1 + lst2 + lst = [] %s - lst = [x for x in lst if x] - return tsk.exec_command(lst, cwd=wd, env=env.env or None) + if '' in lst: + lst = [x for x in lst if x] + tsk.last_cmd = lst + return tsk.exec_command(lst, cwd=cwdx, env=env.env or None) +''' + +COMPILE_TEMPLATE_SIG_VARS = ''' +def f(tsk): + sig = tsk.generator.bld.hash_env_vars(tsk.env, tsk.vars) + tsk.m.update(sig) + env = tsk.env + gen = tsk.generator + bld = gen.bld + cwdx = tsk.get_cwd() + p = 
env.get_flat + buf = [] + %s + tsk.m.update(repr(buf).encode()) ''' classes = {} -"class tasks created by user scripts or Waf tools are kept in this dict name -> class object" +""" +The metaclass :py:class:`waflib.Task.store_task_type` stores all class tasks +created by user scripts or Waf tools to this dict. It maps class names to class objects. +""" class store_task_type(type): """ - Metaclass: store the task classes into :py:const:`waflib.Task.classes`, or to the dict pointed - by the class attribute 'register'. - The attribute 'run_str' will be processed to compute a method 'run' on the task class - The decorator :py:func:`waflib.Task.cache_outputs` is also applied to the class + Metaclass: store the task classes into the dict pointed by the + class attribute 'register' which defaults to :py:const:`waflib.Task.classes`, + + The attribute 'run_str' is compiled into a method 'run' bound to the task class. """ def __init__(cls, name, bases, dict): super(store_task_type, cls).__init__(name, bases, dict) name = cls.__name__ - if name.endswith('_task'): - name = name.replace('_task', '') - if name != 'evil' and name != 'TaskBase': - global classes - + if name != 'evil' and name != 'Task': if getattr(cls, 'run_str', None): # if a string is provided, convert it to a method (f, dvars) = compile_fun(cls.run_str, cls.shell) - cls.hcode = cls.run_str + cls.hcode = Utils.h_cmd(cls.run_str) cls.orig_run_str = cls.run_str # change the name of run_str or it is impossible to subclass with a function cls.run_str = None cls.run = f + # process variables cls.vars = list(set(cls.vars + dvars)) cls.vars.sort() + if cls.vars: + fun = compile_sig_vars(cls.vars) + if fun: + cls.sig_vars = fun elif getattr(cls, 'run', None) and not 'hcode' in cls.__dict__: # getattr(cls, 'hcode') would look in the upper classes - cls.hcode = Utils.h_fun(cls.run) - - if sys.hexversion > 0x3000000: - cls.hcode = cls.hcode.encode('iso8859-1', 'xmlcharrefreplace') + cls.hcode = Utils.h_cmd(cls.run) # be creative getattr(cls, 'register', classes)[name] = cls @@ -105,157 +135,246 @@ def __init__(cls, name, bases, dict): evil = store_task_type('evil', (object,), {}) "Base class provided to avoid writing a metaclass, so the code can run in python 2.6 and 3.x unmodified" -class TaskBase(evil): +class Task(evil): """ - Base class for all Waf tasks, which should be seen as an interface. - For illustration purposes, instances of this class will execute the attribute - 'fun' in :py:meth:`waflib.Task.TaskBase.run`. When in doubt, create - subclasses of :py:class:`waflib.Task.Task` instead. + Task objects represents actions to perform such as commands to execute by calling the `run` method. - Subclasses should override these methods: + Detecting when to execute a task occurs in the method :py:meth:`waflib.Task.Task.runnable_status`. - #. __str__: string to display to the user - #. runnable_status: ask the task if it should be run, skipped, or if we have to ask later - #. run: let threads execute the task - #. post_run: let threads update the data regarding the task (cache) + Detecting which tasks to execute is performed through a hash value returned by + :py:meth:`waflib.Task.Task.signature`. The task signature is persistent from build to build. 
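In practice task classes rarely use the metaclass directly; they declare a run_str scriptlet which store_task_type compiles into a run() method and folds into the class hash (hcode). A minimal sketch, assuming a COPY variable is set during configuration:

    from waflib import Task

    class copy_file(Task.Task):
        "Copies a file; the run_str below is compiled into run() and vars = ['COPY']"
        run_str = '${COPY} ${SRC} ${TGT}'
        color = 'BLUE'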
""" + vars = [] + """ConfigSet variables that should trigger a rebuild (class attribute used for :py:meth:`waflib.Task.Task.sig_vars`)""" + + always_run = False + """Specify whether task instances must always be executed or not (class attribute)""" + + shell = False + """Execute the command with the shell (class attribute)""" color = 'GREEN' """Color for the console display, see :py:const:`waflib.Logs.colors_lst`""" ext_in = [] - """File extensions that objects of this task class might use""" + """File extensions that objects of this task class may use""" ext_out = [] - """File extensions that objects of this task class might create""" + """File extensions that objects of this task class may create""" before = [] - """List of task class names to execute before instances of this class""" + """The instances of this class are executed before the instances of classes whose names are in this list""" after = [] - """List of task class names to execute after instances of this class""" + """The instances of this class are executed after the instances of classes whose names are in this list""" - hcode = '' + hcode = Utils.SIG_NIL """String representing an additional hash for the class representation""" + keep_last_cmd = False + """Whether to keep the last command executed on the instance after execution. + This may be useful for certain extensions but it can a lot of memory. + """ + + weight = 0 + """Optional weight to tune the priority for task instances. + The higher, the earlier. The weight only applies to single task objects.""" + + tree_weight = 0 + """Optional weight to tune the priority of task instances and whole subtrees. + The higher, the earlier.""" + + prio_order = 0 + """Priority order set by the scheduler on instances during the build phase. + You most likely do not need to set it. 
+ """ + + __slots__ = ('hasrun', 'generator', 'env', 'inputs', 'outputs', 'dep_nodes', 'run_after') + def __init__(self, *k, **kw): - """ - The base task class requires a task generator, which will be itself if missing - """ self.hasrun = NOT_RUN try: self.generator = kw['generator'] except KeyError: self.generator = self - def __repr__(self): - "for debugging purposes" - return '\n\t{task %r: %s %s}' % (self.__class__.__name__, id(self), str(getattr(self, 'fun', ''))) + self.env = kw['env'] + """:py:class:`waflib.ConfigSet.ConfigSet` object (make sure to provide one)""" - def __str__(self): - "string to display to the user" - if hasattr(self, 'fun'): - return self.fun.__name__ - return self.__class__.__name__ + self.inputs = [] + """List of input nodes, which represent the files used by the task instance""" - def __hash__(self): - "Very fast hashing scheme but not persistent (replace/implement in subclasses and see :py:meth:`waflib.Task.Task.uid`)" - return id(self) + self.outputs = [] + """List of output nodes, which represent the files created by the task instance""" - def keyword(self): - if hasattr(self, 'fun'): - return 'Function' - return 'Processing' + self.dep_nodes = [] + """List of additional nodes to depend on""" - def exec_command(self, cmd, **kw): - """ - Wrapper for :py:meth:`waflib.Context.Context.exec_command` which sets a current working directory to ``build.variant_dir`` + self.run_after = set() + """Set of tasks that must be executed before this one""" - :return: the return code - :rtype: int + def __lt__(self, other): + return self.priority() > other.priority() + def __le__(self, other): + return self.priority() >= other.priority() + def __gt__(self, other): + return self.priority() < other.priority() + def __ge__(self, other): + return self.priority() <= other.priority() + + def get_cwd(self): + """ + :return: current working directory + :rtype: :py:class:`waflib.Node.Node` """ bld = self.generator.bld - try: - if not kw.get('cwd', None): - kw['cwd'] = bld.cwd - except AttributeError: - bld.cwd = kw['cwd'] = bld.variant_dir - return bld.exec_command(cmd, **kw) + ret = getattr(self, 'cwd', None) or getattr(bld, 'cwd', bld.bldnode) + if isinstance(ret, str): + if os.path.isabs(ret): + ret = bld.root.make_node(ret) + else: + ret = self.generator.path.make_node(ret) + return ret - def runnable_status(self): + def quote_flag(self, x): + """ + Surround a process argument by quotes so that a list of arguments can be written to a file + + :param x: flag + :type x: string + :return: quoted flag + :rtype: string + """ + old = x + if '\\' in x: + x = x.replace('\\', '\\\\') + if '"' in x: + x = x.replace('"', '\\"') + if old != x or ' ' in x or '\t' in x or "'" in x: + x = '"%s"' % x + return x + + def priority(self): + """ + Priority of execution; the higher, the earlier + + :return: the priority value + :rtype: a tuple of numeric values + """ + return (self.weight + self.prio_order, - getattr(self.generator, 'tg_idx_count', 0)) + + def split_argfile(self, cmd): + """ + Splits a list of process commands into the executable part and its list of arguments + + :return: a tuple containing the executable first and then the rest of arguments + :rtype: tuple """ - State of the task + return ([cmd[0]], [self.quote_flag(x) for x in cmd[1:]]) - :return: a task state in :py:const:`waflib.Task.RUN_ME`, :py:const:`waflib.Task.SKIP_ME` or :py:const:`waflib.Task.ASK_LATER`. + def exec_command(self, cmd, **kw): + """ + Wrapper for :py:meth:`waflib.Context.Context.exec_command`. 
+ This version set the current working directory (``build.variant_dir``), + applies PATH settings (if self.env.PATH is provided), and can run long + commands through a temporary ``@argfile``. + + :param cmd: process command to execute + :type cmd: list of string (best) or string (process will use a shell) + :return: the return code :rtype: int + + Optional parameters: + + #. cwd: current working directory (Node or string) + #. stdout: set to None to prevent waf from capturing the process standard output + #. stderr: set to None to prevent waf from capturing the process standard error + #. timeout: timeout value (Python 3) """ - return RUN_ME + if not 'cwd' in kw: + kw['cwd'] = self.get_cwd() + + if hasattr(self, 'timeout'): + kw['timeout'] = self.timeout + + if self.env.PATH: + env = kw['env'] = dict(kw.get('env') or self.env.env or os.environ) + env['PATH'] = self.env.PATH if isinstance(self.env.PATH, str) else os.pathsep.join(self.env.PATH) + + if hasattr(self, 'stdout'): + kw['stdout'] = self.stdout + if hasattr(self, 'stderr'): + kw['stderr'] = self.stderr + + if not isinstance(cmd, str): + if Utils.is_win32: + # win32 compares the resulting length http://support.microsoft.com/kb/830473 + too_long = sum([len(arg) for arg in cmd]) + len(cmd) > 8192 + else: + # non-win32 counts the amount of arguments (200k) + too_long = len(cmd) > 200000 + + if too_long and getattr(self, 'allow_argsfile', True): + # Shunt arguments to a temporary file if the command is too long. + cmd, args = self.split_argfile(cmd) + try: + (fd, tmp) = tempfile.mkstemp() + os.write(fd, '\r\n'.join(args).encode()) + os.close(fd) + if Logs.verbose: + Logs.debug('argfile: @%r -> %r', tmp, args) + return self.generator.bld.exec_command(cmd + ['@' + tmp], **kw) + finally: + try: + os.remove(tmp) + except OSError: + # anti-virus and indexers can keep files open -_- + pass + return self.generator.bld.exec_command(cmd, **kw) def process(self): """ - Assume that the task has had a new attribute ``master`` which is an instance of :py:class:`waflib.Runner.Parallel`. - Execute the task and then put it back in the queue :py:attr:`waflib.Runner.Parallel.out` (may be replaced by subclassing). 
- """ - m = self.master - if m.stop: - m.out.put(self) - return + Runs the task and handles errors + :return: 0 or None if everything is fine + :rtype: integer + """ # remove the task signature immediately before it is executed - # in case of failure the task will be executed again + # so that the task will be executed again in case of failure try: del self.generator.bld.task_sigs[self.uid()] except KeyError: pass try: - self.generator.bld.returned_tasks.append(self) - self.log_display(self.generator.bld) ret = self.run() except Exception: - self.err_msg = Utils.ex_stack() + self.err_msg = traceback.format_exc() self.hasrun = EXCEPTION - - # TODO cleanup - m.error_handler(self) - m.out.put(self) - return - - if ret: - self.err_code = ret - self.hasrun = CRASHED else: + if ret: + self.err_code = ret + self.hasrun = CRASHED + else: + try: + self.post_run() + except Errors.WafError: + pass + except Exception: + self.err_msg = traceback.format_exc() + self.hasrun = EXCEPTION + else: + self.hasrun = SUCCESS + + if self.hasrun != SUCCESS and self.scan: + # rescan dependencies on next run try: - self.post_run() - except Errors.WafError: + del self.generator.bld.imp_sigs[self.uid()] + except KeyError: pass - except Exception: - self.err_msg = Utils.ex_stack() - self.hasrun = EXCEPTION - else: - self.hasrun = SUCCESS - if self.hasrun != SUCCESS: - m.error_handler(self) - - m.out.put(self) - - def run(self): - """ - Called by threads to execute the tasks. The default is empty and meant to be overridden in subclasses. - It is a bad idea to create nodes in this method (so, no node.ant_glob) - - :rtype: int - """ - if hasattr(self, 'fun'): - return self.fun(self) - return 0 - - def post_run(self): - "Update the cache files (executed by threads). Override in subclasses." - pass def log_display(self, bld): - "Write the execution status on the context logger" + "Writes the execution status on the context logger" if self.generator.bld.progress_bar == 3: return @@ -275,20 +394,17 @@ def log_display(self, bld): def display(self): """ - Return an execution status for the console, the progress bar, or the IDE output. + Returns an execution status for the console, the progress bar, or the IDE output. :rtype: string """ col1 = Logs.colors(self.color) col2 = Logs.colors.NORMAL - master = self.master + master = self.generator.bld.producer def cur(): # the current task position, computed as late as possible - tmp = -1 - if hasattr(master, 'ready'): - tmp -= master.ready.qsize() - return master.processed + tmp + return master.processed - master.ready.qsize() if self.generator.bld.progress_bar == 1: return self.generator.bld.progress_line(cur(), master.total, col1, col2) @@ -317,65 +433,60 @@ def cur(): kw += ' ' return fs % (cur(), total, kw, col1, s, col2) - def attr(self, att, default=None): - """ - Retrieve an attribute from the instance or from the class. 
- - :param att: variable name - :type att: string - :param default: default value - """ - ret = getattr(self, att, self) - if ret is self: return getattr(self.__class__, att, default) - return ret - def hash_constraints(self): """ - Identify a task type for all the constraints relevant for the scheduler: precedence, file production + Identifies a task type for all the constraints relevant for the scheduler: precedence, file production :return: a hash value :rtype: string """ - cls = self.__class__ - tup = (str(cls.before), str(cls.after), str(cls.ext_in), str(cls.ext_out), cls.__name__, cls.hcode) - h = hash(tup) - return h + return (tuple(self.before), tuple(self.after), tuple(self.ext_in), tuple(self.ext_out), self.__class__.__name__, self.hcode) def format_error(self): """ - Error message to display to the user when a build fails + Returns an error message to display the build failure reasons :rtype: string """ - msg = getattr(self, 'last_cmd', '') + if Logs.verbose: + msg = ': %r\n%r' % (self, getattr(self, 'last_cmd', '')) + else: + msg = ' (run with -v to display more information)' name = getattr(self.generator, 'name', '') if getattr(self, "err_msg", None): return self.err_msg elif not self.hasrun: return 'task in %r was not executed for some reason: %r' % (name, self) elif self.hasrun == CRASHED: + if isinstance(msg, str): + txt = msg + else: + txt = ' '.join(repr(x) if ' ' in x else x for x in msg) + try: - return ' -> task in %r failed (exit status %r): %r\n%r' % (name, self.err_code, self, msg) + return ' -> task in %r failed (exit status %r): %r\n%s' % (name, self.err_code, self, txt) except AttributeError: - return ' -> task in %r failed: %r\n%r' % (name, self, msg) + return ' -> task in %r failed: %r\n%s' % (name, self, txt) elif self.hasrun == MISSING: - return ' -> missing files in %r: %r\n%r' % (name, self, msg) + return ' -> missing files in %r%s' % (name, msg) + elif self.hasrun == CANCELED: + return ' -> %r canceled because of missing dependencies' % name else: return 'invalid status for task in %r: %r' % (name, self.hasrun) def colon(self, var1, var2): """ - Support code for scriptlet expressions such as ${FOO_ST:FOO} + Enable scriptlet expressions of the form ${FOO_ST:FOO} If the first variable (FOO_ST) is empty, then an empty list is returned The results will be slightly different if FOO_ST is a list, for example:: - env.FOO_ST = ['-a', '-b'] + env.FOO = ['p1', 'p2'] env.FOO_ST = '-I%s' # ${FOO_ST:FOO} returns ['-Ip1', '-Ip2'] - env.FOO = ['p1', 'p2'] + env.FOO_ST = ['-a', '-b'] # ${FOO_ST:FOO} returns ['-a', '-b', 'p1', '-a', '-b', 'p2'] """ @@ -396,45 +507,11 @@ def colon(self, var1, var2): lst.append(y) return lst -class Task(TaskBase): - """ - This class deals with the filesystem (:py:class:`waflib.Node.Node`). The method :py:class:`waflib.Task.Task.runnable_status` - uses a hash value (from :py:class:`waflib.Task.Task.signature`) which is persistent from build to build. When the value changes, - the task has to be executed. The method :py:class:`waflib.Task.Task.post_run` will assign the task signature to the output - nodes (if present). 
- """ - vars = [] - """Variables to depend on (class attribute used for :py:meth:`waflib.Task.Task.sig_vars`)""" - - shell = False - """Execute the command with the shell (class attribute)""" - - def __init__(self, *k, **kw): - TaskBase.__init__(self, *k, **kw) - - self.env = kw['env'] - """ConfigSet object (make sure to provide one)""" - - self.inputs = [] - """List of input nodes, which represent the files used by the task instance""" - - self.outputs = [] - """List of output nodes, which represent the files created by the task instance""" - - self.dep_nodes = [] - """List of additional nodes to depend on""" - - self.run_after = set([]) - """Set of tasks that must be executed before this one""" - - # Additionally, you may define the following - #self.dep_vars = 'PREFIX DATADIR' - def __str__(self): "string to display to the user" name = self.__class__.__name__ if self.outputs: - if (name.endswith('lib') or name.endswith('program')) or not self.inputs: + if name.endswith(('lib', 'program')) or not self.inputs: node = self.outputs[0] return node.path_from(node.ctx.launch_node()) if not (self.inputs or self.outputs): @@ -445,13 +522,16 @@ def __str__(self): src_str = ' '.join([a.path_from(a.ctx.launch_node()) for a in self.inputs]) tgt_str = ' '.join([a.path_from(a.ctx.launch_node()) for a in self.outputs]) - if self.outputs: sep = ' -> ' - else: sep = '' - return '%s: %s%s%s' % (self.__class__.__name__.replace('_task', ''), src_str, sep, tgt_str) + if self.outputs: + sep = ' -> ' + else: + sep = '' + return '%s: %s%s%s' % (self.__class__.__name__, src_str, sep, tgt_str) def keyword(self): + "Display keyword used to prettify the console outputs" name = self.__class__.__name__ - if name.endswith('lib') or name.endswith('program'): + if name.endswith(('lib', 'program')): return 'Linking' if len(self.inputs) == 1 and len(self.outputs) == 1: return 'Compiling' @@ -474,10 +554,10 @@ def __repr__(self): def uid(self): """ - Return an identifier used to determine if tasks are up-to-date. Since the + Returns an identifier used to determine if tasks are up-to-date. Since the identifier will be stored between executions, it must be: - - unique: no two tasks return the same value (for a given build context) + - unique for a task: no two tasks return the same value (for a given build context) - the same for a given task instance By default, the node paths, the class name, and the function are used @@ -492,44 +572,48 @@ def uid(self): try: return self.uid_ except AttributeError: - m = Utils.md5() + m = Utils.md5(self.__class__.__name__) up = m.update - up(self.__class__.__name__) for x in self.inputs + self.outputs: up(x.abspath()) self.uid_ = m.digest() return self.uid_ - def set_inputs(self, inp): """ - Append the nodes to the *inputs* + Appends the nodes to the *inputs* list :param inp: input nodes :type inp: node or list of nodes """ - if isinstance(inp, list): self.inputs += inp - else: self.inputs.append(inp) + if isinstance(inp, list): + self.inputs += inp + else: + self.inputs.append(inp) def set_outputs(self, out): """ - Append the nodes to the *outputs* + Appends the nodes to the *outputs* list :param out: output nodes :type out: node or list of nodes """ - if isinstance(out, list): self.outputs += out - else: self.outputs.append(out) + if isinstance(out, list): + self.outputs += out + else: + self.outputs.append(out) def set_run_after(self, task): """ - Run this task only after *task*. 
Affect :py:meth:`waflib.Task.runnable_status` - You probably want to use tsk.run_after.add(task) directly + Run this task only after the given *task*. + + Calling this method from :py:meth:`waflib.Task.Task.runnable_status` may cause + build deadlocks; see :py:meth:`waflib.Tools.fc.fc.runnable_status` for details. :param task: task :type task: :py:class:`waflib.Task.Task` """ - assert isinstance(task, TaskBase) + assert isinstance(task, Task) self.run_after.add(task) def signature(self): @@ -539,7 +623,7 @@ def signature(self): * explicit dependencies: files listed in the inputs (list of node objects) :py:meth:`waflib.Task.Task.sig_explicit_deps` * implicit dependencies: list of nodes returned by scanner methods (when present) :py:meth:`waflib.Task.Task.sig_implicit_deps` - * hashed data: variables/values read from task.__class__.vars/task.env :py:meth:`waflib.Task.Task.sig_vars` + * hashed data: variables/values read from task.vars/task.env :py:meth:`waflib.Task.Task.sig_vars` If the signature is expected to give a different result, clear the cache kept in ``self.cache_sig``:: @@ -549,12 +633,16 @@ def signature(self): sig = super(Task.Task, self).signature() delattr(self, 'cache_sig') return super(Task.Task, self).signature() + + :return: the signature value + :rtype: string or bytes """ - try: return self.cache_sig - except AttributeError: pass + try: + return self.cache_sig + except AttributeError: + pass - self.m = Utils.md5() - self.m.update(self.hcode) + self.m = Utils.md5(self.hcode) # explicit deps self.sig_explicit_deps() @@ -574,16 +662,22 @@ def signature(self): def runnable_status(self): """ - Override :py:meth:`waflib.Task.TaskBase.runnable_status` to determine if the task is ready - to be run (:py:attr:`waflib.Task.Task.run_after`) + Returns the Task status + + :return: a task state in :py:const:`waflib.Task.RUN_ME`, + :py:const:`waflib.Task.SKIP_ME`, :py:const:`waflib.Task.CANCEL_ME` or :py:const:`waflib.Task.ASK_LATER`. 
+ :rtype: int """ - #return 0 # benchmarking + bld = self.generator.bld + if bld.is_install < 0: + return SKIP_ME for t in self.run_after: if not t.hasrun: return ASK_LATER - - bld = self.generator.bld + elif t.hasrun < SKIPPED: + # a dependency has an error + return CANCEL_ME # first compute the signature try: @@ -596,105 +690,112 @@ def runnable_status(self): try: prev_sig = bld.task_sigs[key] except KeyError: - Logs.debug("task: task %r must run as it was never run before or the task code changed" % self) + Logs.debug('task: task %r must run: it was never run before or the task code changed', self) + return RUN_ME + + if new_sig != prev_sig: + Logs.debug('task: task %r must run: the task signature changed', self) return RUN_ME # compare the signatures of the outputs for node in self.outputs: - try: - if node.sig != new_sig: - return RUN_ME - except AttributeError: - Logs.debug("task: task %r must run as the output nodes do not exist" % self) + sig = bld.node_sigs.get(node) + if not sig: + Logs.debug('task: task %r must run: an output node has no signature', self) + return RUN_ME + if sig != key: + Logs.debug('task: task %r must run: an output node was produced by another task', self) + return RUN_ME + if not node.exists(): + Logs.debug('task: task %r must run: an output node does not exist', self) return RUN_ME - if new_sig != prev_sig: - return RUN_ME - return SKIP_ME + return (self.always_run and RUN_ME) or SKIP_ME def post_run(self): """ - Called after successful execution to update the cache data :py:class:`waflib.Node.Node` sigs - and :py:attr:`waflib.Build.BuildContext.task_sigs`. - - The node signature is obtained from the task signature, but the output nodes may also get the signature - of their contents. See the class decorator :py:func:`waflib.Task.update_outputs` if you need this behaviour. + Called after successful execution to record that the task has run by + updating the entry in :py:attr:`waflib.Build.BuildContext.task_sigs`. """ bld = self.generator.bld - sig = self.signature() - for node in self.outputs: - # check if the node exists .. - try: - os.stat(node.abspath()) - except OSError: + if not node.exists(): self.hasrun = MISSING self.err_msg = '-> missing file: %r' % node.abspath() raise Errors.WafError(self.err_msg) - - # important, store the signature for the next run - node.sig = node.cache_sig = sig - - bld.task_sigs[self.uid()] = self.cache_sig + bld.node_sigs[node] = self.uid() # make sure this task produced the files in question + bld.task_sigs[self.uid()] = self.signature() + if not self.keep_last_cmd: + try: + del self.last_cmd + except AttributeError: + pass def sig_explicit_deps(self): """ - Used by :py:meth:`waflib.Task.Task.signature`, hash :py:attr:`waflib.Task.Task.inputs` + Used by :py:meth:`waflib.Task.Task.signature`; it hashes :py:attr:`waflib.Task.Task.inputs` and :py:attr:`waflib.Task.Task.dep_nodes` signatures. 
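The deps_man table consulted below is typically populated from a wscript through bld.add_manual_dependency, so that extra files or values are hashed along with the explicit inputs; a minimal sketch (file names are illustrative):

    def build(bld):
        bld.program(source='main.c', target='app')
        # tasks using main.c are also rebuilt whenever VERSION changes
        bld.add_manual_dependency(
            bld.path.find_node('main.c'),
            bld.path.find_node('VERSION'))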
- - :rtype: hash value """ bld = self.generator.bld upd = self.m.update # the inputs for x in self.inputs + self.dep_nodes: - try: - upd(x.get_bld_sig()) - except (AttributeError, TypeError): - raise Errors.WafError('Missing node signature for %r (required by %r)' % (x, self)) + upd(x.get_bld_sig()) # manual dependencies, they can slow down the builds if bld.deps_man: additional_deps = bld.deps_man for x in self.inputs + self.outputs: try: - d = additional_deps[id(x)] + d = additional_deps[x] except KeyError: continue for v in d: - if isinstance(v, bld.root.__class__): - try: - v = v.get_bld_sig() - except AttributeError: - raise Errors.WafError('Missing node signature for %r (required by %r)' % (v, self)) - elif hasattr(v, '__call__'): - v = v() # dependency is a function, call it + try: + v = v.get_bld_sig() + except AttributeError: + if hasattr(v, '__call__'): + v = v() # dependency is a function, call it upd(v) - return self.m.digest() - - def sig_vars(self): + def sig_deep_inputs(self): """ - Used by :py:meth:`waflib.Task.Task.signature`, hash :py:attr:`waflib.Task.Task.env` variables/values + Enable rebuilds on input files task signatures. Not used by default. - :rtype: hash value + Example: hashes of output programs can be unchanged after being re-linked, + despite the libraries being different. This method can thus prevent stale unit test + results (waf_unit_test.py). + + Hashing input file timestamps is another possibility for the implementation. + This may cause unnecessary rebuilds when input tasks are frequently executed. + Here is an implementation example:: + + lst = [] + for node in self.inputs + self.dep_nodes: + st = os.stat(node.abspath()) + lst.append(st.st_mtime) + lst.append(st.st_size) + self.m.update(Utils.h_list(lst)) + + The downside of the implementation is that it absolutely requires all build directory + files to be declared within the current build. """ bld = self.generator.bld - env = self.env - upd = self.m.update + lst = [bld.task_sigs[bld.node_sigs[node]] for node in (self.inputs + self.dep_nodes) if node.is_bld()] + self.m.update(Utils.h_list(lst)) - # dependencies on the environment vars - act_sig = bld.hash_env_vars(env, self.__class__.vars) - upd(act_sig) - - # additional variable dependencies, if provided - dep_vars = getattr(self, 'dep_vars', None) - if dep_vars: - upd(bld.hash_env_vars(env, dep_vars)) + def sig_vars(self): + """ + Used by :py:meth:`waflib.Task.Task.signature`; it hashes :py:attr:`waflib.Task.Task.env` variables/values + When overriding this method, and if scriptlet expressions are used, make sure to follow + the code in :py:meth:`waflib.Task.Task.compile_sig_vars` to enable dependencies on scriptlet results. - return self.m.digest() + This method may be replaced on subclasses by the metaclass to force dependencies on scriptlet code. + """ + sig = self.generator.bld.hash_env_vars(self.env, self.vars) + self.m.update(sig) scan = None """ @@ -708,29 +809,26 @@ def sig_vars(self): from waflib.Task import Task class mytask(Task): def scan(self, node): - return ((), ()) + return ([], []) - The first and second lists are stored in :py:attr:`waflib.Build.BuildContext.node_deps` and + The first and second lists in the tuple are stored in :py:attr:`waflib.Build.BuildContext.node_deps` and :py:attr:`waflib.Build.BuildContext.raw_deps` respectively. """ def sig_implicit_deps(self): """ - Used by :py:meth:`waflib.Task.Task.signature` hashes node signatures obtained by scanning for dependencies (:py:meth:`waflib.Task.Task.scan`). 
+ Used by :py:meth:`waflib.Task.Task.signature`; it hashes node signatures + obtained by scanning for dependencies (:py:meth:`waflib.Task.Task.scan`). The exception :py:class:`waflib.Errors.TaskRescan` is thrown - when a file has changed. When this occurs, :py:meth:`waflib.Task.Task.signature` is called - once again, and this method will be executed once again, this time calling :py:meth:`waflib.Task.Task.scan` - for searching the dependencies. - - :rtype: hash value + when a file has changed. In this case, the method :py:meth:`waflib.Task.Task.signature` is called + once again, and return here to call :py:meth:`waflib.Task.Task.scan` and searching for dependencies. """ - bld = self.generator.bld # get the task signatures from previous runs key = self.uid() - prev = bld.task_sigs.get((key, 'imp'), []) + prev = bld.imp_sigs.get(key, []) # for issue #379 if prev: @@ -740,74 +838,56 @@ def sig_implicit_deps(self): except Errors.TaskNotReady: raise except EnvironmentError: - # when a file was renamed (IOError usually), remove the stale nodes (headers in folders without source files) + # when a file was renamed, remove the stale nodes (headers in folders without source files) # this will break the order calculation for headers created during the build in the source directory (should be uncommon) # the behaviour will differ when top != out for x in bld.node_deps.get(self.uid(), []): - if not x.is_bld(): + if not x.is_bld() and not x.exists(): try: - os.stat(x.abspath()) - except OSError: - try: - del x.parent.children[x.name] - except KeyError: - pass - del bld.task_sigs[(key, 'imp')] + del x.parent.children[x.name] + except KeyError: + pass + del bld.imp_sigs[key] raise Errors.TaskRescan('rescan') # no previous run or the signature of the dependencies has changed, rescan the dependencies - (nodes, names) = self.scan() + (bld.node_deps[key], bld.raw_deps[key]) = self.scan() if Logs.verbose: - Logs.debug('deps: scanner for %s returned %s %s' % (str(self), str(nodes), str(names))) - - # store the dependencies in the cache - bld.node_deps[key] = nodes - bld.raw_deps[key] = names - - # might happen - self.are_implicit_nodes_ready() + Logs.debug('deps: scanner for %s: %r; unresolved: %r', self, bld.node_deps[key], bld.raw_deps[key]) # recompute the signature and return it try: - bld.task_sigs[(key, 'imp')] = sig = self.compute_sig_implicit_deps() - except Exception: - if Logs.verbose: - for k in bld.node_deps.get(self.uid(), []): - try: - k.get_bld_sig() - except Exception: - Logs.warn('Missing signature for node %r (may cause rebuilds)' % k) - else: - return sig + bld.imp_sigs[key] = self.compute_sig_implicit_deps() + except EnvironmentError: + for k in bld.node_deps.get(self.uid(), []): + if not k.exists(): + Logs.warn('Dependency %r for %r is missing: check the task declaration and the build order!', k, self) + raise def compute_sig_implicit_deps(self): """ Used by :py:meth:`waflib.Task.Task.sig_implicit_deps` for computing the actual hash of the :py:class:`waflib.Node.Node` returned by the scanner. 
- :return: hash value - :rtype: string + :return: a hash value for the implicit dependencies + :rtype: string or bytes """ - upd = self.m.update - - bld = self.generator.bld - self.are_implicit_nodes_ready() # scanner returns a node that does not have a signature # just *ignore* the error and let them figure out from the compiler output # waf -k behaviour - for k in bld.node_deps.get(self.uid(), []): + for k in self.generator.bld.node_deps.get(self.uid(), []): upd(k.get_bld_sig()) return self.m.digest() def are_implicit_nodes_ready(self): """ - For each node returned by the scanner, see if there is a task behind it, and force the build order + For each node returned by the scanner, see if there is a task that creates it, + and infer the build order - The performance impact on null builds is nearly invisible (1.66s->1.86s), but this is due to - agressive caching (1.86s->28s) + This has a low performance impact on null builds (1.86s->1.66s) thanks to caching (28s->1.86s) """ bld = self.generator.bld try: @@ -815,10 +895,11 @@ def are_implicit_nodes_ready(self): except AttributeError: bld.dct_implicit_nodes = cache = {} + # one cache per build group try: - dct = cache[bld.cur] + dct = cache[bld.current_group] except KeyError: - dct = cache[bld.cur] = {} + dct = cache[bld.current_group] = {} for tsk in bld.cur_tasks: for x in tsk.outputs: dct[x] = tsk @@ -839,11 +920,10 @@ def uid(self): try: return self.uid_ except AttributeError: - m = Utils.md5() + m = Utils.md5(self.__class__.__name__.encode('latin-1', 'xmlcharrefreplace')) up = m.update - up(self.__class__.__name__.encode('iso8859-1', 'xmlcharrefreplace')) for x in self.inputs + self.outputs: - up(x.abspath().encode('iso8859-1', 'xmlcharrefreplace')) + up(x.abspath().encode('latin-1', 'xmlcharrefreplace')) self.uid_ = m.digest() return self.uid_ uid.__doc__ = Task.uid.__doc__ @@ -851,7 +931,7 @@ def uid(self): def is_before(t1, t2): """ - Return a non-zero value if task t1 is to be executed before task t2:: + Returns a non-zero value if task t1 is to be executed before task t2:: t1.ext_out = '.h' t2.ext_in = '.h' @@ -859,10 +939,10 @@ def is_before(t1, t2): t1.before = ['t2'] waflib.Task.is_before(t1, t2) # True - :param t1: task - :type t1: :py:class:`waflib.Task.TaskBase` - :param t2: task - :type t2: :py:class:`waflib.Task.TaskBase` + :param t1: Task object + :type t1: :py:class:`waflib.Task.Task` + :param t2: Task object + :type t2: :py:class:`waflib.Task.Task` """ to_list = Utils.to_list for k in to_list(t2.ext_in): @@ -879,30 +959,53 @@ def is_before(t1, t2): def set_file_constraints(tasks): """ - Adds tasks to the task 'run_after' attribute based on the task inputs and outputs + Updates the ``run_after`` attribute of all tasks based on the task inputs and outputs :param tasks: tasks - :type tasks: list of :py:class:`waflib.Task.TaskBase` + :type tasks: list of :py:class:`waflib.Task.Task` """ ins = Utils.defaultdict(set) outs = Utils.defaultdict(set) for x in tasks: - for a in getattr(x, 'inputs', []) + getattr(x, 'dep_nodes', []): - ins[id(a)].add(x) - for a in getattr(x, 'outputs', []): - outs[id(a)].add(x) + for a in x.inputs: + ins[a].add(x) + for a in x.dep_nodes: + ins[a].add(x) + for a in x.outputs: + outs[a].add(x) links = set(ins.keys()).intersection(outs.keys()) for k in links: for a in ins[k]: a.run_after.update(outs[k]) + +class TaskGroup(object): + """ + Wrap nxm task order constraints into a single object + to prevent the creation of large list/set objects + + This is an optimization + """ + def __init__(self, prev, 
next): + self.prev = prev + self.next = next + self.done = False + + def get_hasrun(self): + for k in self.prev: + if not k.hasrun: + return NOT_RUN + return SUCCESS + + hasrun = property(get_hasrun, None) + def set_precedence_constraints(tasks): """ - Add tasks to the task 'run_after' attribute based on the after/before/ext_out/ext_in attributes + Updates the ``run_after`` attribute of all tasks based on the after/before/ext_out/ext_in attributes :param tasks: tasks - :type tasks: list of :py:class:`waflib.Task.TaskBase` + :type tasks: list of :py:class:`waflib.Task.Task` """ cstr_groups = Utils.defaultdict(list) for x in tasks: @@ -928,13 +1031,20 @@ def set_precedence_constraints(tasks): else: continue - aval = set(cstr_groups[keys[a]]) - for x in cstr_groups[keys[b]]: - x.run_after.update(aval) + a = cstr_groups[keys[a]] + b = cstr_groups[keys[b]] + + if len(a) < 2 or len(b) < 2: + for x in b: + x.run_after.update(a) + else: + group = TaskGroup(set(a), set(b)) + for x in b: + x.run_after.add(group) def funex(c): """ - Compile a function by 'exec' + Compiles a scriptlet expression into a Python function :param c: function to compile :type c: string @@ -945,118 +1055,183 @@ def funex(c): exec(c, dc) return dc['f'] -reg_act = re.compile(r"(?P\\)|(?P\$\$)|(?P\$\{(?P\w+)(?P.*?)\})", re.M) +re_cond = re.compile(r'(?P\w+)|(?P\|)|(?P&)') +re_novar = re.compile(r'^(SRC|TGT)\W+.*?$') +reg_act = re.compile(r'(?P\\)|(?P\$\$)|(?P\$\{(?P\w+)(?P.*?)\})', re.M) def compile_fun_shell(line): """ - Create a compiled function to execute a process with the shell - WARNING: this method may disappear anytime, so use compile_fun instead + Creates a compiled function to execute a process through a sub-shell """ - extr = [] def repl(match): g = match.group - if g('dollar'): return "$" - elif g('backslash'): return '\\\\' - elif g('subst'): extr.append((g('var'), g('code'))); return "%s" + if g('dollar'): + return "$" + elif g('backslash'): + return '\\\\' + elif g('subst'): + extr.append((g('var'), g('code'))) + return "%s" return None - line = reg_act.sub(repl, line) or line + dvars = [] + def add_dvar(x): + if x not in dvars: + dvars.append(x) + + def replc(m): + # performs substitutions and populates dvars + if m.group('and'): + return ' and ' + elif m.group('or'): + return ' or ' + else: + x = m.group('var') + add_dvar(x) + return 'env[%r]' % x parm = [] - dvars = [] app = parm.append for (var, meth) in extr: if var == 'SRC': - if meth: app('tsk.inputs%s' % meth) - else: app('" ".join([a.path_from(bld.bldnode) for a in tsk.inputs])') + if meth: + app('tsk.inputs%s' % meth) + else: + app('" ".join([a.path_from(cwdx) for a in tsk.inputs])') elif var == 'TGT': - if meth: app('tsk.outputs%s' % meth) - else: app('" ".join([a.path_from(bld.bldnode) for a in tsk.outputs])') + if meth: + app('tsk.outputs%s' % meth) + else: + app('" ".join([a.path_from(cwdx) for a in tsk.outputs])') elif meth: if meth.startswith(':'): + add_dvar(var) m = meth[1:] if m == 'SRC': - m = '[a.path_from(bld.bldnode) for a in tsk.inputs]' + m = '[a.path_from(cwdx) for a in tsk.inputs]' elif m == 'TGT': - m = '[a.path_from(bld.bldnode) for a in tsk.outputs]' - elif m[:3] not in ('tsk', 'gen', 'bld'): - dvars.extend([var, meth[1:]]) - m = '%r' % m + m = '[a.path_from(cwdx) for a in tsk.outputs]' + elif re_novar.match(m): + m = '[tsk.inputs%s]' % m[3:] + elif re_novar.match(m): + m = '[tsk.outputs%s]' % m[3:] + else: + add_dvar(m) + if m[:3] not in ('tsk', 'gen', 'bld'): + m = '%r' % m app('" ".join(tsk.colon(%r, %s))' % (var, m)) + elif 
meth.startswith('?'): + # In A?B|C output env.A if one of env.B or env.C is non-empty + expr = re_cond.sub(replc, meth[1:]) + app('p(%r) if (%s) else ""' % (var, expr)) else: - app('%s%s' % (var, meth)) + call = '%s%s' % (var, meth) + add_dvar(call) + app(call) else: - if not var in dvars: dvars.append(var) + add_dvar(var) app("p('%s')" % var) - if parm: parm = "%% (%s) " % (',\n\t\t'.join(parm)) - else: parm = '' + if parm: + parm = "%% (%s) " % (',\n\t\t'.join(parm)) + else: + parm = '' c = COMPILE_TEMPLATE_SHELL % (line, parm) - - Logs.debug('action: %s' % c.strip().splitlines()) + Logs.debug('action: %s', c.strip().splitlines()) return (funex(c), dvars) +reg_act_noshell = re.compile(r"(?P\s+)|(?P\$\{(?P\w+)(?P.*?)\})|(?P([^$ \t\n\r\f\v]|\$\$)+)", re.M) def compile_fun_noshell(line): """ - Create a compiled function to execute a process without the shell - WARNING: this method may disappear anytime, so use compile_fun instead + Creates a compiled function to execute a process without a sub-shell """ - extr = [] - def repl(match): - g = match.group - if g('dollar'): return "$" - elif g('backslash'): return '\\' - elif g('subst'): extr.append((g('var'), g('code'))); return "<<|@|>>" - return None - - line2 = reg_act.sub(repl, line) - params = line2.split('<<|@|>>') - assert(extr) - buf = [] dvars = [] + merge = False app = buf.append - for x in range(len(extr)): - params[x] = params[x].strip() - if params[x]: - app("lst.extend(%r)" % params[x].split()) - (var, meth) = extr[x] - if var == 'SRC': - if meth: app('lst.append(tsk.inputs%s)' % meth) - else: app("lst.extend([a.path_from(bld.bldnode) for a in tsk.inputs])") - elif var == 'TGT': - if meth: app('lst.append(tsk.outputs%s)' % meth) - else: app("lst.extend([a.path_from(bld.bldnode) for a in tsk.outputs])") - elif meth: - if meth.startswith(':'): - m = meth[1:] - if m == 'SRC': - m = '[a.path_from(bld.bldnode) for a in tsk.inputs]' - elif m == 'TGT': - m = '[a.path_from(bld.bldnode) for a in tsk.outputs]' - elif m[:3] not in ('tsk', 'gen', 'bld'): - dvars.extend([var, m]) - m = '%r' % m - app('lst.extend(tsk.colon(%r, %s))' % (var, m)) - else: - app('lst.extend(gen.to_list(%s%s))' % (var, meth)) - else: - app('lst.extend(to_list(env[%r]))' % var) - if not var in dvars: dvars.append(var) - if extr: - if params[-1]: - app("lst.extend(%r)" % params[-1].split()) + def add_dvar(x): + if x not in dvars: + dvars.append(x) + + def replc(m): + # performs substitutions and populates dvars + if m.group('and'): + return ' and ' + elif m.group('or'): + return ' or ' + else: + x = m.group('var') + add_dvar(x) + return 'env[%r]' % x + + for m in reg_act_noshell.finditer(line): + if m.group('space'): + merge = False + continue + elif m.group('text'): + app('[%r]' % m.group('text').replace('$$', '$')) + elif m.group('subst'): + var = m.group('var') + code = m.group('code') + if var == 'SRC': + if code: + app('[tsk.inputs%s]' % code) + else: + app('[a.path_from(cwdx) for a in tsk.inputs]') + elif var == 'TGT': + if code: + app('[tsk.outputs%s]' % code) + else: + app('[a.path_from(cwdx) for a in tsk.outputs]') + elif code: + if code.startswith(':'): + # a composed variable ${FOO:OUT} + add_dvar(var) + m = code[1:] + if m == 'SRC': + m = '[a.path_from(cwdx) for a in tsk.inputs]' + elif m == 'TGT': + m = '[a.path_from(cwdx) for a in tsk.outputs]' + elif re_novar.match(m): + m = '[tsk.inputs%s]' % m[3:] + elif re_novar.match(m): + m = '[tsk.outputs%s]' % m[3:] + else: + add_dvar(m) + if m[:3] not in ('tsk', 'gen', 'bld'): + m = '%r' % m + app('tsk.colon(%r, 
%s)' % (var, m)) + elif code.startswith('?'): + # In A?B|C output env.A if one of env.B or env.C is non-empty + expr = re_cond.sub(replc, code[1:]) + app('to_list(env[%r] if (%s) else [])' % (var, expr)) + else: + # plain code such as ${tsk.inputs[0].abspath()} + call = '%s%s' % (var, code) + add_dvar(call) + app('to_list(%s)' % call) + else: + # a plain variable such as # a plain variable like ${AR} + app('to_list(env[%r])' % var) + add_dvar(var) + if merge: + tmp = 'merge(%s, %s)' % (buf[-2], buf[-1]) + del buf[-1] + buf[-1] = tmp + merge = True # next turn + + buf = ['lst.extend(%s)' % x for x in buf] fun = COMPILE_TEMPLATE_NOSHELL % "\n\t".join(buf) - Logs.debug('action: %s' % fun.strip().splitlines()) + Logs.debug('action: %s', fun.strip().splitlines()) return (funex(fun), dvars) def compile_fun(line, shell=False): """ - Parse a string expression such as "${CC} ${SRC} -o ${TGT}" and return a pair containing: + Parses a string expression such as '${CC} ${SRC} -o ${TGT}' and returns a pair containing: - * the function created (compiled) for use as :py:meth:`waflib.Task.TaskBase.run` - * the list of variables that imply a dependency from self.env + * The function created (compiled) for use as :py:meth:`waflib.Task.Task.run` + * The list of variables that must cause rebuilds when *env* data is modified for example:: @@ -1066,18 +1241,66 @@ def compile_fun(line, shell=False): def build(bld): bld(source='wscript', rule='echo "foo\\${SRC[0].name}\\bar"') - The env variables (CXX, ..) on the task must not hold dicts (order) - The reserved keywords *TGT* and *SRC* represent the task input and output nodes + The env variables (CXX, ..) on the task must not hold dicts so as to preserve a consistent order. + The reserved keywords ``TGT`` and ``SRC`` represent the task input and output nodes """ - if line.find('<') > 0 or line.find('>') > 0 or line.find('&&') > 0: - shell = True - + if isinstance(line, str): + if line.find('<') > 0 or line.find('>') > 0 or line.find('&&') > 0: + shell = True + else: + dvars_lst = [] + funs_lst = [] + for x in line: + if isinstance(x, str): + fun, dvars = compile_fun(x, shell) + dvars_lst += dvars + funs_lst.append(fun) + else: + # assume a function to let through + funs_lst.append(x) + def composed_fun(task): + for x in funs_lst: + ret = x(task) + if ret: + return ret + return None + return composed_fun, dvars_lst if shell: return compile_fun_shell(line) else: return compile_fun_noshell(line) +def compile_sig_vars(vars): + """ + This method produces a sig_vars method suitable for subclasses that provide + scriptlet code in their run_str code. + If no such method can be created, this method returns None. + + The purpose of the sig_vars method returned is to ensures + that rebuilds occur whenever the contents of the expression changes. 
+ This is the case B below:: + + import time + # case A: regular variables + tg = bld(rule='echo ${FOO}') + tg.env.FOO = '%s' % time.time() + # case B + bld(rule='echo ${gen.foo}', foo='%s' % time.time()) + + :param vars: env variables such as CXXFLAGS or gen.foo + :type vars: list of string + :return: A sig_vars method relevant for dependencies if adequate, else None + :rtype: A function, or None in most cases + """ + buf = [] + for x in sorted(vars): + if x[:3] in ('tsk', 'gen', 'bld'): + buf.append('buf.append(%s)' % x) + if buf: + return funex(COMPILE_TEMPLATE_SIG_VARS % '\n\t'.join(buf)) + return None + def task_factory(name, func=None, vars=None, color='GREEN', ext_in=[], ext_out=[], before=[], after=[], shell=False, scan=None): """ Returns a new task subclass with the function ``run`` compiled from the line given. @@ -1099,94 +1322,90 @@ def task_factory(name, func=None, vars=None, color='GREEN', ext_in=[], ext_out=[ 'vars': vars or [], # function arguments are static, and this one may be modified by the class 'color': color, 'name': name, - 'ext_in': Utils.to_list(ext_in), - 'ext_out': Utils.to_list(ext_out), - 'before': Utils.to_list(before), - 'after': Utils.to_list(after), 'shell': shell, 'scan': scan, } - if isinstance(func, str): + if isinstance(func, str) or isinstance(func, tuple): params['run_str'] = func else: params['run'] = func cls = type(Task)(name, (Task,), params) - global classes classes[name] = cls - return cls + if ext_in: + cls.ext_in = Utils.to_list(ext_in) + if ext_out: + cls.ext_out = Utils.to_list(ext_out) + if before: + cls.before = Utils.to_list(before) + if after: + cls.after = Utils.to_list(after) -def always_run(cls): - """ - Task class decorator + return cls - Set all task instances of this class to be executed whenever a build is started - The task signature is calculated, but the result of the comparation between - task signatures is bypassed +def deep_inputs(cls): """ - old = cls.runnable_status - def always(self): - ret = old(self) - if ret == SKIP_ME: - ret = RUN_ME - return ret - cls.runnable_status = always + Task class decorator to enable rebuilds on input files task signatures + """ + def sig_explicit_deps(self): + Task.sig_explicit_deps(self) + Task.sig_deep_inputs(self) + cls.sig_explicit_deps = sig_explicit_deps return cls -def update_outputs(cls): +TaskBase = Task +"Provided for compatibility reasons, TaskBase should not be used" + +class TaskSemaphore(object): """ - Task class decorator + Task semaphores provide a simple and efficient way of throttling the amount of + a particular task to run concurrently. The throttling value is capped + by the amount of maximum jobs, so for example, a `TaskSemaphore(10)` + has no effect in a `-j2` build. - If you want to create files in the source directory. For example, to keep *foo.txt* in the source - directory, create it first and declare:: + Task semaphores are typically specified on the task class level:: - def build(bld): - bld(rule='cp ${SRC} ${TGT}', source='wscript', target='foo.txt', update_outputs=True) + class compile(waflib.Task.Task): + semaphore = waflib.Task.TaskSemaphore(2) + run_str = 'touch ${TGT}' + + Task semaphores are meant to be used by the build scheduler in the main + thread, so there are no guarantees of thread safety. 
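As a small usage sketch to complement the class-level example above, a semaphore could also be attached to an existing task class from a wscript; the class name 'cxxprogram', the limit of 1 and the target names are illustrative only, and this assumes the scheduler honours the class-level ``semaphore`` attribute exactly as described::

    from waflib import Task

    def build(bld):
        # hypothetical: serialize all C++ link tasks, e.g. to limit memory pressure
        Task.classes['cxxprogram'].semaphore = Task.TaskSemaphore(1)
        bld.program(source='main.cpp', target='app')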
""" - old_post_run = cls.post_run - def post_run(self): - old_post_run(self) - for node in self.outputs: - node.sig = node.cache_sig = Utils.h_file(node.abspath()) - self.generator.bld.task_sigs[node.abspath()] = self.uid() # issue #1017 - cls.post_run = post_run + def __init__(self, num): + """ + :param num: maximum value of concurrent tasks + :type num: int + """ + self.num = num + self.locking = set() + self.waiting = set() + def is_locked(self): + """Returns True if this semaphore cannot be acquired by more tasks""" + return len(self.locking) >= self.num - old_runnable_status = cls.runnable_status - def runnable_status(self): - status = old_runnable_status(self) - if status != RUN_ME: - return status + def acquire(self, tsk): + """ + Mark the semaphore as used by the given task (not re-entrant). - try: - # by default, we check that the output nodes have the signature of the task - # perform a second check, returning 'SKIP_ME' as we are expecting that - # the signatures do not match - bld = self.generator.bld - prev_sig = bld.task_sigs[self.uid()] - if prev_sig == self.signature(): - for x in self.outputs: - if not x.is_child_of(bld.bldnode): - # special case of files created in the source directory - # hash them here for convenience -_- - x.sig = Utils.h_file(x.abspath()) - if not x.sig or bld.task_sigs[x.abspath()] != self.uid(): - return RUN_ME - return SKIP_ME - except OSError: - pass - except IOError: - pass - except KeyError: - pass - except IndexError: - pass - except AttributeError: - pass - return RUN_ME - cls.runnable_status = runnable_status + :param tsk: task object + :type tsk: :py:class:`waflib.Task.Task` + :raises: :py:class:`IndexError` in case the resource is already acquired + """ + if self.is_locked(): + raise IndexError('Cannot lock more %r' % self.locking) + self.locking.add(tsk) - return cls + def release(self, tsk): + """ + Mark the semaphore as unused by the given task. + + :param tsk: task object + :type tsk: :py:class:`waflib.Task.Task` + :raises: :py:class:`KeyError` in case the resource is not acquired by the task + """ + self.locking.remove(tsk) diff --git a/waflib/TaskGen.py b/waflib/TaskGen.py index b1110080de..32468f03d3 100644 --- a/waflib/TaskGen.py +++ b/waflib/TaskGen.py @@ -1,18 +1,16 @@ #!/usr/bin/env python # encoding: utf-8 -# Thomas Nagy, 2005-2010 (ita) +# Thomas Nagy, 2005-2018 (ita) """ Task generators The class :py:class:`waflib.TaskGen.task_gen` encapsulates the creation of task objects (low-level code) The instances can have various parameters, but the creation of task nodes (Task.py) -is always postponed. To achieve this, various methods are called from the method "apply" - - +is deferred. To achieve this, various methods are called from the method "apply" """ -import copy, re, os +import copy, re, os, functools from waflib import Task, Utils, Logs, Errors, ConfigSet, Node feats = Utils.defaultdict(set) @@ -22,7 +20,7 @@ class task_gen(object): """ - Instances of this class create :py:class:`waflib.Task.TaskBase` when + Instances of this class create :py:class:`waflib.Task.Task` when calling the method :py:meth:`waflib.TaskGen.task_gen.post` from the main thread. 
A few notes: @@ -34,42 +32,28 @@ class task_gen(object): """ mappings = Utils.ordered_iter_dict() - """Mappings are global file extension mappings, they are retrieved in the order of definition""" + """Mappings are global file extension mappings that are retrieved in the order of definition""" - prec = Utils.defaultdict(list) - """Dict holding the precedence rules for task generator methods""" + prec = Utils.defaultdict(set) + """Dict that holds the precedence execution rules for task generator methods""" def __init__(self, *k, **kw): """ - The task generator objects predefine various attributes (source, target) for possible + Task generator objects predefine various attributes (source, target) for possible processing by process_rule (make-like rules) or process_source (extensions, misc methods) - The tasks are stored on the attribute 'tasks'. They are created by calling methods - listed in self.meths *or* referenced in the attribute features - A topological sort is performed to ease the method re-use. + Tasks are stored on the attribute 'tasks'. They are created by calling methods + listed in ``self.meths`` or referenced in the attribute ``features`` + A topological sort is performed to execute the methods in correct order. - The extra key/value elements passed in kw are set as attributes + The extra key/value elements passed in ``kw`` are set as attributes """ - - # so we will have to play with directed acyclic graphs - # detect cycles, etc - self.source = '' + self.source = [] self.target = '' self.meths = [] """ - List of method names to execute (it is usually a good idea to avoid touching this) - """ - - self.prec = Utils.defaultdict(list) - """ - Precedence table for sorting the methods in self.meths - """ - - self.mappings = {} - """ - List of mappings {extension -> function} for processing files by extension - This is very rarely used, so we do not use an ordered dict here + List of method names to execute (internal) """ self.features = [] @@ -79,7 +63,7 @@ def __init__(self, *k, **kw): self.tasks = [] """ - List of tasks created. 
+ Tasks created are added to this list """ if not 'bld' in kw: @@ -90,33 +74,52 @@ def __init__(self, *k, **kw): else: self.bld = kw['bld'] self.env = self.bld.env.derive() - self.path = self.bld.path # emulate chdir when reading scripts + self.path = kw.get('path', self.bld.path) # by default, emulate chdir when reading scripts - # provide a unique id + # Provide a unique index per folder + # This is part of a measure to prevent output file name collisions + path = self.path.abspath() try: - self.idx = self.bld.idx[id(self.path)] = self.bld.idx.get(id(self.path), 0) + 1 + self.idx = self.bld.idx[path] = self.bld.idx.get(path, 0) + 1 except AttributeError: self.bld.idx = {} - self.idx = self.bld.idx[id(self.path)] = 1 + self.idx = self.bld.idx[path] = 1 + + # Record the global task generator count + try: + self.tg_idx_count = self.bld.tg_idx_count = self.bld.tg_idx_count + 1 + except AttributeError: + self.tg_idx_count = self.bld.tg_idx_count = 1 for key, val in kw.items(): setattr(self, key, val) def __str__(self): - """for debugging purposes""" + """Debugging helper""" return "" % (self.name, self.path.abspath()) def __repr__(self): - """for debugging purposes""" + """Debugging helper""" lst = [] - for x in self.__dict__.keys(): + for x in self.__dict__: if x not in ('env', 'bld', 'compiled_tasks', 'tasks'): lst.append("%s=%s" % (x, repr(getattr(self, x)))) return "bld(%s) in %s" % (", ".join(lst), self.path.abspath()) + def get_cwd(self): + """ + Current working directory for the task generator, defaults to the build directory. + This is still used in a few places but it should disappear at some point as the classes + define their own working directory. + + :rtype: :py:class:`waflib.Node.Node` + """ + return self.bld.bldnode + def get_name(self): """ - If not set, the name is computed from the target name:: + If the attribute ``name`` is not set on the instance, + the name is computed from the target name:: def build(bld): x = bld(name='foo') @@ -143,18 +146,20 @@ def set_name(self, name): def to_list(self, val): """ - Ensure that a parameter is a list + Ensures that a parameter is a list, see :py:func:`waflib.Utils.to_list` :type val: string or list of string :param val: input to return as a list :rtype: list """ - if isinstance(val, str): return val.split() - else: return val + if isinstance(val, str): + return val.split() + else: + return val def post(self): """ - Create task objects. The following operations are performed: + Creates tasks for this task generators. The following operations are performed: #. The body of this method is called only once and sets the attribute ``posted`` #. The attribute ``features`` is used to add more methods in ``self.meths`` @@ -162,27 +167,25 @@ def post(self): #. The methods are then executed in order #. 
The tasks created are added to :py:attr:`waflib.TaskGen.task_gen.tasks` """ - - # we could add a decorator to let the task run once, but then python 2.3 will be difficult to support if getattr(self, 'posted', None): - #error("OBJECT ALREADY POSTED" + str( self)) return False self.posted = True keys = set(self.meths) + keys.update(feats['*']) # add the methods listed in the features self.features = Utils.to_list(self.features) - for x in self.features + ['*']: + for x in self.features: st = feats[x] - if not st: - if not x in Task.classes: - Logs.warn('feature %r does not exist - bind at least one method to it' % x) - keys.update(list(st)) # ironpython 2.7 wants the cast to list + if st: + keys.update(st) + elif not x in Task.classes: + Logs.warn('feature %r does not exist - bind at least one method to it?', x) # copy the precedence table prec = {} - prec_tbl = self.prec or task_gen.prec + prec_tbl = self.prec for x in prec_tbl: if x in keys: prec[x] = prec_tbl[x] @@ -191,17 +194,19 @@ def post(self): tmp = [] for a in keys: for x in prec.values(): - if a in x: break + if a in x: + break else: tmp.append(a) - tmp.sort() + tmp.sort(reverse=True) # topological sort out = [] while tmp: e = tmp.pop() - if e in keys: out.append(e) + if e in keys: + out.append(e) try: nlst = prec[e] except KeyError: @@ -214,46 +219,52 @@ def post(self): break else: tmp.append(x) + tmp.sort(reverse=True) if prec: - raise Errors.WafError('Cycle detected in the method execution %r' % prec) - out.reverse() + buf = ['Cycle detected in the method execution:'] + for k, v in prec.items(): + buf.append('- %s after %s' % (k, [x for x in v if x in prec])) + raise Errors.WafError('\n'.join(buf)) self.meths = out # then we run the methods in order - Logs.debug('task_gen: posting %s %d' % (self, id(self))) + Logs.debug('task_gen: posting %s %d', self, id(self)) for x in out: try: v = getattr(self, x) except AttributeError: raise Errors.WafError('%r is not a valid task generator method' % x) - Logs.debug('task_gen: -> %s (%d)' % (x, id(self))) + Logs.debug('task_gen: -> %s (%d)', x, id(self)) v() - Logs.debug('task_gen: posted %s' % self.name) + Logs.debug('task_gen: posted %s', self.name) return True def get_hook(self, node): """ + Returns the ``@extension`` method to call for a Node of a particular extension. + :param node: Input file to process :type node: :py:class:`waflib.Tools.Node.Node` :return: A method able to process the input node by looking at the extension :rtype: function """ name = node.name - if self.mappings: - for k in self.mappings: + for k in self.mappings: + try: if name.endswith(k): return self.mappings[k] - for k in task_gen.mappings: - if name.endswith(k): - return task_gen.mappings[k] - raise Errors.WafError("File %r has no mapping in %r (have you forgotten to load a waf tool?)" % (node, task_gen.mappings.keys())) + except TypeError: + # regexps objects + if k.match(name): + return self.mappings[k] + keys = list(self.mappings.keys()) + raise Errors.WafError("File %r has no mapping in %r (load a waf tool?)" % (node, keys)) def create_task(self, name, src=None, tgt=None, **kw): """ - Wrapper for creating task instances. The classes are retrieved from the - context class if possible, then from the global dict Task.classes. + Creates task instances. 
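A sketch of a custom task generator method wired into this ordering machinery and calling ``create_task``; the feature name, the task class and the output file name are invented for illustration::

    from waflib import Task
    from waflib.TaskGen import feature, after_method

    Task.task_factory('stamp', 'touch ${TGT}')   # small rule-compiled task class

    @feature('stamp')
    @after_method('process_source')
    def add_stamp(self):
        # create_task() instantiates Task.classes['stamp'] and appends it to self.tasks
        self.create_task('stamp', tgt=self.path.find_or_declare('stamp.txt'))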
:param name: task class name :type name: string @@ -262,7 +273,7 @@ def create_task(self, name, src=None, tgt=None, **kw): :param tgt: output nodes :type tgt: list of :py:class:`waflib.Tools.Node.Node` :return: A task object - :rtype: :py:class:`waflib.Task.TaskBase` + :rtype: :py:class:`waflib.Task.Task` """ task = Task.classes[name](env=self.env.derive(), generator=self) if src: @@ -275,7 +286,7 @@ def create_task(self, name, src=None, tgt=None, **kw): def clone(self, env): """ - Make a copy of a task generator. Once the copy is made, it is necessary to ensure that the + Makes a copy of a task generator. Once the copy is made, it is necessary to ensure that the it does not create the same output files as the original, or the same files may be compiled several times. @@ -304,7 +315,7 @@ def clone(self, env): def declare_chain(name='', rule=None, reentrant=None, color='BLUE', ext_in=[], ext_out=[], before=[], after=[], decider=None, scan=None, install_path=None, shell=False): """ - Create a new mapping and a task class for processing files by extension. + Creates a new mapping and a task class for processing files by extension. See Tools/flex.py for an example. :param name: name for the task class @@ -323,7 +334,7 @@ def declare_chain(name='', rule=None, reentrant=None, color='BLUE', :type before: list of string :param after: execute instances of this task after classes of the given names :type after: list of string - :param decider: if present, use it to create the output nodes for the task + :param decider: if present, function that returns a list of output file extensions (overrides ext_out for output files, but not for the build order) :type decider: function :param scan: scanner function for the task :type scan: function @@ -337,14 +348,13 @@ def declare_chain(name='', rule=None, reentrant=None, color='BLUE', cls = Task.task_factory(name, rule, color=color, ext_in=ext_in, ext_out=ext_out, before=before, after=after, scan=scan, shell=shell) def x_file(self, node): - ext = decider and decider(self, node) or cls.ext_out if ext_in: _ext_in = ext_in[0] tsk = self.create_task(name, node) cnt = 0 - keys = set(self.mappings.keys()) | set(self.__class__.mappings.keys()) + ext = decider(self, node) if decider else cls.ext_out for x in ext: k = node.change_ext(x, ext_in=_ext_in) tsk.outputs.append(k) @@ -354,14 +364,14 @@ def x_file(self, node): self.source.append(k) else: # reinject downstream files into the build - for y in keys: # ~ nfile * nextensions :-/ + for y in self.mappings: # ~ nfile * nextensions :-/ if k.name.endswith(y): self.source.append(k) break cnt += 1 if install_path: - self.bld.install_files(install_path, tsk.outputs) + self.install_task = self.add_install_files(install_to=install_path, install_from=tsk.outputs) return tsk for x in cls.ext_in: @@ -370,7 +380,7 @@ def x_file(self, node): def taskgen_method(func): """ - Decorator: register a method as a task generator method. + Decorator that registers method as a task generator method. 
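A hedged sketch of ``declare_chain`` for a hypothetical code generator; the tool variable FROB, the file extensions and the re-injection count are all invented, so treat this as an outline rather than a working tool::

    from waflib.TaskGen import declare_chain

    declare_chain(
        name      = 'frob',
        rule      = '${FROB} ${SRC} -o ${TGT}',
        ext_in    = ['.frob'],
        ext_out   = ['.c'],
        reentrant = 1)   # assumed: re-inject the generated .c file into process_source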
The function must accept a task generator as first parameter:: from waflib.TaskGen import taskgen_method @@ -387,10 +397,10 @@ def mymethod(self): def feature(*k): """ - Decorator: register a task generator method that will be executed when the - object attribute 'feature' contains the corresponding key(s):: + Decorator that registers a task generator method that will be executed when the + object attribute ``feature`` contains the corresponding key(s):: - from waflib.Task import feature + from waflib.TaskGen import feature @feature('myfeature') def myfunction(self): print('that is my feature!') @@ -409,7 +419,7 @@ def deco(func): def before_method(*k): """ - Decorator: register a task generator method which will be executed + Decorator that registera task generator method which will be executed before the functions of given name(s):: from waflib.TaskGen import feature, before @@ -429,16 +439,14 @@ def build(bld): def deco(func): setattr(task_gen, func.__name__, func) for fun_name in k: - if not func.__name__ in task_gen.prec[fun_name]: - task_gen.prec[fun_name].append(func.__name__) - #task_gen.prec[fun_name].sort() + task_gen.prec[func.__name__].add(fun_name) return func return deco before = before_method def after_method(*k): """ - Decorator: register a task generator method which will be executed + Decorator that registers a task generator method which will be executed after the functions of given name(s):: from waflib.TaskGen import feature, after @@ -458,16 +466,14 @@ def build(bld): def deco(func): setattr(task_gen, func.__name__, func) for fun_name in k: - if not fun_name in task_gen.prec[func.__name__]: - task_gen.prec[func.__name__].append(fun_name) - #task_gen.prec[func.__name__].sort() + task_gen.prec[fun_name].add(func.__name__) return func return deco after = after_method def extension(*k): """ - Decorator: register a task generator method which will be invoked during + Decorator that registers a task generator method which will be invoked during the processing of source files for the extension given:: from waflib import Task @@ -486,14 +492,11 @@ def deco(func): return func return deco -# --------------------------------------------------------------- -# The following methods are task generator methods commonly used -# they are almost examples, the rest of waf core does not depend on them - @taskgen_method def to_nodes(self, lst, path=None): """ - Convert the input list into a list of nodes. + Flatten the input list of string/nodes/lists into a list of nodes. + It is used by :py:func:`waflib.TaskGen.process_source` and :py:func:`waflib.TaskGen.process_rule`. It is designed for source files, for folders, see :py:func:`waflib.Tools.ccroot.to_incnodes`: @@ -510,21 +513,23 @@ def to_nodes(self, lst, path=None): if isinstance(lst, Node.Node): lst = [lst] - # either a list or a string, convert to a list of nodes for x in Utils.to_list(lst): if isinstance(x, str): node = find(x) - else: + elif hasattr(x, 'name'): node = x + else: + tmp.extend(self.to_nodes(x)) + continue if not node: - raise Errors.WafError("source not found: %r in %r" % (x, self)) + raise Errors.WafError('source not found: %r in %r' % (x, self)) tmp.append(node) return tmp @feature('*') def process_source(self): """ - Process each element in the attribute ``source`` by extension. + Processes each element in the attribute ``source`` by extension. #. The *source* list is converted through :py:meth:`waflib.TaskGen.to_nodes` to a list of :py:class:`waflib.Node.Node` first. #. 
File extensions are mapped to methods having the signature: ``def meth(self, node)`` by :py:meth:`waflib.TaskGen.extension` @@ -540,10 +545,29 @@ def process_source(self): @before_method('process_source') def process_rule(self): """ - Process the attribute ``rule``. When present, :py:meth:`waflib.TaskGen.process_source` is disabled:: + Processes the attribute ``rule``. When present, :py:meth:`waflib.TaskGen.process_source` is disabled:: def build(bld): bld(rule='cp ${SRC} ${TGT}', source='wscript', target='bar.txt') + + Main attributes processed: + + * rule: command to execute, it can be a tuple of strings for multiple commands + * chmod: permissions for the resulting files (integer value such as Utils.O755) + * shell: set to False to execute the command directly (default is True to use a shell) + * scan: scanner function + * vars: list of variables to trigger rebuilds, such as CFLAGS + * cls_str: string to display when executing the task + * cls_keyword: label to display when executing the task + * cache_rule: by default, try to re-use similar classes, set to False to disable + * source: list of Node or string objects representing the source files required by this task + * target: list of Node or string objects representing the files that this task creates + * cwd: current working directory (Node or string) + * stdout: standard output, set to None to prevent waf from capturing the text + * stderr: standard error, set to None to prevent waf from capturing the text + * timeout: timeout for command execution (Python 3) + * always: whether to always run the command (False by default) + * deep_inputs: whether the task must depend on the input file tasks too (False by default) """ if not getattr(self, 'rule', None): return @@ -557,44 +581,81 @@ def build(bld): except AttributeError: cache = self.bld.cache_rule_attr = {} + chmod = getattr(self, 'chmod', None) + shell = getattr(self, 'shell', True) + color = getattr(self, 'color', 'BLUE') + scan = getattr(self, 'scan', None) + _vars = getattr(self, 'vars', []) + cls_str = getattr(self, 'cls_str', None) + cls_keyword = getattr(self, 'cls_keyword', None) + use_cache = getattr(self, 'cache_rule', 'True') + deep_inputs = getattr(self, 'deep_inputs', False) + + scan_val = has_deps = hasattr(self, 'deps') + if scan: + scan_val = id(scan) + + key = Utils.h_list((name, self.rule, chmod, shell, color, cls_str, cls_keyword, scan_val, _vars, deep_inputs)) + cls = None - if getattr(self, 'cache_rule', 'True'): + if use_cache: try: - cls = cache[(name, self.rule)] + cls = cache[key] except KeyError: pass if not cls: - cls = Task.task_factory(name, self.rule, - getattr(self, 'vars', []), - shell=getattr(self, 'shell', True), color=getattr(self, 'color', 'BLUE'), - scan = getattr(self, 'scan', None)) - if getattr(self, 'scan', None): + rule = self.rule + if chmod is not None: + def chmod_fun(tsk): + for x in tsk.outputs: + os.chmod(x.abspath(), tsk.generator.chmod) + if isinstance(rule, tuple): + rule = list(rule) + rule.append(chmod_fun) + rule = tuple(rule) + else: + rule = (rule, chmod_fun) + + cls = Task.task_factory(name, rule, _vars, shell=shell, color=color) + + if cls_str: + setattr(cls, '__str__', self.cls_str) + + if cls_keyword: + setattr(cls, 'keyword', self.cls_keyword) + + if deep_inputs: + Task.deep_inputs(cls) + + if scan: cls.scan = self.scan - elif getattr(self, 'deps', None): + elif has_deps: def scan(self): - nodes = [] - for x in self.generator.to_list(getattr(self.generator, 'deps', None)): - node = self.generator.path.find_resource(x) - 
if not node: - self.generator.bld.fatal('Could not find %r (was it declared?)' % x) - nodes.append(node) + deps = getattr(self.generator, 'deps', None) + nodes = self.generator.to_nodes(deps) return [nodes, []] cls.scan = scan - if getattr(self, 'update_outputs', None): - Task.update_outputs(cls) + if use_cache: + cache[key] = cls + + # now create one instance + tsk = self.create_task(name) + + for x in ('after', 'before', 'ext_in', 'ext_out'): + setattr(tsk, x, getattr(self, x, [])) - if getattr(self, 'always', None): - Task.always_run(cls) + if hasattr(self, 'stdout'): + tsk.stdout = self.stdout - for x in ('after', 'before', 'ext_in', 'ext_out'): - setattr(cls, x, getattr(self, x, [])) + if hasattr(self, 'stderr'): + tsk.stderr = self.stderr - if getattr(self, 'cache_rule', 'True'): - cache[(name, self.rule)] = cls + if getattr(self, 'timeout', None): + tsk.timeout = self.timeout - # now create one instance - tsk = self.create_task(name) + if getattr(self, 'always', None): + tsk.always_run = True if getattr(self, 'target', None): if isinstance(self.target, str): @@ -608,7 +669,8 @@ def scan(self): x.parent.mkdir() # if a node was given, create the required folders tsk.outputs.append(x) if getattr(self, 'install_path', None): - self.bld.install_files(self.install_path, tsk.outputs) + self.install_task = self.add_install_files(install_to=self.install_path, + install_from=tsk.outputs, chmod=getattr(self, 'chmod', Utils.O644)) if getattr(self, 'source', None): tsk.inputs = self.to_nodes(self.source) @@ -618,10 +680,16 @@ def scan(self): if getattr(self, 'cwd', None): tsk.cwd = self.cwd + if isinstance(tsk.run, functools.partial): + # Python documentation says: "partial objects defined in classes + # behave like static methods and do not transform into bound + # methods during instance attribute look-up." + tsk.run = functools.partial(tsk.run, tsk) + @feature('seq') def sequence_order(self): """ - Add a strict sequential constraint between the tasks generated by task generators. + Adds a strict sequential constraint between the tasks generated by task generators. It works because task generators are posted in order. It will not post objects which belong to other folders. @@ -655,32 +723,44 @@ def sequence_order(self): self.bld.prev = self -re_m4 = re.compile('@(\w+)@', re.M) +re_m4 = re.compile(r'@(\w+)@', re.M) class subst_pc(Task.Task): """ - Create *.pc* files from *.pc.in*. The task is executed whenever an input variable used + Creates *.pc* files from *.pc.in*. The task is executed whenever an input variable used in the substitution changes. 
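To make the rule attributes enumerated above more concrete, a hedged wscript sketch; the command, file names and the COPYFLAGS variable are placeholders::

    from waflib import Utils

    def build(bld):
        bld(rule   = 'cp ${SRC} ${TGT}',
            source = 'wscript',
            target = 'copy.txt',
            chmod  = Utils.O755,       # permissions applied to the outputs
            vars   = ['COPYFLAGS'],    # changing env.COPYFLAGS triggers a rebuild
            always = True)             # re-run the command on every build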
""" + def force_permissions(self): + "Private for the time being, we will probably refactor this into run_str=[run1,chmod]" + if getattr(self.generator, 'chmod', None): + for x in self.outputs: + os.chmod(x.abspath(), self.generator.chmod) + def run(self): "Substitutes variables in a .in file" if getattr(self.generator, 'is_copy', None): - self.outputs[0].write(self.inputs[0].read('rb'), 'wb') - if getattr(self.generator, 'chmod', None): - os.chmod(self.outputs[0].abspath(), self.generator.chmod) + for i, x in enumerate(self.outputs): + x.write(self.inputs[i].read('rb'), 'wb') + stat = os.stat(self.inputs[i].abspath()) # Preserve mtime of the copy + os.utime(self.outputs[i].abspath(), (stat.st_atime, stat.st_mtime)) + self.force_permissions() return None if getattr(self.generator, 'fun', None): - return self.generator.fun(self) + ret = self.generator.fun(self) + if not ret: + self.force_permissions() + return ret - code = self.inputs[0].read(encoding=getattr(self.generator, 'encoding', 'ISO8859-1')) + code = self.inputs[0].read(encoding=getattr(self.generator, 'encoding', 'latin-1')) if getattr(self.generator, 'subst_fun', None): code = self.generator.subst_fun(self, code) if code is not None: - self.outputs[0].write(code, encoding=getattr(self.generator, 'encoding', 'ISO8859-1')) - return + self.outputs[0].write(code, encoding=getattr(self.generator, 'encoding', 'latin-1')) + self.force_permissions() + return None # replace all % by %% to prevent errors by % signs code = code.replace('%', '%%') @@ -693,7 +773,6 @@ def repl(match): lst.append(g(1)) return "%%(%s)s" % g(1) return '' - global re_m4 code = getattr(self.generator, 're_m4', re_m4).sub(repl, code) try: @@ -709,15 +788,16 @@ def repl(match): d[x] = tmp code = code % d - self.outputs[0].write(code, encoding=getattr(self.generator, 'encoding', 'ISO8859-1')) - self.generator.bld.raw_deps[self.uid()] = self.dep_vars = lst + self.outputs[0].write(code, encoding=getattr(self.generator, 'encoding', 'latin-1')) + self.generator.bld.raw_deps[self.uid()] = lst # make sure the signature is updated - try: delattr(self, 'cache_sig') - except AttributeError: pass + try: + delattr(self, 'cache_sig') + except AttributeError: + pass - if getattr(self.generator, 'chmod', None): - os.chmod(self.outputs[0].abspath(), self.generator.chmod) + self.force_permissions() def sig_vars(self): """ @@ -747,13 +827,14 @@ def sig_vars(self): @extension('.pc.in') def add_pcfile(self, node): """ - Process *.pc.in* files to *.pc*. Install the results to ``${PREFIX}/lib/pkgconfig/`` + Processes *.pc.in* files to *.pc*. 
Installs the results to ``${PREFIX}/lib/pkgconfig/`` by default def build(bld): bld(source='foo.pc.in', install_path='${LIBDIR}/pkgconfig/') """ tsk = self.create_task('subst_pc', node, node.change_ext('.pc', '.pc.in')) - self.bld.install_files(getattr(self, 'install_path', '${LIBDIR}/pkgconfig/'), tsk.outputs) + self.install_task = self.add_install_files( + install_to=getattr(self, 'install_path', '${LIBDIR}/pkgconfig/'), install_from=tsk.outputs) class subst(subst_pc): pass @@ -762,7 +843,7 @@ class subst(subst_pc): @before_method('process_source', 'process_rule') def process_subst(self): """ - Define a transformation that substitutes the contents of *source* files to *target* files:: + Defines a transformation that substitutes the contents of *source* files to *target* files:: def build(bld): bld( @@ -797,7 +878,6 @@ def build(bld): a = self.path.find_node(x) b = self.path.get_bld().make_node(y) if not os.path.isfile(b.abspath()): - b.sig = None b.parent.mkdir() else: if isinstance(x, str): @@ -810,27 +890,24 @@ def build(bld): b = y if not a: - raise Errors.WafError('cound not find %r for %r' % (x, self)) + raise Errors.WafError('could not find %r for %r' % (x, self)) - has_constraints = False tsk = self.create_task('subst', a, b) for k in ('after', 'before', 'ext_in', 'ext_out'): val = getattr(self, k, None) if val: - has_constraints = True setattr(tsk, k, val) # paranoid safety measure for the general case foo.in->foo.h with ambiguous dependencies - if not has_constraints: - global HEADER_EXTS - for xt in HEADER_EXTS: - if b.name.endswith(xt): - tsk.before = [k for k in ('c', 'cxx') if k in Task.classes] - break + for xt in HEADER_EXTS: + if b.name.endswith(xt): + tsk.ext_out = tsk.ext_out + ['.h'] + break inst_to = getattr(self, 'install_path', None) if inst_to: - self.bld.install_files(inst_to, b, chmod=getattr(self, 'chmod', Utils.O644)) + self.install_task = self.add_install_files(install_to=inst_to, + install_from=b, chmod=getattr(self, 'chmod', Utils.O644)) self.source = [] diff --git a/waflib/Tools/__init__.py b/waflib/Tools/__init__.py index c8a3c34928..079df358f5 100644 --- a/waflib/Tools/__init__.py +++ b/waflib/Tools/__init__.py @@ -1,3 +1,3 @@ #!/usr/bin/env python # encoding: utf-8 -# Thomas Nagy, 2005-2010 (ita) +# Thomas Nagy, 2005-2018 (ita) diff --git a/waflib/Tools/ar.py b/waflib/Tools/ar.py index aac39c0ca0..b39b645926 100644 --- a/waflib/Tools/ar.py +++ b/waflib/Tools/ar.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # encoding: utf-8 -# Thomas Nagy, 2006-2010 (ita) +# Thomas Nagy, 2006-2018 (ita) # Ralf Habacker, 2006 (rh) """ @@ -16,7 +16,7 @@ def find_ar(conf): conf.load('ar') def configure(conf): - """Find the ar program and set the default flags in ``conf.env.ARFLAGS``""" + """Finds the ar program and sets the default flags in ``conf.env.ARFLAGS``""" conf.find_program('ar', var='AR') conf.add_os_flags('ARFLAGS') if not conf.env.ARFLAGS: diff --git a/waflib/Tools/asm.py b/waflib/Tools/asm.py index 46955c826b..1d34ddaca7 100644 --- a/waflib/Tools/asm.py +++ b/waflib/Tools/asm.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # encoding: utf-8 -# Thomas Nagy, 2008-2010 (ita) +# Thomas Nagy, 2008-2018 (ita) """ Assembly support, used by tools such as gas and nasm @@ -34,23 +34,54 @@ def build(bld): target = 'asmtest') """ -import os, sys -from waflib import Task, Utils -import waflib.Task +import re +from waflib import Errors, Logs, Task from waflib.Tools.ccroot import link_task, stlink_task -from waflib.TaskGen import extension, feature +from waflib.TaskGen import extension 
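A minimal sketch of the ``subst`` feature described above; the ``config.h.in`` file and the ``@VERSION@`` placeholder are hypothetical::

    def build(bld):
        bld(features = 'subst',
            source   = 'config.h.in',   # e.g. contains: #define VERSION "@VERSION@"
            target   = 'config.h',
            VERSION  = '1.0')           # generator attributes fill in the @...@ markers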
+from waflib.Tools import c_preproc + +re_lines = re.compile( + '^[ \t]*(?:%)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef)[ \t]*(.*)\r*$', + re.IGNORECASE | re.MULTILINE) + +class asm_parser(c_preproc.c_parser): + def filter_comments(self, node): + code = node.read() + code = c_preproc.re_nl.sub('', code) + code = c_preproc.re_cpp.sub(c_preproc.repl, code) + return re_lines.findall(code) class asm(Task.Task): """ - Compile asm files by gas/nasm/yasm/... + Compiles asm files by gas/nasm/yasm/... """ color = 'BLUE' - run_str = '${AS} ${ASFLAGS} ${ASMPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${AS_SRC_F}${SRC} ${AS_TGT_F}${TGT}' + run_str = '${AS} ${ASFLAGS} ${ASMPATH_ST:INCPATHS} ${ASMDEFINES_ST:DEFINES} ${AS_SRC_F}${SRC} ${AS_TGT_F}${TGT}' + + def scan(self): + if self.env.ASM_NAME == 'gas': + return c_preproc.scan(self) + elif self.env.ASM_NAME == 'nasm': + Logs.warn('The Nasm dependency scanner is incomplete!') + + try: + incn = self.generator.includes_nodes + except AttributeError: + raise Errors.WafError('%r is missing the "asm" feature' % self.generator) + + if c_preproc.go_absolute: + nodepaths = incn + else: + nodepaths = [x for x in incn if x.is_child_of(x.ctx.srcnode) or x.is_child_of(x.ctx.bldnode)] + + tmp = asm_parser(nodepaths) + tmp.start(self.inputs[0], self.env) + return (tmp.nodes, tmp.names) @extension('.s', '.S', '.asm', '.ASM', '.spp', '.SPP') def asm_hook(self, node): """ - Bind the asm extension to the asm task + Binds the asm extension to the asm task :param node: input file :type node: :py:class:`waflib.Node.Node` @@ -58,18 +89,19 @@ def asm_hook(self, node): return self.create_compiled_task('asm', node) class asmprogram(link_task): - "Link object files into a c program" + "Links object files into a c program" run_str = '${ASLINK} ${ASLINKFLAGS} ${ASLNK_TGT_F}${TGT} ${ASLNK_SRC_F}${SRC}' ext_out = ['.bin'] inst_to = '${BINDIR}' class asmshlib(asmprogram): - "Link object files into a c shared library" + "Links object files into a c shared library" inst_to = '${LIBDIR}' class asmstlib(stlink_task): - "Link object files into a c static library" + "Links object files into a c static library" pass # do not remove def configure(conf): - conf.env['ASMPATH_ST'] = '-I%s' + conf.env.ASMPATH_ST = '-I%s' + conf.env.ASMDEFINES_ST = '-D%s' diff --git a/waflib/Tools/bison.py b/waflib/Tools/bison.py index 9b90317489..eef56dcdd7 100644 --- a/waflib/Tools/bison.py +++ b/waflib/Tools/bison.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # encoding: utf-8 # John O'Meara, 2006 -# Thomas Nagy 2009-2010 (ita) +# Thomas Nagy 2009-2018 (ita) """ The **bison** program is a code generator which creates C or C++ files. @@ -12,7 +12,7 @@ from waflib.TaskGen import extension class bison(Task.Task): - """Compile bison files""" + """Compiles bison files""" color = 'BLUE' run_str = '${BISON} ${BISONFLAGS} ${SRC[0].abspath()} -o ${TGT[0].name}' ext_out = ['.h'] # just to make sure @@ -20,9 +20,9 @@ class bison(Task.Task): @extension('.y', '.yc', '.yy') def big_bison(self, node): """ - Create a bison task, which must be executed from the directory of the output file. + Creates a bison task, which must be executed from the directory of the output file. 
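As a usage sketch for the bison extension hook above; the grammar and source file names are invented, and the tool must be loaded at configure time::

    def configure(conf):
        conf.load('compiler_c bison')

    def build(bld):
        # parser.y is run through bison (producing parser.tab.c and parser.tab.h)
        # and the generated C file is compiled along with main.c
        bld.program(source='parser.y main.c', target='calc')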
""" - has_h = '-d' in self.env['BISONFLAGS'] + has_h = '-d' in self.env.BISONFLAGS outs = [] if node.name.endswith('.yc'): @@ -35,14 +35,14 @@ def big_bison(self, node): outs.append(node.change_ext('.tab.h')) tsk = self.create_task('bison', node, outs) - tsk.cwd = node.parent.get_bld().abspath() + tsk.cwd = node.parent.get_bld() # and the c/cxx file must be compiled too self.source.append(outs[0]) def configure(conf): """ - Detect the *bison* program + Detects the *bison* program """ conf.find_program('bison', var='BISON') conf.env.BISONFLAGS = ['-d'] diff --git a/waflib/Tools/c.py b/waflib/Tools/c.py index b97eee15d9..be7c746d71 100644 --- a/waflib/Tools/c.py +++ b/waflib/Tools/c.py @@ -1,39 +1,39 @@ #!/usr/bin/env python # encoding: utf-8 -# Thomas Nagy, 2006-2010 (ita) +# Thomas Nagy, 2006-2018 (ita) "Base for c programs/libraries" -from waflib import TaskGen, Task, Utils +from waflib import TaskGen, Task from waflib.Tools import c_preproc from waflib.Tools.ccroot import link_task, stlink_task @TaskGen.extension('.c') def c_hook(self, node): - "Bind the c file extension to the creation of a :py:class:`waflib.Tools.c.c` instance" + "Binds the c file extensions create :py:class:`waflib.Tools.c.c` instances" if not self.env.CC and self.env.CXX: return self.create_compiled_task('cxx', node) return self.create_compiled_task('c', node) class c(Task.Task): - "Compile C files into object files" - run_str = '${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CC_SRC_F}${SRC} ${CC_TGT_F}${TGT[0].abspath()}' + "Compiles C files into object files" + run_str = '${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CC_SRC_F}${SRC} ${CC_TGT_F}${TGT[0].relpath()} ${CPPFLAGS}' vars = ['CCDEPS'] # unused variable to depend on, just in case ext_in = ['.h'] # set the build order easily by using ext_out=['.h'] scan = c_preproc.scan class cprogram(link_task): - "Link object files into a c program" - run_str = '${LINK_CC} ${LINKFLAGS} ${CCLNK_SRC_F}${SRC} ${CCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB}' + "Links object files into c programs" + run_str = '${LINK_CC} ${LINKFLAGS} ${CCLNK_SRC_F}${SRC} ${CCLNK_TGT_F}${TGT[0].relpath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB} ${LDFLAGS}' ext_out = ['.bin'] vars = ['LINKDEPS'] inst_to = '${BINDIR}' class cshlib(cprogram): - "Link object files into a c shared library" + "Links object files into c shared libraries" inst_to = '${LIBDIR}' class cstlib(stlink_task): - "Link object files into a c static library" + "Links object files into a c static libraries" pass # do not remove diff --git a/waflib/Tools/c_aliases.py b/waflib/Tools/c_aliases.py index 65677801c8..928cfe29ca 100644 --- a/waflib/Tools/c_aliases.py +++ b/waflib/Tools/c_aliases.py @@ -1,15 +1,16 @@ #!/usr/bin/env python # encoding: utf-8 -# Thomas Nagy, 2005-2010 (ita) +# Thomas Nagy, 2005-2015 (ita) "base for all c/c++ programs and libraries" -import os, sys, re -from waflib import Utils, Build +from waflib import Utils, Errors from waflib.Configure import conf def get_extensions(lst): """ + Returns the file extensions for 
the list of files given as input + :param lst: files to process :list lst: list of string or :py:class:`waflib.Node.Node` :return: list of file extensions @@ -17,17 +18,15 @@ def get_extensions(lst): """ ret = [] for x in Utils.to_list(lst): - try: - if not isinstance(x, str): - x = x.name - ret.append(x[x.rfind('.') + 1:]) - except Exception: - pass + if not isinstance(x, str): + x = x.name + ret.append(x[x.rfind('.') + 1:]) return ret def sniff_features(**kw): """ - Look at the source files and return the features for a task generator (mainly cc and cxx):: + Computes and returns the features required for a task generator by + looking at the file extensions. This aimed for C/C++ mainly:: snif_features(source=['foo.c', 'foo.cxx'], type='shlib') # returns ['cxx', 'c', 'cxxshlib', 'cshlib'] @@ -39,35 +38,54 @@ def sniff_features(**kw): :return: the list of features for a task generator processing the source files :rtype: list of string """ - exts = get_extensions(kw['source']) - type = kw['_type'] + exts = get_extensions(kw.get('source', [])) + typ = kw['typ'] feats = [] # watch the order, cxx will have the precedence - if 'cxx' in exts or 'cpp' in exts or 'c++' in exts or 'cc' in exts or 'C' in exts: - feats.append('cxx') - - if 'c' in exts or 'vala' in exts: + for x in 'cxx cpp c++ cc C'.split(): + if x in exts: + feats.append('cxx') + break + if 'c' in exts or 'vala' in exts or 'gs' in exts: feats.append('c') + if 's' in exts or 'S' in exts: + feats.append('asm') + + for x in 'f f90 F F90 for FOR'.split(): + if x in exts: + feats.append('fc') + break + if 'd' in exts: feats.append('d') if 'java' in exts: feats.append('java') - - if 'java' in exts: return 'java' - if type in ('program', 'shlib', 'stlib'): + if typ in ('program', 'shlib', 'stlib'): + will_link = False for x in feats: - if x in ('cxx', 'd', 'c'): - feats.append(x + type) - + if x in ('cxx', 'd', 'fc', 'c', 'asm'): + feats.append(x + typ) + will_link = True + if not will_link and not kw.get('features', []): + raise Errors.WafError('Unable to determine how to link %r, try adding eg: features="c cshlib"?' % kw) return feats -def set_features(kw, _type): - kw['_type'] = _type +def set_features(kw, typ): + """ + Inserts data in the input dict *kw* based on existing data and on the type of target + required (typ). 
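To illustrate the sniffing logic above, a hedged example of what the helper computes for two invented source lists; the results in the comments follow the branches shown above::

    from waflib.Tools.c_aliases import sniff_features

    sniff_features(source=['main.c'], typ='program')
    # -> ['c', 'cprogram']

    sniff_features(source=['prog.f90', 'util.c'], typ='shlib')
    # -> ['c', 'fc', 'cshlib', 'fcshlib']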
+ + :param kw: task generator parameters + :type kw: dict + :param typ: type of target + :type typ: string + """ + kw['typ'] = typ kw['features'] = Utils.to_list(kw.get('features', [])) + Utils.to_list(sniff_features(**kw)) @conf diff --git a/waflib/Tools/c_config.py b/waflib/Tools/c_config.py old mode 100755 new mode 100644 index f989a4ade5..4d98da512c --- a/waflib/Tools/c_config.py +++ b/waflib/Tools/c_config.py @@ -1,13 +1,15 @@ #!/usr/bin/env python # encoding: utf-8 -# Thomas Nagy, 2005-2010 (ita) +# Thomas Nagy, 2005-2018 (ita) """ C/C++/D configuration helpers """ -import os, re, shlex, sys -from waflib import Build, Utils, Task, Options, Logs, Errors, ConfigSet, Runner +from __future__ import with_statement + +import os, re, shlex +from waflib import Build, Utils, Task, Options, Logs, Errors, Runner from waflib.TaskGen import after_method, feature from waflib.Configure import conf @@ -17,32 +19,6 @@ DEFKEYS = 'define_key' INCKEYS = 'include_key' -cfg_ver = { - 'atleast-version': '>=', - 'exact-version': '==', - 'max-version': '<=', -} - -SNIP_FUNCTION = ''' -int main(int argc, char **argv) { - void *p; - (void)argc; (void)argv; - p=(void*)(%s); - return (int)p; -} -''' -"""Code template for checking for functions""" - -SNIP_TYPE = ''' -int main(int argc, char **argv) { - (void)argc; (void)argv; - if ((%(type_name)s *) 0) return 0; - if (sizeof (%(type_name)s)) return 0; - return 1; -} -''' -"""Code template for checking for types""" - SNIP_EMPTY_PROGRAM = ''' int main(int argc, char **argv) { (void)argc; (void)argv; @@ -50,15 +26,6 @@ } ''' -SNIP_FIELD = ''' -int main(int argc, char **argv) { - char *off; - (void)argc; (void)argv; - off = (char*) &((%(type_name)s*)0)->%(field_name)s; - return (size_t) off < sizeof(%(type_name)s); -} -''' - MACRO_TO_DESTOS = { '__linux__' : 'linux', '__GNU__' : 'gnu', # hurd @@ -75,7 +42,7 @@ '_WIN64' : 'win32', '_WIN32' : 'win32', # Note about darwin: this is also tested with 'defined __APPLE__ && defined __MACH__' somewhere below in this file. 
-'__ENVIRONMENT_MAC_OS_X_VERSION_MIN_REQUIRED__' : 'darwin', +'__ENVIRONMENT_MAC_OS_X_VERSION_MIN_REQUIRED__' : 'darwin', '__ENVIRONMENT_IPHONE_OS_VERSION_MIN_REQUIRED__' : 'darwin', # iphone '__QNX__' : 'qnx', '__native_client__' : 'nacl' # google native client platform @@ -100,12 +67,15 @@ '__s390x__' : 's390x', '__s390__' : 's390', '__sh__' : 'sh', +'__xtensa__' : 'xtensa', +'__e2k__' : 'e2k', +'__riscv' : 'riscv', } @conf def parse_flags(self, line, uselib_store, env=None, force_static=False, posix=None): """ - Parse the flags from the input lines, and add them to the relevant use variables:: + Parses flags from the input lines, and adds them to the relevant use variables:: def configure(conf): conf.parse_flags('-O3', 'FOO') @@ -118,6 +88,10 @@ def configure(conf): :type uselib_store: string :param env: config set or conf.env by default :type env: :py:class:`waflib.ConfigSet.ConfigSet` + :param force_static: force usage of static libraries + :type force_static: bool default False + :param posix: usage of POSIX mode for shlex lexical analiysis library + :type posix: bool default True """ assert(isinstance(line, str)) @@ -135,11 +109,15 @@ def configure(conf): lex.commenters = '' lst = list(lex) + so_re = re.compile(r"\.so(?:\.[0-9]+)*$") + # append_unique is not always possible # for example, apple flags may require both -arch i386 and -arch ppc - app = env.append_value - appu = env.append_unique uselib = uselib_store + def app(var, val): + env.append_value('%s_%s' % (var, uselib), val) + def appu(var, val): + env.append_unique('%s_%s' % (var, uselib), val) static = False while lst: x = lst.pop(0) @@ -147,67 +125,79 @@ def configure(conf): ot = x[2:] if st == '-I' or st == '/I': - if not ot: ot = lst.pop(0) - appu('INCLUDES_' + uselib, [ot]) + if not ot: + ot = lst.pop(0) + appu('INCLUDES', ot) elif st == '-i': tmp = [x, lst.pop(0)] app('CFLAGS', tmp) app('CXXFLAGS', tmp) elif st == '-D' or (env.CXX_NAME == 'msvc' and st == '/D'): # not perfect but.. 
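To show where the appended values end up, a hedged sketch of ``parse_flags``; the flag string and the FOO store are invented, and the variable names follow the ``app``/``appu`` helpers above::

    def configure(conf):
        conf.parse_flags('-I/opt/foo/include -DFOO_STATIC -L/opt/foo/lib -lfoo',
            uselib_store='FOO')
        # populates, among others:
        #   conf.env.INCLUDES_FOO = ['/opt/foo/include']
        #   conf.env.DEFINES_FOO  = ['FOO_STATIC']
        #   conf.env.LIBPATH_FOO  = ['/opt/foo/lib']
        #   conf.env.LIB_FOO      = ['foo']

    def build(bld):
        bld.program(source='main.c', target='app', use='FOO')   # consumes the *_FOO variables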
- if not ot: ot = lst.pop(0) - app('DEFINES_' + uselib, [ot]) + if not ot: + ot = lst.pop(0) + app('DEFINES', ot) elif st == '-l': - if not ot: ot = lst.pop(0) - prefix = (force_static or static) and 'STLIB_' or 'LIB_' - appu(prefix + uselib, [ot]) + if not ot: + ot = lst.pop(0) + prefix = 'STLIB' if (force_static or static) else 'LIB' + app(prefix, ot) elif st == '-L': - if not ot: ot = lst.pop(0) - prefix = (force_static or static) and 'STLIBPATH_' or 'LIBPATH_' - appu(prefix + uselib, [ot]) + if not ot: + ot = lst.pop(0) + prefix = 'STLIBPATH' if (force_static or static) else 'LIBPATH' + appu(prefix, ot) elif x.startswith('/LIBPATH:'): - prefix = (force_static or static) and 'STLIBPATH_' or 'LIBPATH_' - appu(prefix + uselib, [x.replace('/LIBPATH:', '')]) - elif x == '-pthread' or x.startswith('+') or x.startswith('-std'): - app('CFLAGS_' + uselib, [x]) - app('CXXFLAGS_' + uselib, [x]) - app('LINKFLAGS_' + uselib, [x]) + prefix = 'STLIBPATH' if (force_static or static) else 'LIBPATH' + appu(prefix, x.replace('/LIBPATH:', '')) + elif x.startswith('-std='): + prefix = 'CXXFLAGS' if '++' in x else 'CFLAGS' + app(prefix, x) + elif x.startswith('+') or x in ('-pthread', '-fPIC', '-fpic', '-fPIE', '-fpie', '-flto', '-fno-lto'): + app('CFLAGS', x) + app('CXXFLAGS', x) + app('LINKFLAGS', x) elif x == '-framework': - appu('FRAMEWORK_' + uselib, [lst.pop(0)]) + appu('FRAMEWORK', lst.pop(0)) elif x.startswith('-F'): - appu('FRAMEWORKPATH_' + uselib, [x[2:]]) - elif x == '-Wl,-rpath': - app('RPATH_' + uselib, lst.pop(0)) + appu('FRAMEWORKPATH', x[2:]) + elif x == '-Wl,-rpath' or x == '-Wl,-R': + app('RPATH', lst.pop(0).lstrip('-Wl,')) + elif x.startswith('-Wl,-R,'): + app('RPATH', x[7:]) elif x.startswith('-Wl,-R'): - app('RPATH_' + uselib, x[6:]) + app('RPATH', x[6:]) elif x.startswith('-Wl,-rpath,'): - app('RPATH_' + uselib, x[11:]) + app('RPATH', x[11:]) elif x == '-Wl,-Bstatic' or x == '-Bstatic': static = True elif x == '-Wl,-Bdynamic' or x == '-Bdynamic': static = False - elif x.startswith('-Wl'): - app('LINKFLAGS_' + uselib, [x]) - elif x.startswith('-m') or x.startswith('-f') or x.startswith('-dynamic'): - app('CFLAGS_' + uselib, [x]) - app('CXXFLAGS_' + uselib, [x]) + elif x.startswith('-Wl') or x in ('-rdynamic', '-pie'): + app('LINKFLAGS', x) + elif x.startswith(('-m', '-f', '-dynamic', '-O', '-g')): + # Adding the -W option breaks python builds on Openindiana + app('CFLAGS', x) + app('CXXFLAGS', x) elif x.startswith('-bundle'): - app('LINKFLAGS_' + uselib, [x]) - elif x.startswith('-undefined') or x.startswith('-Xlinker'): + app('LINKFLAGS', x) + elif x.startswith(('-undefined', '-Xlinker')): arg = lst.pop(0) - app('LINKFLAGS_' + uselib, [x, arg]) - elif x.startswith('-arch') or x.startswith('-isysroot'): + app('LINKFLAGS', [x, arg]) + elif x.startswith(('-arch', '-isysroot')): tmp = [x, lst.pop(0)] - app('CFLAGS_' + uselib, tmp) - app('CXXFLAGS_' + uselib, tmp) - app('LINKFLAGS_' + uselib, tmp) - elif x.endswith('.a') or x.endswith('.so') or x.endswith('.dylib') or x.endswith('.lib'): - appu('LINKFLAGS_' + uselib, [x]) # not cool, #762 + app('CFLAGS', tmp) + app('CXXFLAGS', tmp) + app('LINKFLAGS', tmp) + elif x.endswith(('.a', '.dylib', '.lib')) or so_re.search(x): + appu('LINKFLAGS', x) # not cool, #762 + else: + self.to_log('Unhandled flag %r' % x) @conf def validate_cfg(self, kw): """ - Search for the program *pkg-config* if missing, and validate the parameters to pass to - :py:func:`waflib.Tools.c_config.exec_cfg`. 
+ Searches for the program *pkg-config* if missing, and validates the + parameters to pass to :py:func:`waflib.Tools.c_config.exec_cfg`. :param path: the **-config program to use** (default is *pkg-config*) :type path: list of string @@ -223,59 +213,56 @@ def validate_cfg(self, kw): self.find_program('pkg-config', var='PKGCONFIG') kw['path'] = self.env.PKGCONFIG - # pkg-config version - if 'atleast_pkgconfig_version' in kw: - if not 'msg' in kw: + # verify that exactly one action is requested + s = ('atleast_pkgconfig_version' in kw) + ('modversion' in kw) + ('package' in kw) + if s != 1: + raise ValueError('exactly one of atleast_pkgconfig_version, modversion and package must be set') + if not 'msg' in kw: + if 'atleast_pkgconfig_version' in kw: kw['msg'] = 'Checking for pkg-config version >= %r' % kw['atleast_pkgconfig_version'] - return + elif 'modversion' in kw: + kw['msg'] = 'Checking for %r version' % kw['modversion'] + else: + kw['msg'] = 'Checking for %r' %(kw['package']) - if not 'okmsg' in kw: + # let the modversion check set the okmsg to the detected version + if not 'okmsg' in kw and not 'modversion' in kw: kw['okmsg'] = 'yes' if not 'errmsg' in kw: kw['errmsg'] = 'not found' - if 'modversion' in kw: - if not 'msg' in kw: - kw['msg'] = 'Checking for %r version' % kw['modversion'] - return - - # checking for the version of a module, for the moment, one thing at a time - for x in cfg_ver.keys(): - y = x.replace('-', '_') - if y in kw: - if not 'package' in kw: - raise ValueError('%s requires a package' % x) - - if not 'msg' in kw: - kw['msg'] = 'Checking for %r %s %s' % (kw['package'], cfg_ver[x], kw[y]) - return - - if not 'define_name' in kw: - pkgname = kw.get('uselib_store', kw['package'].upper()) - kw['define_name'] = self.have_define(pkgname) - - if not 'uselib_store' in kw: - self.undefine(kw['define_name']) - - if not 'msg' in kw: - kw['msg'] = 'Checking for %r' % (kw['package'] or kw['path']) + # pkg-config version + if 'atleast_pkgconfig_version' in kw: + pass + elif 'modversion' in kw: + if not 'uselib_store' in kw: + kw['uselib_store'] = kw['modversion'] + if not 'define_name' in kw: + kw['define_name'] = '%s_VERSION' % Utils.quote_define_name(kw['uselib_store']) + else: + if not 'uselib_store' in kw: + kw['uselib_store'] = Utils.to_list(kw['package'])[0].upper() + if not 'define_name' in kw: + kw['define_name'] = self.have_define(kw['uselib_store']) @conf def exec_cfg(self, kw): """ - Execute the program *pkg-config*: + Executes ``pkg-config`` or other ``-config`` applications to collect configuration flags: * if atleast_pkgconfig_version is given, check that pkg-config has the version n and return * if modversion is given, then return the module version * else, execute the *-config* program with the *args* and *variables* given, and set the flags on the *conf.env.FLAGS_name* variable + :param path: the **-config program to use** + :type path: list of string :param atleast_pkgconfig_version: minimum pkg-config version to use (disable other tests) :type atleast_pkgconfig_version: string :param package: package name, for example *gtk+-2.0* :type package: string - :param uselib_store: if the test is successful, define HAVE\_*name*. It is also used to define *conf.env.FLAGS_name* variables. + :param uselib_store: if the test is successful, define HAVE\\_*name*. It is also used to define *conf.env.FLAGS_name* variables. 
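A hedged sketch of the three mutually exclusive modes enforced by ``validate_cfg`` above; the package names and version are examples only::

    def configure(conf):
        # 1. minimum pkg-config version only
        conf.check_cfg(atleast_pkgconfig_version='0.26')

        # 2. module version query; stores it and defines e.g. GLIB_2_0_VERSION
        conf.check_cfg(modversion='glib-2.0')

        # 3. flags for a package, stored into *_GTK3 variables for use='GTK3'
        conf.check_cfg(package='gtk+-3.0', args='--cflags --libs', uselib_store='GTK3')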
:type uselib_store: string - :param modversion: if provided, return the version of the given module and define *name*\_VERSION + :param modversion: if provided, return the version of the given module and define *name*\\_VERSION :type modversion: string :param args: arguments to give to *package* when retrieving flags :type args: list of string @@ -283,46 +270,49 @@ def exec_cfg(self, kw): :type variables: list of string :param define_variable: additional variables to define (also in conf.env.PKG_CONFIG_DEFINES) :type define_variable: dict(string: string) + :param pkg_config_path: paths where pkg-config should search for .pc config files (overrides env.PKG_CONFIG_PATH if exists) + :type pkg_config_path: string, list of directories separated by colon + :param force_static: force usage of static libraries + :type force_static: bool default False + :param posix: usage of POSIX mode for shlex lexical analiysis library + :type posix: bool default True """ path = Utils.to_list(kw['path']) + env = self.env.env or None + if kw.get('pkg_config_path'): + if not env: + env = dict(self.environ) + env['PKG_CONFIG_PATH'] = kw['pkg_config_path'] def define_it(): - pkgname = kw.get('uselib_store', kw['package'].upper()) - if kw.get('global_define'): - # compatibility - self.define(self.have_define(kw['package']), 1, False) + define_name = kw['define_name'] + # by default, add HAVE_X to the config.h, else provide DEFINES_X for use=X + if kw.get('global_define', 1): + self.define(define_name, 1, False) else: - self.env.append_unique('DEFINES_%s' % pkgname, "%s=1" % self.have_define(pkgname)) - self.env[self.have_define(pkgname)] = 1 + self.env.append_unique('DEFINES_%s' % kw['uselib_store'], "%s=1" % define_name) + + if kw.get('add_have_to_env', 1): + self.env[define_name] = 1 # pkg-config version if 'atleast_pkgconfig_version' in kw: cmd = path + ['--atleast-pkgconfig-version=%s' % kw['atleast_pkgconfig_version']] - self.cmd_and_log(cmd) - if not 'okmsg' in kw: - kw['okmsg'] = 'yes' + self.cmd_and_log(cmd, env=env) return - # checking for the version of a module - for x in cfg_ver: - y = x.replace('-', '_') - if y in kw: - self.cmd_and_log(path + ['--%s=%s' % (x, kw[y]), kw['package']]) - if not 'okmsg' in kw: - kw['okmsg'] = 'yes' - define_it() - break - - # retrieving the version of a module + # single version for a module if 'modversion' in kw: - version = self.cmd_and_log(path + ['--modversion', kw['modversion']]).strip() - self.define('%s_VERSION' % Utils.quote_define_name(kw.get('uselib_store', kw['modversion'])), version) + version = self.cmd_and_log(path + ['--modversion', kw['modversion']], env=env).strip() + if not 'okmsg' in kw: + kw['okmsg'] = version + self.define(kw['define_name'], version) return version lst = [] + path - defi = kw.get('define_variable', None) + defi = kw.get('define_variable') if not defi: defi = self.env.PKG_CONFIG_DEFINES or {} for key, val in defi.items(): @@ -340,39 +330,33 @@ def define_it(): # retrieving variables of a module if 'variables' in kw: - env = kw.get('env', self.env) - uselib = kw.get('uselib_store', kw['package'].upper()) + v_env = kw.get('env', self.env) vars = Utils.to_list(kw['variables']) for v in vars: - val = self.cmd_and_log(lst + ['--variable=' + v]).strip() - var = '%s_%s' % (uselib, v) - env[var] = val - if not 'okmsg' in kw: - kw['okmsg'] = 'yes' + val = self.cmd_and_log(lst + ['--variable=' + v], env=env).strip() + var = '%s_%s' % (kw['uselib_store'], v) + v_env[var] = val return # so we assume the command-line will output flags to be parsed 
afterwards - ret = self.cmd_and_log(lst) - if not 'okmsg' in kw: - kw['okmsg'] = 'yes' + ret = self.cmd_and_log(lst, env=env) define_it() - self.parse_flags(ret, kw.get('uselib_store', kw['package'].upper()), kw.get('env', self.env), force_static=static, posix=kw.get('posix', None)) + self.parse_flags(ret, kw['uselib_store'], kw.get('env', self.env), force_static=static, posix=kw.get('posix')) return ret @conf def check_cfg(self, *k, **kw): """ - Check for configuration flags using a **-config**-like program (pkg-config, sdl-config, etc). - Encapsulate the calls to :py:func:`waflib.Tools.c_config.validate_cfg` and :py:func:`waflib.Tools.c_config.exec_cfg` + Checks for configuration flags using a **-config**-like program (pkg-config, sdl-config, etc). + This wraps internal calls to :py:func:`waflib.Tools.c_config.validate_cfg` and :py:func:`waflib.Tools.c_config.exec_cfg` + so check exec_cfg parameters descriptions for more details on kw passed A few examples:: def configure(conf): conf.load('compiler_c') conf.check_cfg(package='glib-2.0', args='--libs --cflags') - conf.check_cfg(package='glib-2.0', uselib_store='GLIB', atleast_version='2.10.0', - args='--cflags --libs') conf.check_cfg(package='pango') conf.check_cfg(package='pango', uselib_store='MYPANGO', args=['--cflags', '--libs']) conf.check_cfg(package='pango', @@ -381,26 +365,22 @@ def configure(conf): conf.check_cfg(path='sdl-config', args='--cflags --libs', package='', uselib_store='SDL') conf.check_cfg(path='mpicc', args='--showme:compile --showme:link', package='', uselib_store='OPEN_MPI', mandatory=False) - + # variables + conf.check_cfg(package='gtk+-2.0', variables=['includedir', 'prefix'], uselib_store='FOO') + print(conf.env.FOO_includedir) """ - if k: - lst = k[0].split() - kw['package'] = lst[0] - kw['args'] = ' '.join(lst[1:]) - self.validate_cfg(kw) if 'msg' in kw: self.start_msg(kw['msg'], **kw) ret = None try: ret = self.exec_cfg(kw) - except self.errors.WafError: + except self.errors.WafError as e: if 'errmsg' in kw: self.end_msg(kw['errmsg'], 'YELLOW', **kw) if Logs.verbose > 1: - raise - else: - self.fatal('The configuration failed') + self.to_log('Command failure: %s' % e) + self.fatal('The configuration failed') else: if not ret: ret = True @@ -411,6 +391,9 @@ def configure(conf): return ret def build_fun(bld): + """ + Build function that is used for running configuration tests with ``conf.check()`` + """ if bld.kw['compile_filename']: node = bld.srcnode.make_node(bld.kw['compile_filename']) node.write(bld.kw['code']) @@ -420,13 +403,13 @@ def build_fun(bld): for k, v in bld.kw.items(): setattr(o, k, v) - if not bld.kw.get('quiet', None): + if not bld.kw.get('quiet'): bld.conf.to_log("==>\n%s\n<==" % bld.kw['code']) @conf def validate_c(self, kw): """ - pre-check the parameters that will be given to :py:func:`waflib.Configure.run_build` + Pre-checks the parameters that will be given to :py:func:`waflib.Configure.run_build` :param compiler: c or cxx (tries to guess what is best) :type compiler: string @@ -451,6 +434,9 @@ def validate_c(self, kw): :param auto_add_header_name: if header_name was set, add the headers in env.INCKEYS so the next tests will include these headers :type auto_add_header_name: bool """ + for x in ('type_name', 'field_name', 'function_name'): + if x in kw: + Logs.warn('Invalid argument %r in test' % x) if not 'build_fun' in kw: kw['build_fun'] = build_fun @@ -461,95 +447,63 @@ def validate_c(self, kw): if not 'compiler' in kw and not 'features' in kw: kw['compiler'] = 'c' - if env['CXX_NAME'] 
and Task.classes.get('cxx', None): + if env.CXX_NAME and Task.classes.get('cxx'): kw['compiler'] = 'cxx' - if not self.env['CXX']: + if not self.env.CXX: self.fatal('a c++ compiler is required') else: - if not self.env['CC']: + if not self.env.CC: self.fatal('a c compiler is required') if not 'compile_mode' in kw: kw['compile_mode'] = 'c' - if 'cxx' in Utils.to_list(kw.get('features',[])) or kw.get('compiler', '') == 'cxx': + if 'cxx' in Utils.to_list(kw.get('features', [])) or kw.get('compiler') == 'cxx': kw['compile_mode'] = 'cxx' if not 'type' in kw: kw['type'] = 'cprogram' if not 'features' in kw: - kw['features'] = [kw['compile_mode'], kw['type']] # "cprogram c" + if not 'header_name' in kw or kw.get('link_header_test', True): + kw['features'] = [kw['compile_mode'], kw['type']] # "c ccprogram" + else: + kw['features'] = [kw['compile_mode']] else: kw['features'] = Utils.to_list(kw['features']) if not 'compile_filename' in kw: kw['compile_filename'] = 'test.c' + ((kw['compile_mode'] == 'cxx') and 'pp' or '') - def to_header(dct): if 'header_name' in dct: dct = Utils.to_list(dct['header_name']) return ''.join(['#include <%s>\n' % x for x in dct]) return '' - #OSX if 'framework_name' in kw: + # OSX, not sure this is used anywhere fwkname = kw['framework_name'] if not 'uselib_store' in kw: kw['uselib_store'] = fwkname.upper() - - if not kw.get('no_header', False): - if not 'header_name' in kw: - kw['header_name'] = [] + if not kw.get('no_header'): fwk = '%s/%s.h' % (fwkname, fwkname) - if kw.get('remove_dot_h', None): + if kw.get('remove_dot_h'): fwk = fwk[:-2] - kw['header_name'] = Utils.to_list(kw['header_name']) + [fwk] - + val = kw.get('header_name', []) + kw['header_name'] = Utils.to_list(val) + [fwk] kw['msg'] = 'Checking for framework %s' % fwkname kw['framework'] = fwkname - #kw['frameworkpath'] = set it yourself - - if 'function_name' in kw: - fu = kw['function_name'] - if not 'msg' in kw: - kw['msg'] = 'Checking for function %s' % fu - kw['code'] = to_header(kw) + SNIP_FUNCTION % fu - if not 'uselib_store' in kw: - kw['uselib_store'] = fu.upper() - if not 'define_name' in kw: - kw['define_name'] = self.have_define(fu) - - elif 'type_name' in kw: - tu = kw['type_name'] - if not 'header_name' in kw: - kw['header_name'] = 'stdint.h' - if 'field_name' in kw: - field = kw['field_name'] - kw['code'] = to_header(kw) + SNIP_FIELD % {'type_name' : tu, 'field_name' : field} - if not 'msg' in kw: - kw['msg'] = 'Checking for field %s in %s' % (field, tu) - if not 'define_name' in kw: - kw['define_name'] = self.have_define((tu + '_' + field).upper()) - else: - kw['code'] = to_header(kw) + SNIP_TYPE % {'type_name' : tu} - if not 'msg' in kw: - kw['msg'] = 'Checking for type %s' % tu - if not 'define_name' in kw: - kw['define_name'] = self.have_define(tu.upper()) elif 'header_name' in kw: if not 'msg' in kw: kw['msg'] = 'Checking for header %s' % kw['header_name'] l = Utils.to_list(kw['header_name']) - assert len(l)>0, 'list of headers in header_name is empty' + assert len(l), 'list of headers in header_name is empty' kw['code'] = to_header(kw) + SNIP_EMPTY_PROGRAM - if not 'uselib_store' in kw: kw['uselib_store'] = l[0].upper() - if not 'define_name' in kw: kw['define_name'] = self.have_define(l[0]) @@ -585,6 +539,7 @@ def to_header(dct): kw['execute'] = False if kw['execute']: kw['features'].append('test_exec') + kw['chmod'] = Utils.O755 if not 'errmsg' in kw: kw['errmsg'] = 'not found' @@ -599,7 +554,13 @@ def to_header(dct): if self.env[INCKEYS]: kw['code'] = '\n'.join(['#include <%s>' % x 
for x in self.env[INCKEYS]]) + '\n' + kw['code'] - if not kw.get('success'): kw['success'] = None + # in case defines lead to very long command-lines + if kw.get('merge_config_header') or env.merge_config_header: + kw['code'] = '%s\n\n%s' % (self.get_config_header(), kw['code']) + env.DEFINES = [] # modify the copy + + if not kw.get('success'): + kw['success'] = None if 'define_name' in kw: self.undefine(kw['define_name']) @@ -608,57 +569,84 @@ def to_header(dct): @conf def post_check(self, *k, **kw): - "Set the variables after a test executed in :py:func:`waflib.Tools.c_config.check` was run successfully" - + """ + Sets the variables after a test executed in + :py:func:`waflib.Tools.c_config.check` was run successfully + """ is_success = 0 if kw['execute']: if kw['success'] is not None: - if kw.get('define_ret', False): + if kw.get('define_ret'): is_success = kw['success'] else: is_success = (kw['success'] == 0) else: is_success = (kw['success'] == 0) - if 'define_name' in kw: - # TODO simplify? - if 'header_name' in kw or 'function_name' in kw or 'type_name' in kw or 'fragment' in kw: - if kw['execute'] and kw.get('define_ret', None) and isinstance(is_success, str): - self.define(kw['define_name'], is_success, quote=kw.get('quote', 1)) + if kw.get('define_name'): + comment = kw.get('comment', '') + define_name = kw['define_name'] + if kw['execute'] and kw.get('define_ret') and isinstance(is_success, str): + if kw.get('global_define', 1): + self.define(define_name, is_success, quote=kw.get('quote', 1), comment=comment) else: - self.define_cond(kw['define_name'], is_success) + if kw.get('quote', 1): + succ = '"%s"' % is_success + else: + succ = int(is_success) + val = '%s=%s' % (define_name, succ) + var = 'DEFINES_%s' % kw['uselib_store'] + self.env.append_value(var, val) else: - self.define_cond(kw['define_name'], is_success) + if kw.get('global_define', 1): + self.define_cond(define_name, is_success, comment=comment) + else: + var = 'DEFINES_%s' % kw['uselib_store'] + self.env.append_value(var, '%s=%s' % (define_name, int(is_success))) + + # define conf.env.HAVE_X to 1 + if kw.get('add_have_to_env', 1): + if kw.get('uselib_store'): + self.env[self.have_define(kw['uselib_store'])] = 1 + elif kw['execute'] and kw.get('define_ret'): + self.env[define_name] = is_success + else: + self.env[define_name] = int(is_success) if 'header_name' in kw: - if kw.get('auto_add_header_name', False): + if kw.get('auto_add_header_name'): self.env.append_value(INCKEYS, Utils.to_list(kw['header_name'])) if is_success and 'uselib_store' in kw: from waflib.Tools import ccroot - - # TODO see get_uselib_vars from ccroot.py - _vars = set([]) + # See get_uselib_vars in ccroot.py + _vars = set() for x in kw['features']: if x in ccroot.USELIB_VARS: _vars |= ccroot.USELIB_VARS[x] for k in _vars: - lk = k.lower() - if lk in kw: - val = kw[lk] - # remove trailing slash - if isinstance(val, str): - val = val.rstrip(os.path.sep) - self.env.append_unique(k + '_' + kw['uselib_store'], Utils.to_list(val)) + x = k.lower() + if x in kw: + self.env.append_value(k + '_' + kw['uselib_store'], kw[x]) return is_success @conf def check(self, *k, **kw): """ - Perform a configuration test by calling :py:func:`waflib.Configure.run_build`. + Performs a configuration test by calling :py:func:`waflib.Configure.run_build`. For the complete list of parameters, see :py:func:`waflib.Tools.c_config.validate_c`. 
- To force a specific compiler, pass "compiler='c'" or "compiler='cxx'" in the arguments + To force a specific compiler, pass ``compiler='c'`` or ``compiler='cxx'`` to the list of arguments + + Besides build targets, complete builds can be given through a build function. All files will + be written to a temporary directory:: + + def build(bld): + lib_node = bld.srcnode.make_node('libdir/liblc1.c') + lib_node.parent.mkdir() + lib_node.write('#include \\nint lib_func(void) { FILE *f = fopen("foo", "r");}\\n', 'w') + bld(features='c cshlib', source=[lib_node], linkflags=conf.env.EXTRA_LDFLAGS, target='liblc') + conf.check(build_fun=build, msg=msg) """ self.validate_c(kw) self.start_msg(kw['msg'], **kw) @@ -684,24 +672,25 @@ def check(self, *k, **kw): class test_exec(Task.Task): """ - A task for executing a programs after they are built. See :py:func:`waflib.Tools.c_config.test_exec_fun`. + A task that runs programs after they are built. See :py:func:`waflib.Tools.c_config.test_exec_fun`. """ color = 'PINK' def run(self): + cmd = [self.inputs[0].abspath()] + getattr(self.generator, 'test_args', []) if getattr(self.generator, 'rpath', None): if getattr(self.generator, 'define_ret', False): - self.generator.bld.retval = self.generator.bld.cmd_and_log([self.inputs[0].abspath()]) + self.generator.bld.retval = self.generator.bld.cmd_and_log(cmd) else: - self.generator.bld.retval = self.generator.bld.exec_command([self.inputs[0].abspath()]) + self.generator.bld.retval = self.generator.bld.exec_command(cmd) else: env = self.env.env or {} env.update(dict(os.environ)) for var in ('LD_LIBRARY_PATH', 'DYLD_LIBRARY_PATH', 'PATH'): env[var] = self.inputs[0].parent.abspath() + os.path.pathsep + env.get(var, '') if getattr(self.generator, 'define_ret', False): - self.generator.bld.retval = self.generator.bld.cmd_and_log([self.inputs[0].abspath()], env=env) + self.generator.bld.retval = self.generator.bld.cmd_and_log(cmd, env=env) else: - self.generator.bld.retval = self.generator.bld.exec_command([self.inputs[0].abspath()], env=env) + self.generator.bld.retval = self.generator.bld.exec_command(cmd, env=env) @feature('test_exec') @after_method('apply_link') @@ -719,20 +708,51 @@ def configure(conf): @conf def check_cxx(self, *k, **kw): - # DO NOT USE + """ + Runs a test with a task generator of the form:: + + conf.check(features='cxx cxxprogram', ...) + """ kw['compiler'] = 'cxx' return self.check(*k, **kw) @conf def check_cc(self, *k, **kw): - # DO NOT USE + """ + Runs a test with a task generator of the form:: + + conf.check(features='c cprogram', ...) + """ kw['compiler'] = 'c' return self.check(*k, **kw) @conf -def define(self, key, val, quote=True): +def set_define_comment(self, key, comment): + """ + Sets a comment that will appear in the configuration header + + :type key: string + :type comment: string + """ + coms = self.env.DEFINE_COMMENTS + if not coms: + coms = self.env.DEFINE_COMMENTS = {} + coms[key] = comment or '' + +@conf +def get_define_comment(self, key): + """ + Returns the comment associated to a define + + :type key: string + """ + coms = self.env.DEFINE_COMMENTS or {} + return coms.get(key, '') + +@conf +def define(self, key, val, quote=True, comment=''): """ - Store a single define and its state into conf.env.DEFINES. If the value is True, False or None it is cast to 1 or 0. + Stores a single define and its state into ``conf.env.DEFINES``. The value is cast to an integer (0/1). 
:param key: define name :type key: string @@ -741,8 +761,9 @@ def define(self, key, val, quote=True): :param quote: enclose strings in quotes (yes by default) :type quote: bool """ - assert key and isinstance(key, str) - + assert isinstance(key, str) + if not key: + return if val is True: val = 1 elif val in (False, None): @@ -755,7 +776,7 @@ def define(self, key, val, quote=True): app = s % (key, str(val)) ban = key + '=' - lst = self.env['DEFINES'] + lst = self.env.DEFINES for x in lst: if x.startswith(ban): lst[lst.index(x)] = app @@ -764,26 +785,29 @@ def define(self, key, val, quote=True): self.env.append_value('DEFINES', app) self.env.append_unique(DEFKEYS, key) + self.set_define_comment(key, comment) @conf -def undefine(self, key): +def undefine(self, key, comment=''): """ - Remove a define from conf.env.DEFINES + Removes a global define from ``conf.env.DEFINES`` :param key: define name :type key: string """ - assert key and isinstance(key, str) - + assert isinstance(key, str) + if not key: + return ban = key + '=' - lst = [x for x in self.env['DEFINES'] if not x.startswith(ban)] - self.env['DEFINES'] = lst + lst = [x for x in self.env.DEFINES if not x.startswith(ban)] + self.env.DEFINES = lst self.env.append_unique(DEFKEYS, key) + self.set_define_comment(key, comment) @conf -def define_cond(self, key, val): +def define_cond(self, key, val, comment=''): """ - Conditionally define a name:: + Conditionally defines a name:: def configure(conf): conf.define_cond('A', True) @@ -796,16 +820,19 @@ def configure(conf): :param val: value :type val: int or string """ - assert key and isinstance(key, str) - + assert isinstance(key, str) + if not key: + return if val: - self.define(key, 1) + self.define(key, 1, comment=comment) else: - self.undefine(key) + self.undefine(key, comment=comment) @conf def is_defined(self, key): """ + Indicates whether a particular define is globally set in ``conf.env.DEFINES``. + :param key: define name :type key: string :return: True if the define is set @@ -814,7 +841,7 @@ def is_defined(self, key): assert key and isinstance(key, str) ban = key + '=' - for x in self.env['DEFINES']: + for x in self.env.DEFINES: if x.startswith(ban): return True return False @@ -822,14 +849,16 @@ def is_defined(self, key): @conf def get_define(self, key): """ + Returns the value of an existing define, or None if not found + :param key: define name :type key: string - :return: the value of a previously stored define or None if it is not set + :rtype: string """ assert key and isinstance(key, str) ban = key + '=' - for x in self.env['DEFINES']: + for x in self.env.DEFINES: if x.startswith(ban): return x[len(ban):] return None @@ -837,6 +866,9 @@ def get_define(self, key): @conf def have_define(self, key): """ + Returns a variable suitable for command-line or header use by removing invalid characters + and prefixing it with ``HAVE_`` + :param key: define name :type key: string :return: the input key prefixed by *HAVE_* and substitute any invalid characters. 
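A minimal sketch of how the define helpers above combine with the header writer in the next hunk; the names ``APP_VERSION``, ``'large files'`` and ``config.h`` are placeholders chosen for illustration, not taken from this patch::

	def configure(conf):
		conf.load('compiler_c')
		# string values are quoted by default: #define APP_VERSION "1.2.3"
		conf.define('APP_VERSION', '1.2.3', comment='set by the build system')
		# have_define() strips invalid characters and adds the HAVE_ prefix -> HAVE_LARGE_FILES
		# a true condition yields '#define HAVE_LARGE_FILES 1', a false one '/* #undef HAVE_LARGE_FILES */'
		conf.define_cond(conf.have_define('large files'), True)
		# render everything recorded in conf.env.DEFINES into an include-guarded header in the build directory
		conf.write_config_header('config.h')

Comments passed through the ``comment`` argument are appended to the generated lines as C comments by ``get_config_header`` below.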
@@ -847,13 +879,16 @@ def have_define(self, key): @conf def write_config_header(self, configfile='', guard='', top=False, defines=True, headers=False, remove=True, define_prefix=''): """ - Write a configuration header containing defines and includes:: + Writes a configuration header containing defines and includes:: def configure(cnf): cnf.define('A', 1) cnf.write_config_header('config.h') - :param configfile: relative path to the file to create + This function only adds include guards (if necessary), consult + :py:func:`waflib.Tools.c_config.get_config_header` for details on the body. + + :param configfile: path to the file to create (relative or absolute) :type configfile: string :param guard: include guard name to add, by default it is computed from the file name :type guard: string @@ -868,7 +903,8 @@ def configure(cnf): :type define_prefix: string :param define_prefix: prefix all the defines in the file with a particular prefix """ - if not configfile: configfile = WAF_CONFIG_H + if not configfile: + configfile = WAF_CONFIG_H waf_guard = guard or 'W_%s_WAF' % Utils.quote_define_name(configfile) node = top and self.bldnode or self.path.get_bld() @@ -882,7 +918,7 @@ def configure(cnf): node.write('\n'.join(lst)) - # config files are not removed on "waf clean" + # config files must not be removed on "waf clean" self.env.append_unique(Build.CFG_FILES, [node.abspath()]) if remove: @@ -893,12 +929,19 @@ def configure(cnf): @conf def get_config_header(self, defines=True, headers=False, define_prefix=''): """ - Create the contents of a ``config.h`` file from the defines and includes + Creates the contents of a ``config.h`` file from the defines and includes set in conf.env.define_key / conf.env.include_key. No include guards are added. + A prelude will be added from the variable env.WAF_CONFIG_H_PRELUDE if provided. 
This + can be used to insert complex macros or include guards:: + + def configure(conf): + conf.env.WAF_CONFIG_H_PRELUDE = '#include \\n' + conf.write_config_header('config.h') + :param defines: write the defines values :type defines: bool - :param headers: write the headers + :param headers: write include entries for each element in self.env.INCKEYS :type headers: bool :type define_prefix: string :param define_prefix: prefix all the defines with a particular prefix @@ -906,52 +949,59 @@ def get_config_header(self, defines=True, headers=False, define_prefix=''): :rtype: string """ lst = [] + + if self.env.WAF_CONFIG_H_PRELUDE: + lst.append(self.env.WAF_CONFIG_H_PRELUDE) + if headers: for x in self.env[INCKEYS]: lst.append('#include <%s>' % x) if defines: tbl = {} - for k in self.env['DEFINES']: + for k in self.env.DEFINES: a, _, b = k.partition('=') tbl[a] = b for k in self.env[DEFKEYS]: + caption = self.get_define_comment(k) + if caption: + caption = ' /* %s */' % caption try: - txt = '#define %s%s %s' % (define_prefix, k, tbl[k]) + txt = '#define %s%s %s%s' % (define_prefix, k, tbl[k], caption) except KeyError: - txt = '/* #undef %s%s */' % (define_prefix, k) + txt = '/* #undef %s%s */%s' % (define_prefix, k, caption) lst.append(txt) return "\n".join(lst) @conf def cc_add_flags(conf): """ - Read the CFLAGS/CPPFLAGS from os.environ and add to conf.env.CFLAGS + Adds CFLAGS / CPPFLAGS from os.environ to conf.env """ - conf.add_os_flags('CPPFLAGS', 'CFLAGS') - conf.add_os_flags('CFLAGS') + conf.add_os_flags('CPPFLAGS', dup=False) + conf.add_os_flags('CFLAGS', dup=False) @conf def cxx_add_flags(conf): """ - Read the CXXFLAGS/CPPFLAGS and add to conf.env.CXXFLAGS + Adds CXXFLAGS / CPPFLAGS from os.environ to conf.env """ - conf.add_os_flags('CPPFLAGS', 'CXXFLAGS') - conf.add_os_flags('CXXFLAGS') + conf.add_os_flags('CPPFLAGS', dup=False) + conf.add_os_flags('CXXFLAGS', dup=False) @conf def link_add_flags(conf): """ - Read the LINKFLAGS/LDFLAGS and add to conf.env.LDFLAGS + Adds LINKFLAGS / LDFLAGS from os.environ to conf.env """ - conf.add_os_flags('LINKFLAGS') - conf.add_os_flags('LDFLAGS', 'LINKFLAGS') + conf.add_os_flags('LINKFLAGS', dup=False) + conf.add_os_flags('LDFLAGS', dup=False) @conf def cc_load_tools(conf): """ - Load the c tool + Loads the Waf c extensions """ if not conf.env.DEST_OS: conf.env.DEST_OS = Utils.unversioned_sys_platform() @@ -960,7 +1010,7 @@ def cc_load_tools(conf): @conf def cxx_load_tools(conf): """ - Load the cxx tool + Loads the Waf c++ extensions """ if not conf.env.DEST_OS: conf.env.DEST_OS = Utils.unversioned_sys_platform() @@ -969,35 +1019,32 @@ def cxx_load_tools(conf): @conf def get_cc_version(conf, cc, gcc=False, icc=False, clang=False): """ - Run the preprocessor to determine the compiler version + Runs the preprocessor to determine the gcc/icc/clang version The variables CC_VERSION, DEST_OS, DEST_BINFMT and DEST_CPU will be set in *conf.env* + + :raise: :py:class:`waflib.Errors.ConfigurationError` """ cmd = cc + ['-dM', '-E', '-'] env = conf.env.env or None try: - p = Utils.subprocess.Popen(cmd, stdin=Utils.subprocess.PIPE, stdout=Utils.subprocess.PIPE, stderr=Utils.subprocess.PIPE, env=env) - p.stdin.write('\n'.encode()) - out = p.communicate()[0] - except Exception: + out, err = conf.cmd_and_log(cmd, output=0, stdin=open('/dev/null','r'), env=env) + except Errors.WafError: conf.fatal('Could not determine the compiler version %r' % cmd) - if not isinstance(out, str): - out = out.decode(sys.stdout.encoding or 'iso8859-1') - if gcc: if 
out.find('__INTEL_COMPILER') >= 0: conf.fatal('The intel compiler pretends to be gcc') if out.find('__GNUC__') < 0 and out.find('__clang__') < 0: conf.fatal('Could not determine the compiler type') - if icc and out.find('__INTEL_COMPILER') < 0: - conf.fatal('Not icc/icpc') + if icc and out.find('__INTEL_COMPILER') < 0 and out.find('__INTEL_CLANG_COMPILER') < 0: + conf.fatal('Not icc/icx/icpc/icpx') if clang and out.find('__clang__') < 0: conf.fatal('Not clang/clang++') - if not clang and out.find('__clang__') >= 0: - conf.fatal('Could not find g++, if renamed try eg: CXX=g++48 waf configure') + if not clang and not icc and out.find('__clang__') >= 0: + conf.fatal('Could not find gcc/g++ (only Clang), if renamed try eg: CC=gcc48 CXX=g++48 waf configure') k = {} if icc or gcc or clang: @@ -1012,9 +1059,6 @@ def get_cc_version(conf, cc, gcc=False, icc=False, clang=False): def isD(var): return var in k - def isT(var): - return var in k and k[var] != '0' - # Some documentation is available at http://predef.sourceforge.net # The names given to DEST_OS must match what Utils.unversioned_sys_platform() returns. if not conf.env.DEST_OS: @@ -1033,6 +1077,8 @@ def isT(var): conf.env.DEST_BINFMT = 'elf' elif isD('__WINNT__') or isD('__CYGWIN__') or isD('_WIN32'): conf.env.DEST_BINFMT = 'pe' + if not conf.env.IMPLIBDIR: + conf.env.IMPLIBDIR = conf.env.LIBDIR # for .lib or .dll.a files conf.env.LIBDIR = conf.env.BINDIR elif isD('__APPLE__'): conf.env.DEST_BINFMT = 'mac-o' @@ -1048,26 +1094,29 @@ def isT(var): Logs.debug('ccroot: dest platform: ' + ' '.join([conf.env[x] or '?' for x in ('DEST_OS', 'DEST_BINFMT', 'DEST_CPU')])) if icc: - ver = k['__INTEL_COMPILER'] - conf.env['CC_VERSION'] = (ver[:-2], ver[-2], ver[-1]) + if isD('__INTEL_CLANG_COMPILER'): + # 20230100 + ver = k['__INTEL_CLANG_COMPILER'] + conf.env.CC_VERSION = (ver[:4], ver[4:6], ver[-2:]) + conf.env.INTEL_CLANG_COMPILER = 1 + else: + ver = k['__INTEL_COMPILER'] + conf.env.CC_VERSION = (ver[:-2], ver[-2], ver[-1]) else: - if isD('__clang__'): - try: - conf.env['CC_VERSION'] = (k['__clang_major__'], k['__clang_minor__'], k['__clang_patchlevel__']) - except KeyError: - # Some versions of OSX have a faux-gcc "clang" without clang version defines - conf.env['CC_VERSION'] = (k['__GNUC__'], k['__GNUC_MINOR__'], k['__GNUC_PATCHLEVEL__']) + if isD('__clang__') and isD('__clang_major__'): + conf.env.CC_VERSION = (k['__clang_major__'], k['__clang_minor__'], k['__clang_patchlevel__']) else: - try: - conf.env['CC_VERSION'] = (k['__GNUC__'], k['__GNUC_MINOR__'], k['__GNUC_PATCHLEVEL__']) - except KeyError: - conf.env['CC_VERSION'] = (k['__GNUC__'], k['__GNUC_MINOR__'], 0) + # older clang versions and gcc + conf.env.CC_VERSION = (k['__GNUC__'], k['__GNUC_MINOR__'], k.get('__GNUC_PATCHLEVEL__', '0')) return k @conf def get_xlc_version(conf, cc): - """Get the compiler version""" + """ + Returns the Aix compiler version + :raise: :py:class:`waflib.Errors.ConfigurationError` + """ cmd = cc + ['-qversion'] try: out, err = conf.cmd_and_log(cmd, output=0) @@ -1080,15 +1129,18 @@ def get_xlc_version(conf, cc): match = version_re(out or err) if match: k = match.groupdict() - conf.env['CC_VERSION'] = (k['major'], k['minor']) + conf.env.CC_VERSION = (k['major'], k['minor']) break else: conf.fatal('Could not determine the XLC version.') @conf def get_suncc_version(conf, cc): - """Get the compiler version""" + """ + Returns the Sun compiler version + :raise: :py:class:`waflib.Errors.ConfigurationError` + """ cmd = cc + ['-V'] try: out, err = conf.cmd_and_log(cmd, 
output=0) @@ -1102,11 +1154,14 @@ def get_suncc_version(conf, cc): version = (out or err) version = version.splitlines()[0] - version_re = re.compile(r'cc:\s+sun\s+(c\+\+|c)\s+(?P\d*)\.(?P\d*)', re.I).search + # cc: Sun C 5.10 SunOS_i386 2009/06/03 + # cc: Studio 12.5 Sun C++ 5.14 SunOS_sparc Beta 2015/11/17 + # cc: WorkShop Compilers 5.0 98/12/15 C 5.0 + version_re = re.compile(r'cc: (studio.*?|\s+)?(sun\s+(c\+\+|c)|(WorkShop\s+Compilers))?\s+(?P\d*)\.(?P\d*)', re.I).search match = version_re(version) if match: k = match.groupdict() - conf.env['CC_VERSION'] = (k['major'], k['minor']) + conf.env.CC_VERSION = (k['major'], k['minor']) else: conf.fatal('Could not determine the suncc version.') @@ -1115,7 +1170,7 @@ def get_suncc_version(conf, cc): @conf def add_as_needed(self): """ - Add ``--as-needed`` to the *LINKFLAGS* + Adds ``--as-needed`` to the *LINKFLAGS* On some platforms, it is a default flag. In some cases (e.g., in NS-3) it is necessary to explicitly disable this feature with `-Wl,--no-as-needed` flag. """ if self.env.DEST_BINFMT == 'elf' and 'gcc' in (self.env.CXX_NAME, self.env.CC_NAME): @@ -1123,22 +1178,31 @@ def add_as_needed(self): # ============ parallel configuration -class cfgtask(Task.TaskBase): +class cfgtask(Task.Task): """ - A task that executes configuration tests - make sure that the checks write to conf.env in a thread-safe manner + A task that executes build configuration tests (calls conf.check) - for the moment it only executes conf.check + Make sure to use locks if concurrent access to the same conf.env data is necessary. """ + def __init__(self, *k, **kw): + Task.Task.__init__(self, *k, **kw) + self.run_after = set() + def display(self): return '' def runnable_status(self): + for x in self.run_after: + if not x.hasrun: + return Task.ASK_LATER return Task.RUN_ME def uid(self): return Utils.SIG_NIL + def signature(self): + return Utils.SIG_NIL + def run(self): conf = self.conf bld = Build.BuildContext(top_dir=conf.srcnode.abspath(), out_dir=conf.bldnode.abspath()) @@ -1146,34 +1210,91 @@ def run(self): bld.init_dirs() bld.in_msg = 1 # suppress top-level start_msg bld.logger = self.logger + bld.multicheck_task = self + args = self.args try: - bld.check(**self.args) + if 'func' in args: + bld.test(build_fun=args['func'], + msg=args.get('msg', ''), + okmsg=args.get('okmsg', ''), + errmsg=args.get('errmsg', ''), + ) + else: + args['multicheck_mandatory'] = args.get('mandatory', True) + args['mandatory'] = True + try: + bld.check(**args) + finally: + args['mandatory'] = args['multicheck_mandatory'] except Exception: return 1 + def process(self): + Task.Task.process(self) + if 'msg' in self.args: + with self.generator.bld.multicheck_lock: + self.conf.start_msg(self.args['msg']) + if self.hasrun == Task.NOT_RUN: + self.conf.end_msg('test cancelled', 'YELLOW') + elif self.hasrun != Task.SUCCESS: + self.conf.end_msg(self.args.get('errmsg', 'no'), 'YELLOW') + else: + self.conf.end_msg(self.args.get('okmsg', 'yes'), 'GREEN') + @conf def multicheck(self, *k, **kw): """ - Use tuples to perform parallel configuration tests + Runs configuration tests in parallel; results are printed sequentially at the end of the build + but each test must provide its own msg value to display a line:: + + def test_build(ctx): + ctx.in_msg = True # suppress console outputs + ctx.check_large_file(mandatory=False) + + conf.multicheck( + {'header_name':'stdio.h', 'msg':'... stdio', 'uselib_store':'STDIO', 'global_define':False}, + {'header_name':'xyztabcd.h', 'msg':'... 
optional xyztabcd.h', 'mandatory': False}, + {'header_name':'stdlib.h', 'msg':'... stdlib', 'okmsg': 'aye', 'errmsg': 'nope'}, + {'func': test_build, 'msg':'... testing an arbitrary build function', 'okmsg':'ok'}, + msg = 'Checking for headers in parallel', + mandatory = True, # mandatory tests raise an error at the end + run_all_tests = True, # try running all tests + ) + + The configuration tests may modify the values in conf.env in any order, and the define + values can affect configuration tests being executed. It is hence recommended + to provide `uselib_store` values with `global_define=False` to prevent such issues. """ self.start_msg(kw.get('msg', 'Executing %d configuration tests' % len(k)), **kw) + # Force a copy so that threads append to the same list at least + # no order is guaranteed, but the values should not disappear at least + for var in ('DEFINES', DEFKEYS): + self.env.append_value(var, []) + self.env.DEFINE_COMMENTS = self.env.DEFINE_COMMENTS or {} + + # define a task object that will execute our tests class par(object): def __init__(self): self.keep = False - self.returned_tasks = [] self.task_sigs = {} + self.progress_bar = 0 def total(self): return len(tasks) def to_log(self, *k, **kw): return bld = par() + bld.keep = kw.get('run_all_tests', True) + bld.imp_sigs = {} tasks = [] - for dct in k: - x = cfgtask(bld=bld) + + id_to_task = {} + for counter, dct in enumerate(k): + x = Task.classes['cfgtask'](bld=bld, env=None) tasks.append(x) x.args = dct + x.args['multicheck_counter'] = counter x.bld = bld x.conf = self x.args = dct @@ -1181,22 +1302,75 @@ def to_log(self, *k, **kw): # bind a logger that will keep the info in memory x.logger = Logs.make_mem_logger(str(id(x)), self.logger) + if 'id' in dct: + id_to_task[dct['id']] = x + + # second pass to set dependencies with after_test/before_test + for x in tasks: + for key in Utils.to_list(x.args.get('before_tests', [])): + tsk = id_to_task[key] + if not tsk: + raise ValueError('No test named %r' % key) + tsk.run_after.add(x) + for key in Utils.to_list(x.args.get('after_tests', [])): + tsk = id_to_task[key] + if not tsk: + raise ValueError('No test named %r' % key) + x.run_after.add(tsk) + def it(): yield tasks while 1: yield [] - p = Runner.Parallel(bld, Options.options.jobs) + bld.producer = p = Runner.Parallel(bld, Options.options.jobs) + bld.multicheck_lock = Utils.threading.Lock() p.biter = it() + + self.end_msg('started') p.start() # flush the logs in order into the config.log for x in tasks: x.logger.memhandler.flush() + self.start_msg('-> processing test results') + if p.error: + for x in p.error: + if getattr(x, 'err_msg', None): + self.to_log(x.err_msg) + self.end_msg('fail', color='RED') + raise Errors.WafError('There is an error in the library, read config.log for more information') + + failure_count = 0 + for x in tasks: + if x.hasrun not in (Task.SUCCESS, Task.NOT_RUN): + failure_count += 1 + + if failure_count: + self.end_msg(kw.get('errmsg', '%s test failed' % failure_count), color='YELLOW', **kw) + else: + self.end_msg('all ok', **kw) + for x in tasks: if x.hasrun != Task.SUCCESS: - self.end_msg(kw.get('errmsg', 'no'), color='YELLOW', **kw) - self.fatal(kw.get('fatalmsg', None) or 'One of the tests has failed, see the config.log for more information') + if x.args.get('mandatory', True): + self.fatal(kw.get('fatalmsg') or 'One of the tests has failed, read config.log for more information') - self.end_msg('ok', **kw) +@conf +def check_gcc_o_space(self, mode='c'): + if int(self.env.CC_VERSION[0]) > 4: + # 
this is for old compilers + return + self.env.stash() + if mode == 'c': + self.env.CCLNK_TGT_F = ['-o', ''] + elif mode == 'cxx': + self.env.CXXLNK_TGT_F = ['-o', ''] + features = '%s %sshlib' % (mode, mode) + try: + self.check(msg='Checking if the -o link must be split from arguments', fragment=SNIP_EMPTY_PROGRAM, features=features) + except self.errors.ConfigurationError: + self.env.revert() + else: + self.env.commit() diff --git a/waflib/Tools/c_osx.py b/waflib/Tools/c_osx.py index a2f62412d4..f70b128b87 100644 --- a/waflib/Tools/c_osx.py +++ b/waflib/Tools/c_osx.py @@ -1,13 +1,13 @@ #!/usr/bin/env python # encoding: utf-8 -# Thomas Nagy 2008-2010 +# Thomas Nagy 2008-2018 (ita) """ MacOSX related tools """ -import os, shutil, sys, platform -from waflib import TaskGen, Task, Build, Options, Utils, Errors +import os, shutil, platform +from waflib import Task, Utils from waflib.TaskGen import taskgen_method, feature, after_method, before_method app_info = ''' @@ -24,7 +24,7 @@ NOTE THIS IS A GENERATED FILE, DO NOT MODIFY CFBundleExecutable - %s + {app_name} ''' @@ -37,8 +37,8 @@ def set_macosx_deployment_target(self): """ see WAF issue 285 and also and also http://trac.macports.org/ticket/17059 """ - if self.env['MACOSX_DEPLOYMENT_TARGET']: - os.environ['MACOSX_DEPLOYMENT_TARGET'] = self.env['MACOSX_DEPLOYMENT_TARGET'] + if self.env.MACOSX_DEPLOYMENT_TARGET: + os.environ['MACOSX_DEPLOYMENT_TARGET'] = self.env.MACOSX_DEPLOYMENT_TARGET elif 'MACOSX_DEPLOYMENT_TARGET' not in os.environ: if Utils.unversioned_sys_platform() == 'darwin': os.environ['MACOSX_DEPLOYMENT_TARGET'] = '.'.join(platform.mac_ver()[0].split('.')[:2]) @@ -46,9 +46,8 @@ def set_macosx_deployment_target(self): @taskgen_method def create_bundle_dirs(self, name, out): """ - Create bundle folders, used by :py:func:`create_task_macplist` and :py:func:`create_task_macapp` + Creates bundle folders, used by :py:func:`create_task_macplist` and :py:func:`create_task_macapp` """ - bld = self.bld dir = out.parent.find_or_declare(name) dir.mkdir() macos = dir.find_or_declare(['Contents', 'MacOS']) @@ -71,7 +70,7 @@ def create_task_macapp(self): To compile an executable into a Mac application (a .app), set its *mac_app* attribute:: def build(bld): - bld.shlib(source='a.c', target='foo', mac_app = True) + bld.shlib(source='a.c', target='foo', mac_app=True) To force *all* executables to be transformed into Mac applications:: @@ -79,7 +78,7 @@ def build(bld): bld.env.MACAPP = True bld.shlib(source='a.c', target='foo') """ - if self.env['MACAPP'] or getattr(self, 'mac_app', False): + if self.env.MACAPP or getattr(self, 'mac_app', False): out = self.link_task.outputs[0] name = bundle_name_for_output(out) @@ -89,37 +88,33 @@ def build(bld): self.apptask = self.create_task('macapp', self.link_task.outputs, n1) inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Contents/MacOS/' % name - self.bld.install_files(inst_to, n1, chmod=Utils.O755) - - if getattr(self, 'mac_resources', None): + self.add_install_files(install_to=inst_to, install_from=n1, chmod=Utils.O755) + + if getattr(self, 'mac_files', None): + # this only accepts files; they will be installed as seen from mac_files_root + mac_files_root = getattr(self, 'mac_files_root', None) + if isinstance(mac_files_root, str): + mac_files_root = self.path.find_node(mac_files_root) + if not mac_files_root: + self.bld.fatal('Invalid mac_files_root %r' % self.mac_files_root) res_dir = n1.parent.parent.make_node('Resources') inst_to = getattr(self, 'install_path', '/Applications') + 
'/%s/Resources' % name - for x in self.to_list(self.mac_resources): - node = self.path.find_node(x) - if not node: - raise Errors.WafError('Missing mac_resource %r in %r' % (x, self)) - - parent = node.parent - if os.path.isdir(node.abspath()): - nodes = node.ant_glob('**') - else: - nodes = [node] - for node in nodes: - rel = node.path_from(parent) - tsk = self.create_task('macapp', node, res_dir.make_node(rel)) - self.bld.install_as(inst_to + '/%s' % rel, node) + for node in self.to_nodes(self.mac_files): + relpath = node.path_from(mac_files_root or node.parent) + self.create_task('macapp', node, res_dir.make_node(relpath)) + self.add_install_as(install_to=os.path.join(inst_to, relpath), install_from=node) if getattr(self.bld, 'is_install', None): - # disable the normal binary installation + # disable regular binary installation self.install_task.hasrun = Task.SKIP_ME @feature('cprogram', 'cxxprogram') @after_method('apply_link') def create_task_macplist(self): """ - Create a :py:class:`waflib.Tools.c_osx.macplist` instance. + Creates a :py:class:`waflib.Tools.c_osx.macplist` instance. """ - if self.env['MACAPP'] or getattr(self, 'mac_app', False): + if self.env.MACAPP or getattr(self, 'mac_app', False): out = self.link_task.outputs[0] name = bundle_name_for_output(out) @@ -127,6 +122,14 @@ def create_task_macplist(self): dir = self.create_bundle_dirs(name, out) n1 = dir.find_or_declare(['Contents', 'Info.plist']) self.plisttask = plisttask = self.create_task('macplist', [], n1) + plisttask.context = { + 'app_name': self.link_task.outputs[0].name, + 'env': self.env + } + + plist_ctx = getattr(self, 'plist_context', None) + if (plist_ctx): + plisttask.context.update(plist_ctx) if getattr(self, 'mac_plist', False): node = self.path.find_resource(self.mac_plist) @@ -135,10 +138,10 @@ def create_task_macplist(self): else: plisttask.code = self.mac_plist else: - plisttask.code = app_info % self.link_task.outputs[0].name + plisttask.code = app_info inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Contents/' % name - self.bld.install_files(inst_to, n1) + self.add_install_files(install_to=inst_to, install_from=n1) @feature('cshlib', 'cxxshlib') @before_method('apply_link', 'propagate_uselib_vars') @@ -155,9 +158,9 @@ def build(bld): bld.env.MACBUNDLE = True bld.shlib(source='a.c', target='foo') """ - if self.env['MACBUNDLE'] or getattr(self, 'mac_bundle', False): - self.env['LINKFLAGS_cshlib'] = self.env['LINKFLAGS_cxxshlib'] = [] # disable the '-dynamiclib' flag - self.env['cshlib_PATTERN'] = self.env['cxxshlib_PATTERN'] = self.env['macbundle_PATTERN'] + if self.env.MACBUNDLE or getattr(self, 'mac_bundle', False): + self.env.LINKFLAGS_cshlib = self.env.LINKFLAGS_cxxshlib = [] # disable the '-dynamiclib' flag + self.env.cshlib_PATTERN = self.env.cxxshlib_PATTERN = self.env.macbundle_PATTERN use = self.use = self.to_list(getattr(self, 'use', [])) if not 'MACBUNDLE' in use: use.append('MACBUNDLE') @@ -166,7 +169,7 @@ def build(bld): class macapp(Task.Task): """ - Create mac applications + Creates mac applications """ color = 'PINK' def run(self): @@ -175,7 +178,7 @@ def run(self): class macplist(Task.Task): """ - Create plist files + Creates plist files """ color = 'PINK' ext_in = ['.bin'] @@ -184,5 +187,7 @@ def run(self): txt = self.code else: txt = self.inputs[0].read() + context = getattr(self, 'context', {}) + txt = txt.format(**context) self.outputs[0].write(txt) diff --git a/waflib/Tools/c_preproc.py b/waflib/Tools/c_preproc.py index c244f64217..68e5f5aea2 100644 --- 
a/waflib/Tools/c_preproc.py +++ b/waflib/Tools/c_preproc.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # encoding: utf-8 -# Thomas Nagy, 2006-2010 (ita) +# Thomas Nagy, 2006-2018 (ita) """ C/C++ preprocessor for finding dependencies @@ -28,11 +28,13 @@ import re, string, traceback from waflib import Logs, Utils, Errors -from waflib.Logs import debug, error class PreprocError(Errors.WafError): pass +FILE_CACHE_SIZE = 100000 +LINE_CACHE_SIZE = 100000 + POPFILE = '-' "Constant representing a special token used in :py:meth:`waflib.Tools.c_preproc.c_parser.start` iteration to switch to a header read previously" @@ -42,15 +44,15 @@ class PreprocError(Errors.WafError): go_absolute = False "Set to True to track headers on files in /usr/include, else absolute paths are ignored (but it becomes very slow)" -standard_includes = ['/usr/include'] +standard_includes = ['/usr/local/include', '/usr/include'] if Utils.is_win32: standard_includes = [] use_trigraphs = 0 """Apply trigraph rules (False by default)""" +# obsolete, do not use strict_quotes = 0 -"""Reserve the "#include <>" quotes for system includes (do not search for those includes). False by default.""" g_optrans = { 'not':'!', @@ -69,17 +71,17 @@ class PreprocError(Errors.WafError): # ignore #warning and #error re_lines = re.compile( - '^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*)\r*$', + '^[ \t]*(?:#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*)\r*$', re.IGNORECASE | re.MULTILINE) """Match #include lines""" -re_mac = re.compile("^[a-zA-Z_]\w*") +re_mac = re.compile(r"^[a-zA-Z_]\w*") """Match macro definitions""" re_fun = re.compile('^[a-zA-Z_][a-zA-Z0-9_]*[(]') """Match macro functions""" -re_pragma_once = re.compile('^\s*once\s*', re.IGNORECASE) +re_pragma_once = re.compile(r'^\s*once\s*', re.IGNORECASE) """Match #pragma once statements""" re_nl = re.compile('\\\\\r*\n', re.MULTILINE) @@ -137,54 +139,22 @@ class PreprocError(Errors.WafError): def repl(m): """Replace function used with :py:attr:`waflib.Tools.c_preproc.re_cpp`""" - s = m.group(0) - if s.startswith('/'): + s = m.group() + if s[0] == '/': return ' ' return s -def filter_comments(filename): - """ - Filter the comments from a c/h file, and return the preprocessor lines. - The regexps :py:attr:`waflib.Tools.c_preproc.re_cpp`, :py:attr:`waflib.Tools.c_preproc.re_nl` and :py:attr:`waflib.Tools.c_preproc.re_lines` are used internally. 
- - :return: the preprocessor directives as a list of (keyword, line) - :rtype: a list of string pairs - """ - # return a list of tuples : keyword, line - code = Utils.readf(filename) - if use_trigraphs: - for (a, b) in trig_def: code = code.split(a).join(b) - code = re_nl.sub('', code) - code = re_cpp.sub(repl, code) - return [(m.group(2), m.group(3)) for m in re.finditer(re_lines, code)] - prec = {} """ -Operator precendence rules required for parsing expressions of the form:: +Operator precedence rules required for parsing expressions of the form:: #if 1 && 2 != 0 """ ops = ['* / %', '+ -', '<< >>', '< <= >= >', '== !=', '& | ^', '&& ||', ','] -for x in range(len(ops)): - syms = ops[x] +for x, syms in enumerate(ops): for u in syms.split(): prec[u] = x -def trimquotes(s): - """ - Remove the single quotes around an expression:: - - trimquotes("'test'") == "test" - - :param s: expression to transform - :type s: string - :rtype: string - """ - if not s: return '' - s = s.rstrip() - if s[0] == "'" and s[-1] == "'": return s[1:-1] - return s - def reduce_nums(val_1, val_2, val_op): """ Apply arithmetic rules to compute a result @@ -200,32 +170,56 @@ def reduce_nums(val_1, val_2, val_op): #print val_1, val_2, val_op # now perform the operation, make certain a and b are numeric - try: a = 0 + val_1 - except TypeError: a = int(val_1) - try: b = 0 + val_2 - except TypeError: b = int(val_2) + try: + a = 0 + val_1 + except TypeError: + a = int(val_1) + try: + b = 0 + val_2 + except TypeError: + b = int(val_2) d = val_op - if d == '%': c = a%b - elif d=='+': c = a+b - elif d=='-': c = a-b - elif d=='*': c = a*b - elif d=='/': c = a/b - elif d=='^': c = a^b - elif d=='==': c = int(a == b) - elif d=='|' or d == 'bitor': c = a|b - elif d=='||' or d == 'or' : c = int(a or b) - elif d=='&' or d == 'bitand': c = a&b - elif d=='&&' or d == 'and': c = int(a and b) - elif d=='!=' or d == 'not_eq': c = int(a != b) - elif d=='^' or d == 'xor': c = int(a^b) - elif d=='<=': c = int(a <= b) - elif d=='<': c = int(a < b) - elif d=='>': c = int(a > b) - elif d=='>=': c = int(a >= b) - elif d=='<<': c = a<>': c = a>>b - else: c = 0 + if d == '%': + c = a % b + elif d=='+': + c = a + b + elif d=='-': + c = a - b + elif d=='*': + c = a * b + elif d=='/': + c = a / b + elif d=='^': + c = a ^ b + elif d=='==': + c = int(a == b) + elif d=='|' or d == 'bitor': + c = a | b + elif d=='||' or d == 'or' : + c = int(a or b) + elif d=='&' or d == 'bitand': + c = a & b + elif d=='&&' or d == 'and': + c = int(a and b) + elif d=='!=' or d == 'not_eq': + c = int(a != b) + elif d=='^' or d == 'xor': + c = int(a^b) + elif d=='<=': + c = int(a <= b) + elif d=='<': + c = int(a < b) + elif d=='>': + c = int(a > b) + elif d=='>=': + c = int(a >= b) + elif d=='<<': + c = a << b + elif d=='>>': + c = a >> b + else: + c = 0 return c def get_num(lst): @@ -237,7 +231,8 @@ def get_num(lst): :return: a pair containing the number and the rest of the list :rtype: tuple(value, list) """ - if not lst: raise PreprocError("empty list for get_num") + if not lst: + raise PreprocError('empty list for get_num') (p, v) = lst[0] if p == OP: if v == '(': @@ -255,7 +250,7 @@ def get_num(lst): count_par += 1 i += 1 else: - raise PreprocError("rparen expected %r" % lst) + raise PreprocError('rparen expected %r' % lst) (num, _) = get_term(lst[1:i]) return (num, lst[i+1:]) @@ -272,14 +267,14 @@ def get_num(lst): num, lst = get_num(lst[1:]) return (~ int(num), lst) else: - raise PreprocError("Invalid op token %r for get_num" % lst) + raise PreprocError('Invalid 
op token %r for get_num' % lst) elif p == NUM: return v, lst[1:] elif p == IDENT: # all macros should have been replaced, remaining identifiers eval to 0 return 0, lst[1:] else: - raise PreprocError("Invalid token %r for get_num" % lst) + raise PreprocError('Invalid token %r for get_num' % lst) def get_term(lst): """ @@ -293,7 +288,8 @@ def get_term(lst): :rtype: value, list """ - if not lst: raise PreprocError("empty list for get_term") + if not lst: + raise PreprocError('empty list for get_term') num, lst = get_num(lst) if not lst: return (num, []) @@ -318,7 +314,7 @@ def get_term(lst): break i += 1 else: - raise PreprocError("rparen expected %r" % lst) + raise PreprocError('rparen expected %r' % lst) if int(num): return get_term(lst[1:i]) @@ -336,7 +332,7 @@ def get_term(lst): # operator precedence p2, v2 = lst[0] if p2 != OP: - raise PreprocError("op expected %r" % lst) + raise PreprocError('op expected %r' % lst) if prec[v2] >= prec[v]: num2 = reduce_nums(num, num2, v) @@ -347,7 +343,7 @@ def get_term(lst): return get_term([(NUM, num), (p, v), (NUM, num3)] + lst) - raise PreprocError("cannot reduce %r" % lst) + raise PreprocError('cannot reduce %r' % lst) def reduce_eval(lst): """ @@ -432,7 +428,7 @@ def reduce_tokens(lst, defs, ban=[]): else: lst[i] = (NUM, 0) else: - raise PreprocError("Invalid define expression %r" % lst) + raise PreprocError('Invalid define expression %r' % lst) elif p == IDENT and v in defs: @@ -447,8 +443,8 @@ def reduce_tokens(lst, defs, ban=[]): del lst[i] accu = to_add[:] reduce_tokens(accu, defs, ban+[v]) - for x in range(len(accu)): - lst.insert(i, accu[x]) + for tmp in accu: + lst.insert(i, tmp) i += 1 else: # collect the arguments for the funcall @@ -457,11 +453,11 @@ def reduce_tokens(lst, defs, ban=[]): del lst[i] if i >= len(lst): - raise PreprocError("expected '(' after %r (got nothing)" % v) + raise PreprocError('expected ( after %r (got nothing)' % v) (p2, v2) = lst[i] if p2 != OP or v2 != '(': - raise PreprocError("expected '(' after %r" % v) + raise PreprocError('expected ( after %r' % v) del lst[i] @@ -476,18 +472,22 @@ def reduce_tokens(lst, defs, ban=[]): one_param.append((p2, v2)) count_paren += 1 elif v2 == ')': - if one_param: args.append(one_param) + if one_param: + args.append(one_param) break elif v2 == ',': - if not one_param: raise PreprocError("empty param in funcall %s" % v) + if not one_param: + raise PreprocError('empty param in funcall %r' % v) args.append(one_param) one_param = [] else: one_param.append((p2, v2)) else: one_param.append((p2, v2)) - if v2 == '(': count_paren += 1 - elif v2 == ')': count_paren -= 1 + if v2 == '(': + count_paren += 1 + elif v2 == ')': + count_paren -= 1 else: raise PreprocError('malformed macro') @@ -524,7 +524,6 @@ def reduce_tokens(lst, defs, ban=[]): accu.append((p2, v2)) accu.extend(toks) elif to_add[j+1][0] == IDENT and to_add[j+1][1] == '__VA_ARGS__': - # TODO not sure # first collect the tokens va_toks = [] st = len(macro_def[0]) @@ -532,7 +531,8 @@ def reduce_tokens(lst, defs, ban=[]): for x in args[pt-st+1:]: va_toks.extend(x) va_toks.append((OP, ',')) - if va_toks: va_toks.pop() # extra comma + if va_toks: + va_toks.pop() # extra comma if len(accu)>1: (p3, v3) = accu[-1] (p4, v4) = accu[-2] @@ -580,8 +580,15 @@ def eval_macro(lst, defs): :rtype: int """ reduce_tokens(lst, defs, []) - if not lst: raise PreprocError("missing tokens to evaluate") - (p, v) = reduce_eval(lst) + if not lst: + raise PreprocError('missing tokens to evaluate') + + if lst: + p, v = lst[0] + if p == IDENT and v not in 
defs: + raise PreprocError('missing macro %r' % lst) + + p, v = reduce_eval(lst) return int(v) != 0 def extract_macro(txt): @@ -601,7 +608,8 @@ def extract_macro(txt): p, name = t[0] p, v = t[1] - if p != OP: raise PreprocError("expected open parenthesis") + if p != OP: + raise PreprocError('expected (') i = 1 pindex = 0 @@ -620,27 +628,27 @@ def extract_macro(txt): elif p == OP and v == ')': break else: - raise PreprocError("unexpected token (3)") + raise PreprocError('unexpected token (3)') elif prev == IDENT: if p == OP and v == ',': prev = v elif p == OP and v == ')': break else: - raise PreprocError("comma or ... expected") + raise PreprocError('comma or ... expected') elif prev == ',': if p == IDENT: params[v] = pindex pindex += 1 prev = p elif p == OP and v == '...': - raise PreprocError("not implemented (1)") + raise PreprocError('not implemented (1)') else: - raise PreprocError("comma or ... expected (2)") + raise PreprocError('comma or ... expected (2)') elif prev == '...': - raise PreprocError("not implemented (2)") + raise PreprocError('not implemented (2)') else: - raise PreprocError("unexpected else") + raise PreprocError('unexpected else') #~ print (name, [params, t[i+1:]]) return (name, [params, t[i+1:]]) @@ -652,7 +660,7 @@ def extract_macro(txt): # empty define, assign an empty token return (v, [[], [('T','')]]) -re_include = re.compile('^\s*(<(?P.*)>|"(?P.*)")') +re_include = re.compile(r'^\s*(<(?:.*)>|"(?:.*)")') def extract_include(txt, defs): """ Process a line in the form:: @@ -668,15 +676,15 @@ def extract_include(txt, defs): """ m = re_include.search(txt) if m: - if m.group('a'): return '<', m.group('a') - if m.group('b'): return '"', m.group('b') + txt = m.group(1) + return txt[0], txt[1:-1] # perform preprocessing and look at the result, it must match an include toks = tokenize(txt) reduce_tokens(toks, defs, ['waf_include']) if not toks: - raise PreprocError("could not parse include %s" % txt) + raise PreprocError('could not parse include %r' % txt) if len(toks) == 1: if toks[0][0] == STR: @@ -686,7 +694,7 @@ def extract_include(txt, defs): ret = '<', stringize(toks).lstrip('<').rstrip('>') return ret - raise PreprocError("could not parse include %s." 
% txt) + raise PreprocError('could not parse include %r' % txt) def parse_char(txt): """ @@ -698,21 +706,26 @@ def parse_char(txt): :rtype: string """ - if not txt: raise PreprocError("attempted to parse a null char") + if not txt: + raise PreprocError('attempted to parse a null char') if txt[0] != '\\': return ord(txt) c = txt[1] if c == 'x': - if len(txt) == 4 and txt[3] in string.hexdigits: return int(txt[2:], 16) + if len(txt) == 4 and txt[3] in string.hexdigits: + return int(txt[2:], 16) return int(txt[2:], 16) elif c.isdigit(): - if c == '0' and len(txt)==2: return 0 + if c == '0' and len(txt)==2: + return 0 for i in 3, 2, 1: if len(txt) > i and txt[1:1+i].isdigit(): return (1+i, int(txt[1:1+i], 8)) else: - try: return chr_esc[c] - except KeyError: raise PreprocError("could not parse char literal '%s'" % txt) + try: + return chr_esc[c] + except KeyError: + raise PreprocError('could not parse char literal %r' % txt) def tokenize(s): """ @@ -725,7 +738,6 @@ def tokenize(s): """ return tokenize_private(s)[:] # force a copy of the results -@Utils.run_once def tokenize_private(s): ret = [] for match in re_clexer.finditer(s): @@ -734,28 +746,32 @@ def tokenize_private(s): v = m(name) if v: if name == IDENT: - try: - g_optrans[v]; + if v in g_optrans: name = OP - except KeyError: - # c++ specific - if v.lower() == "true": - v = 1 - name = NUM - elif v.lower() == "false": - v = 0 - name = NUM + elif v.lower() == "true": + v = 1 + name = NUM + elif v.lower() == "false": + v = 0 + name = NUM elif name == NUM: - if m('oct'): v = int(v, 8) - elif m('hex'): v = int(m('hex'), 16) - elif m('n0'): v = m('n0') + if m('oct'): + v = int(v, 8) + elif m('hex'): + v = int(m('hex'), 16) + elif m('n0'): + v = m('n0') else: v = m('char') - if v: v = parse_char(v) - else: v = m('n2') or m('n4') + if v: + v = parse_char(v) + else: + v = m('n2') or m('n4') elif name == OP: - if v == '%:': v = '#' - elif v == '%:%:': v = '##' + if v == '%:': + v = '#' + elif v == '%:%:': + v = '##' elif name == STR: # remove the quotes around the string v = v[1:-1] @@ -763,15 +779,20 @@ def tokenize_private(s): break return ret -@Utils.run_once -def define_name(line): - """ - :param line: define line - :type line: string - :rtype: string - :return: the define name - """ - return re_mac.match(line).group(0) +def format_defines(lst): + ret = [] + for y in lst: + if y: + pos = y.find('=') + if pos == -1: + # "-DFOO" should give "#define FOO 1" + ret.append(y) + elif pos > 0: + # all others are assumed to be -DX=Y + ret.append('%s %s' % (y[:pos], y[pos+1:])) + else: + raise ValueError('Invalid define expression %r' % y) + return ret class c_parser(object): """ @@ -803,9 +824,12 @@ def __init__(self, nodepaths=None, defines=None): self.curfile = '' """Current file""" - self.ban_includes = set([]) + self.ban_includes = set() """Includes that must not be read (#pragma once)""" + self.listed = set() + """Include nodes/names already listed to avoid duplicates in self.nodes/self.names""" + def cached_find_resource(self, node, filename): """ Find a file from the input directory @@ -818,13 +842,13 @@ def cached_find_resource(self, node, filename): :rtype: :py:class:`waflib.Node.Node` """ try: - nd = node.ctx.cache_nd + cache = node.ctx.preproc_cache_node except AttributeError: - nd = node.ctx.cache_nd = {} + cache = node.ctx.preproc_cache_node = Utils.lru_cache(FILE_CACHE_SIZE) - tup = (node, filename) + key = (node, filename) try: - return nd[tup] + return cache[key] except KeyError: ret = node.find_resource(filename) if ret: @@ -834,10 
+858,10 @@ def cached_find_resource(self, node, filename): tmp = node.ctx.srcnode.search_node(ret.path_from(node.ctx.bldnode)) if tmp and getattr(tmp, 'children', None): ret = None - nd[tup] = ret + cache[key] = ret return ret - def tryfind(self, filename): + def tryfind(self, filename, kind='"', env=None): """ Try to obtain a node from the filename based from the include paths. Will add the node found to :py:attr:`waflib.Tools.c_preproc.c_parser.nodes` or the file name to @@ -851,29 +875,70 @@ def tryfind(self, filename): """ if filename.endswith('.moc'): # we could let the qt4 module use a subclass, but then the function "scan" below must be duplicated - # in the qt4 and in the qt5 classes. So we have two lines here and it is sufficient. TODO waf 1.9 + # in the qt4 and in the qt5 classes. So we have two lines here and it is sufficient. self.names.append(filename) return None self.curfile = filename - # for msvc it should be a for loop over the whole stack - found = self.cached_find_resource(self.currentnode_stack[-1], filename) + found = None + if kind == '"': + if env.MSVC_VERSION: + for n in reversed(self.currentnode_stack): + found = self.cached_find_resource(n, filename) + if found: + break + else: + found = self.cached_find_resource(self.currentnode_stack[-1], filename) - for n in self.nodepaths: - if found: - break - found = self.cached_find_resource(n, filename) + if not found: + for n in self.nodepaths: + found = self.cached_find_resource(n, filename) + if found: + break + listed = self.listed if found and not found in self.ban_includes: - # TODO duplicates do not increase the no-op build times too much, but they may be worth removing - self.nodes.append(found) + if found not in listed: + listed.add(found) + self.nodes.append(found) self.addlines(found) else: - if not filename in self.names: + if filename not in listed: + listed.add(filename) self.names.append(filename) return found + def filter_comments(self, node): + """ + Filter the comments from a c/h file, and return the preprocessor lines. + The regexps :py:attr:`waflib.Tools.c_preproc.re_cpp`, :py:attr:`waflib.Tools.c_preproc.re_nl` and :py:attr:`waflib.Tools.c_preproc.re_lines` are used internally. 
+ + :return: the preprocessor directives as a list of (keyword, line) + :rtype: a list of string pairs + """ + # return a list of tuples : keyword, line + code = node.read() + if use_trigraphs: + for (a, b) in trig_def: + code = code.split(a).join(b) + code = re_nl.sub('', code) + code = re_cpp.sub(repl, code) + return re_lines.findall(code) + + def parse_lines(self, node): + try: + cache = node.ctx.preproc_cache_lines + except AttributeError: + cache = node.ctx.preproc_cache_lines = Utils.lru_cache(LINE_CACHE_SIZE) + try: + return cache[node] + except KeyError: + cache[node] = lines = self.filter_comments(node) + lines.append((POPFILE, '')) + lines.reverse() + return lines + def addlines(self, node): """ Add the lines from a header in the list of preprocessor lines to parse @@ -883,34 +948,23 @@ def addlines(self, node): """ self.currentnode_stack.append(node.parent) - filepath = node.abspath() self.count_files += 1 if self.count_files > recursion_limit: # issue #812 - raise PreprocError("recursion limit exceeded") - pc = self.parse_cache - debug('preproc: reading file %r', filepath) - try: - lns = pc[filepath] - except KeyError: - pass - else: - self.lines.extend(lns) - return + raise PreprocError('recursion limit exceeded') + if Logs.verbose: + Logs.debug('preproc: reading file %r', node) try: - lines = filter_comments(filepath) - lines.append((POPFILE, '')) - lines.reverse() - pc[filepath] = lines # cache the lines filtered - self.lines.extend(lines) - except IOError: - raise PreprocError("could not read the file %s" % filepath) + lines = self.parse_lines(node) + except EnvironmentError: + raise PreprocError('could not read the file %r' % node) except Exception: if Logs.verbose > 0: - error("parsing %s failed" % filepath) - traceback.print_exc() + Logs.error('parsing %r failed %s', node, traceback.format_exc()) + else: + self.lines.extend(lines) def start(self, node, env): """ @@ -922,27 +976,16 @@ def start(self, node, env): :param env: config set containing additional defines to take into account :type env: :py:class:`waflib.ConfigSet.ConfigSet` """ - - debug('preproc: scanning %s (in %s)', node.name, node.parent.name) - - bld = node.ctx - try: - self.parse_cache = bld.parse_cache - except AttributeError: - self.parse_cache = bld.parse_cache = {} + Logs.debug('preproc: scanning %s (in %s)', node.name, node.parent.name) self.current_file = node self.addlines(node) # macros may be defined on the command-line, so they must be parsed as if they were part of the file - if env['DEFINES']: - try: - lst = ['%s %s' % (x[0], trimquotes('='.join(x[1:]))) for x in [y.split('=') for y in env['DEFINES']]] - lst.reverse() - self.lines.extend([('define', x) for x in lst]) - except AttributeError: - # if the defines are invalid the compiler will tell the user - pass + if env.DEFINES: + lst = format_defines(env.DEFINES) + lst.reverse() + self.lines.extend([('define', x) for x in lst]) while self.lines: (token, line) = self.lines.pop() @@ -952,8 +995,6 @@ def start(self, node, env): continue try: - ve = Logs.verbose - if ve: debug('preproc: line is %s - %s state is %s', token, line, self.state) state = self.state # make certain we define the state if we are about to enter in an if block @@ -969,23 +1010,27 @@ def start(self, node, env): if token == 'if': ret = eval_macro(tokenize(line), self.defs) - if ret: state[-1] = accepted - else: state[-1] = ignored + if ret: + state[-1] = accepted + else: + state[-1] = ignored elif token == 'ifdef': m = re_mac.match(line) - if m and m.group(0) in self.defs: 
state[-1] = accepted - else: state[-1] = ignored + if m and m.group() in self.defs: + state[-1] = accepted + else: + state[-1] = ignored elif token == 'ifndef': m = re_mac.match(line) - if m and m.group(0) in self.defs: state[-1] = ignored - else: state[-1] = accepted + if m and m.group() in self.defs: + state[-1] = ignored + else: + state[-1] = accepted elif token == 'include' or token == 'import': (kind, inc) = extract_include(line, self.defs) - if ve: debug('preproc: include found %s (%s) ', inc, kind) - if kind == '"' or not strict_quotes: - self.current_file = self.tryfind(inc) - if token == 'import': - self.ban_includes.add(self.current_file) + self.current_file = self.tryfind(inc, kind, env) + if token == 'import': + self.ban_includes.add(self.current_file) elif token == 'elif': if state[-1] == accepted: state[-1] = skipped @@ -993,24 +1038,35 @@ def start(self, node, env): if eval_macro(tokenize(line), self.defs): state[-1] = accepted elif token == 'else': - if state[-1] == accepted: state[-1] = skipped - elif state[-1] == ignored: state[-1] = accepted + if state[-1] == accepted: + state[-1] = skipped + elif state[-1] == ignored: + state[-1] = accepted elif token == 'define': try: - self.defs[define_name(line)] = line - except Exception: - raise PreprocError("Invalid define line %s" % line) + self.defs[self.define_name(line)] = line + except AttributeError: + raise PreprocError('Invalid define line %r' % line) elif token == 'undef': m = re_mac.match(line) - if m and m.group(0) in self.defs: - self.defs.__delitem__(m.group(0)) + if m and m.group() in self.defs: + self.defs.__delitem__(m.group()) #print "undef %s" % name elif token == 'pragma': if re_pragma_once.match(line.lower()): self.ban_includes.add(self.current_file) except Exception as e: if Logs.verbose: - debug('preproc: line parsing failed (%s): %s %s', e, line, Utils.ex_stack()) + Logs.debug('preproc: line parsing failed (%s): %s %s', e, line, traceback.format_exc()) + + def define_name(self, line): + """ + :param line: define line + :type line: string + :rtype: string + :return: the define name + """ + return re_mac.match(line).group() def scan(task): """ @@ -1020,9 +1076,6 @@ def scan(task): This function is bound as a task method on :py:class:`waflib.Tools.c.c` and :py:class:`waflib.Tools.cxx.cxx` for example """ - - global go_absolute - try: incn = task.generator.includes_nodes except AttributeError: @@ -1035,7 +1088,4 @@ def scan(task): tmp = c_parser(nodepaths) tmp.start(task.inputs[0], task.env) - if Logs.verbose: - debug('deps: deps for %r: %r; unresolved %r' % (task.inputs, tmp.nodes, tmp.names)) return (tmp.nodes, tmp.names) - diff --git a/waflib/Tools/c_tests.py b/waflib/Tools/c_tests.py index bac171a332..bdd186c6bc 100644 --- a/waflib/Tools/c_tests.py +++ b/waflib/Tools/c_tests.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # encoding: utf-8 -# Thomas Nagy, 2010 (ita) +# Thomas Nagy, 2016-2018 (ita) """ Various configuration tests. @@ -9,7 +9,6 @@ from waflib import Task from waflib.Configure import conf from waflib.TaskGen import feature, before_method, after_method -import sys LIB_CODE = ''' #ifdef _MSC_VER @@ -59,7 +58,7 @@ def write_test_file(task): @conf def check_library(self, mode=None, test_exec=True): """ - Check if libraries can be linked with the current linker. Uses :py:func:`waflib.Tools.c_tests.link_lib_test_fun`. + Checks if libraries can be linked with the current linker. Uses :py:func:`waflib.Tools.c_tests.link_lib_test_fun`. 
:param mode: c or cxx or d :type mode: string @@ -73,8 +72,7 @@ def check_library(self, mode=None, test_exec=True): features = 'link_lib_test', msg = 'Checking for libraries', mode = mode, - test_exec = test_exec, - ) + test_exec = test_exec) ######################################################################################## @@ -90,7 +88,7 @@ def check_library(self, mode=None, test_exec=True): @conf def check_inline(self, **kw): """ - Check for the right value for inline macro. + Checks for the right value for inline macro. Define INLINE_MACRO to 1 if the define is found. If the inline macro is not 'inline', add a define to the ``config.h`` (#define inline __inline__) @@ -99,7 +97,6 @@ def check_inline(self, **kw): :param features: by default *c* or *cxx* depending on the compiler present :type features: list of string """ - self.start_msg('Checking for inline') if not 'define_name' in kw: @@ -136,7 +133,7 @@ def check_inline(self, **kw): @conf def check_large_file(self, **kw): """ - Check for large file support and define the macro HAVE_LARGEFILE + Checks for large file support and define the macro HAVE_LARGEFILE The test is skipped on win32 systems (DEST_BINFMT == pe). :param define_name: define to set, by default *HAVE_LARGEFILE* @@ -144,7 +141,6 @@ def check_large_file(self, **kw): :param execute: execute the test (yes by default) :type execute: bool """ - if not 'define_name' in kw: kw['define_name'] = 'HAVE_LARGEFILE' if not 'execute' in kw: @@ -184,9 +180,15 @@ def check_large_file(self, **kw): ######################################################################################## ENDIAN_FRAGMENT = ''' +#ifdef _MSC_VER +#define testshlib_EXPORT __declspec(dllexport) +#else +#define testshlib_EXPORT +#endif + short int ascii_mm[] = { 0x4249, 0x4765, 0x6E44, 0x6961, 0x6E53, 0x7953, 0 }; short int ascii_ii[] = { 0x694C, 0x5454, 0x656C, 0x6E45, 0x6944, 0x6E61, 0 }; -int use_ascii (int i) { +int testshlib_EXPORT use_ascii (int i) { return ascii_mm[i] + ascii_ii[i]; } short int ebcdic_ii[] = { 0x89D3, 0xE3E3, 0x8593, 0x95C5, 0x89C4, 0x9581, 0 }; @@ -198,9 +200,12 @@ def check_large_file(self, **kw): ''' class grep_for_endianness(Task.Task): + """ + Task that reads a binary and tries to determine the endianness + """ color = 'PINK' def run(self): - txt = self.inputs[0].read(flags='rb').decode('iso8859-1') + txt = self.inputs[0].read(flags='rb').decode('latin-1') if txt.find('LiTTleEnDian') > -1: self.generator.tmp.append('little') elif txt.find('BIGenDianSyS') > -1: @@ -209,18 +214,24 @@ def run(self): return -1 @feature('grep_for_endianness') -@after_method('process_source') +@after_method('apply_link') def grep_for_endianness_fun(self): - self.create_task('grep_for_endianness', self.compiled_tasks[0].outputs[0]) + """ + Used by the endianness configuration test + """ + self.create_task('grep_for_endianness', self.link_task.outputs[0]) @conf def check_endianness(self): """ - Execute a configuration test to determine the endianness + Executes a configuration test to determine the endianness """ tmp = [] def check_msg(self): return tmp[0] - self.check(fragment=ENDIAN_FRAGMENT, features='c grep_for_endianness', msg="Checking for endianness", define='ENDIANNESS', tmp=tmp, okmsg=check_msg) + + self.check(fragment=ENDIAN_FRAGMENT, features='c cshlib grep_for_endianness', + msg='Checking for endianness', define='ENDIANNESS', tmp=tmp, + okmsg=check_msg, confcache=None) return tmp[0] diff --git a/waflib/Tools/ccroot.py b/waflib/Tools/ccroot.py index 30577477bc..76deff54dc 100644 --- 
a/waflib/Tools/ccroot.py +++ b/waflib/Tools/ccroot.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # encoding: utf-8 -# Thomas Nagy, 2005-2010 (ita) +# Thomas Nagy, 2005-2018 (ita) """ Classes and methods shared by tools providing support for C-like language such @@ -8,7 +8,7 @@ """ import os, re -from waflib import Task, Utils, Node, Errors +from waflib import Task, Utils, Node, Errors, Logs from waflib.TaskGen import after_method, before_method, feature, taskgen_method, extension from waflib.Tools import c_aliases, c_preproc, c_config, c_osx, c_tests from waflib.Configure import conf @@ -25,8 +25,8 @@ USELIB_VARS['d'] = set(['INCLUDES', 'DFLAGS']) USELIB_VARS['includes'] = set(['INCLUDES', 'FRAMEWORKPATH', 'ARCH']) -USELIB_VARS['cprogram'] = USELIB_VARS['cxxprogram'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS', 'FRAMEWORK', 'FRAMEWORKPATH', 'ARCH']) -USELIB_VARS['cshlib'] = USELIB_VARS['cxxshlib'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS', 'FRAMEWORK', 'FRAMEWORKPATH', 'ARCH']) +USELIB_VARS['cprogram'] = USELIB_VARS['cxxprogram'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS', 'FRAMEWORK', 'FRAMEWORKPATH', 'ARCH', 'LDFLAGS']) +USELIB_VARS['cshlib'] = USELIB_VARS['cxxshlib'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS', 'FRAMEWORK', 'FRAMEWORKPATH', 'ARCH', 'LDFLAGS']) USELIB_VARS['cstlib'] = USELIB_VARS['cxxstlib'] = set(['ARFLAGS', 'LINKDEPS']) USELIB_VARS['dprogram'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS']) @@ -77,7 +77,7 @@ def to_incnodes(self, inlst): :return: list of include folders as nodes """ lst = [] - seen = set([]) + seen = set() for x in self.to_list(inlst): if x in seen or not x: continue @@ -111,25 +111,30 @@ def apply_incpaths(self): tg = bld(features='includes', includes='.') The folders only need to be relative to the current directory, the equivalent build directory is - added automatically (for headers created in the build directory). This enable using a build directory + added automatically (for headers created in the build directory). This enables using a build directory or not (``top == out``). This method will add a list of nodes read by :py:func:`waflib.Tools.ccroot.to_incnodes` in ``tg.env.INCPATHS``, and the list of include paths in ``tg.env.INCLUDES``. """ - lst = self.to_incnodes(self.to_list(getattr(self, 'includes', [])) + self.env['INCLUDES']) + lst = self.to_incnodes(self.to_list(getattr(self, 'includes', [])) + self.env.INCLUDES) self.includes_nodes = lst - self.env['INCPATHS'] = [x.abspath() for x in lst] + cwd = self.get_cwd() + self.env.INCPATHS = [x.path_from(cwd) for x in lst] class link_task(Task.Task): """ Base class for all link tasks. A task generator is supposed to have at most one link task bound in the attribute *link_task*. See :py:func:`waflib.Tools.ccroot.apply_link`. .. 
inheritance-diagram:: waflib.Tools.ccroot.stlink_task waflib.Tools.c.cprogram waflib.Tools.c.cshlib waflib.Tools.cxx.cxxstlib waflib.Tools.cxx.cxxprogram waflib.Tools.cxx.cxxshlib waflib.Tools.d.dprogram waflib.Tools.d.dshlib waflib.Tools.d.dstlib waflib.Tools.ccroot.fake_shlib waflib.Tools.ccroot.fake_stlib waflib.Tools.asm.asmprogram waflib.Tools.asm.asmshlib waflib.Tools.asm.asmstlib + :top-classes: waflib.Tools.ccroot.link_task """ color = 'YELLOW' + weight = 3 + """Try to process link tasks as early as possible""" + inst_to = None """Default installation path for the link task outputs, or None to disable""" @@ -142,6 +147,12 @@ def add_target(self, target): The settings are retrieved from ``env.clsname_PATTERN`` """ if isinstance(target, str): + base = self.generator.path + if target.startswith('#'): + # for those who like flat structures + target = target[1:] + base = self.generator.bld.bldnode + pattern = self.env[self.__class__.__name__ + '_PATTERN'] if not pattern: pattern = '%s' @@ -151,17 +162,62 @@ def add_target(self, target): nums = self.generator.vnum.split('.') if self.env.DEST_BINFMT == 'pe': # include the version in the dll file name, - # the import lib file name stays unversionned. + # the import lib file name stays unversioned. name = name + '-' + nums[0] elif self.env.DEST_OS == 'openbsd': pattern = '%s.%s' % (pattern, nums[0]) if len(nums) >= 2: pattern += '.%s' % nums[1] - tmp = folder + os.sep + pattern % name - target = self.generator.path.find_or_declare(tmp) + if folder: + tmp = folder + os.sep + pattern % name + else: + tmp = pattern % name + target = base.find_or_declare(tmp) self.set_outputs(target) + def exec_command(self, *k, **kw): + ret = super(link_task, self).exec_command(*k, **kw) + if not ret and self.env.DO_MANIFEST: + ret = self.exec_mf() + return ret + + def exec_mf(self): + """ + Create manifest files for VS-like compilers (msvc, ifort, ...) + """ + if not self.env.MT: + return 0 + + manifest = None + for out_node in self.outputs: + if out_node.name.endswith('.manifest'): + manifest = out_node.abspath() + break + else: + # Should never get here. If we do, it means the manifest file was + # never added to the outputs list, thus we don't have a manifest file + # to embed, so we just return. + return 0 + + # embedding mode. Different for EXE's and DLL's. + # see: http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx + mode = '' + for x in Utils.to_list(self.generator.features): + if x in ('cprogram', 'cxxprogram', 'fcprogram', 'fcprogram_test'): + mode = 1 + elif x in ('cshlib', 'cxxshlib', 'fcshlib'): + mode = 2 + + Logs.debug('msvc: embedding manifest in mode %r', mode) + + lst = [] + self.env.MT + lst.extend(Utils.to_list(self.env.MTFLAGS)) + lst.extend(['-manifest', manifest]) + lst.append('-outputresource:%s;%s' % (self.outputs[0].abspath(), mode)) + + return super(link_task, self).exec_command(lst) + class stlink_task(link_task): """ Base for static link tasks, which use *ar* most of the time. 
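# --- Illustrative sketch (not part of the patch above) ---
# The new link_task.exec_mf() shown in the preceding hunk embeds a side-by-side
# manifest with Microsoft's "mt" tool: mode 1 is used for executables, mode 2
# for shared libraries, and the command line is assembled from env.MT and
# env.MTFLAGS. Below is a minimal standalone helper mirroring that command
# construction; the function name and parameters are hypothetical and exist
# only for illustration.
def build_mt_command(mt, mtflags, manifest, binary, is_dll):
	# e.g. build_mt_command(['mt.exe'], ['/nologo'], 'foo.dll.manifest', 'foo.dll', True)
	# -> ['mt.exe', '/nologo', '-manifest', 'foo.dll.manifest', '-outputresource:foo.dll;2']
	mode = 2 if is_dll else 1
	cmd = list(mt) + list(mtflags)
	cmd.extend(['-manifest', manifest])
	cmd.append('-outputresource:%s;%s' % (binary, mode))
	return cmd
# --- end of sketch ---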
@@ -175,12 +231,25 @@ class stlink_task(link_task): def rm_tgt(cls): old = cls.run def wrap(self): - try: os.remove(self.outputs[0].abspath()) - except OSError: pass + try: + os.remove(self.outputs[0].abspath()) + except OSError: + pass return old(self) setattr(cls, 'run', wrap) rm_tgt(stlink_task) +@feature('skip_stlib_link_deps') +@before_method('process_use') +def apply_skip_stlib_link_deps(self): + """ + This enables an optimization in the :py:func:wafilb.Tools.ccroot.processes_use: method that skips dependency and + link flag optimizations for targets that generate static libraries (via the :py:class:Tools.ccroot.stlink_task task). + The actual behavior is implemented in :py:func:wafilb.Tools.ccroot.processes_use: method so this feature only tells waf + to enable the new behavior. + """ + self.env.SKIP_STLIB_LINK_DEPS = True + @feature('c', 'cxx', 'd', 'fc', 'asm') @after_method('process_source') def apply_link(self): @@ -216,10 +285,12 @@ def build(bld): try: inst_to = self.install_path except AttributeError: - inst_to = self.link_task.__class__.inst_to + inst_to = self.link_task.inst_to if inst_to: # install a copy of the node list we have at this moment (implib not added) - self.install_task = self.bld.install_files(inst_to, self.link_task.outputs[:], env=self.env, chmod=self.link_task.chmod, task=self.link_task) + self.install_task = self.add_install_files( + install_to=inst_to, install_from=self.link_task.outputs[:], + chmod=self.link_task.chmod, task=self.link_task) @taskgen_method def use_rec(self, name, **kw): @@ -279,7 +350,7 @@ def build(bld): See :py:func:`waflib.Tools.ccroot.use_rec`. """ - use_not = self.tmp_use_not = set([]) + use_not = self.tmp_use_not = set() self.tmp_use_seen = [] # we would like an ordered set use_prec = self.tmp_use_prec = {} self.uselib = self.to_list(getattr(self, 'uselib', [])) @@ -294,7 +365,7 @@ def build(bld): del use_prec[x] # topological sort - out = [] + out = self.tmp_use_sorted = [] tmp = [] for x in self.tmp_use_seen: for k in use_prec.values(): @@ -327,17 +398,22 @@ def build(bld): y = self.bld.get_tgen_by_name(x) var = y.tmp_use_var if var and link_task: - if var == 'LIB' or y.tmp_use_stlib or x in names: + if self.env.SKIP_STLIB_LINK_DEPS and isinstance(link_task, stlink_task): + # If the skip_stlib_link_deps feature is enabled then we should + # avoid adding lib deps to the stlink_task instance. 
+ pass + elif var == 'LIB' or y.tmp_use_stlib or x in names: self.env.append_value(var, [y.target[y.target.rfind(os.sep) + 1:]]) self.link_task.dep_nodes.extend(y.link_task.outputs) - tmp_path = y.link_task.outputs[0].parent.path_from(self.bld.bldnode) + tmp_path = y.link_task.outputs[0].parent.path_from(self.get_cwd()) self.env.append_unique(var + 'PATH', [tmp_path]) else: if y.tmp_use_objects: self.add_objects_from_tgen(y) if getattr(y, 'export_includes', None): - self.includes.extend(y.to_incnodes(y.export_includes)) + # self.includes may come from a global variable #2035 + self.includes = self.includes + y.to_incnodes(y.export_includes) if getattr(y, 'export_defines', None): self.env.append_value('DEFINES', self.to_list(y.export_defines)) @@ -387,7 +463,7 @@ def get_uselib_vars(self): :return: the *uselib* variables associated to the *features* attribute (see :py:attr:`waflib.Tools.ccroot.USELIB_VARS`) :rtype: list of string """ - _vars = set([]) + _vars = set() for x in self.features: if x in USELIB_VARS: _vars |= USELIB_VARS[x] @@ -402,7 +478,7 @@ def propagate_uselib_vars(self): def build(bld): bld.env.AFLAGS_aaa = ['bar'] from waflib.Tools.ccroot import USELIB_VARS - USELIB_VARS['aaa'] = set('AFLAGS') + USELIB_VARS['aaa'] = ['AFLAGS'] tg = bld(features='aaa', aflags='test') @@ -444,20 +520,20 @@ def apply_implib(self): name = self.target.name else: name = os.path.split(self.target)[1] - implib = self.env['implib_PATTERN'] % name + implib = self.env.implib_PATTERN % name implib = dll.parent.find_or_declare(implib) - self.env.append_value('LINKFLAGS', self.env['IMPLIB_ST'] % implib.bldpath()) + self.env.append_value('LINKFLAGS', self.env.IMPLIB_ST % implib.bldpath()) self.link_task.outputs.append(implib) if getattr(self, 'defs', None) and self.env.DEST_BINFMT == 'pe': node = self.path.find_resource(self.defs) if not node: raise Errors.WafError('invalid def file %r' % self.defs) - if 'msvc' in (self.env.CC_NAME, self.env.CXX_NAME): - self.env.append_value('LINKFLAGS', '/def:%s' % node.path_from(self.bld.bldnode)) + if self.env.def_PATTERN: + self.env.append_value('LINKFLAGS', self.env.def_PATTERN % node.path_from(self.get_cwd())) self.link_task.dep_nodes.append(node) else: - #gcc for windows takes *.def file a an input without any special flag + # gcc for windows takes *.def file as input without any special flag self.link_task.inputs.append(node) # where to put the import library @@ -472,10 +548,11 @@ def apply_implib(self): except AttributeError: # else, put the library in BINDIR and the import library in LIBDIR inst_to = '${IMPLIBDIR}' - self.install_task.dest = '${BINDIR}' + self.install_task.install_to = '${BINDIR}' if not self.env.IMPLIBDIR: self.env.IMPLIBDIR = self.env.LIBDIR - self.implib_install_task = self.bld.install_files(inst_to, implib, env=self.env, chmod=self.link_task.chmod, task=self.link_task) + self.implib_install_task = self.add_install_files(install_to=inst_to, install_from=implib, + chmod=self.link_task.chmod, task=self.link_task) # ============ the code above must not know anything about vnum processing on unix platforms ========= @@ -489,10 +566,19 @@ def apply_vnum(self): def build(bld): bld.shlib(source='a.c', target='foo', vnum='14.15.16') - In this example, ``libfoo.so`` is installed as ``libfoo.so.1.2.3``, and the following symbolic links are created: + In this example on Linux platform, ``libfoo.so`` is installed as ``libfoo.so.14.15.16``, and the following symbolic links are created: + + * ``libfoo.so → libfoo.so.14.15.16`` + * ``libfoo.so.14 → 
libfoo.so.14.15.16`` + + By default, the library will be assigned SONAME ``libfoo.so.14``, effectively declaring ABI compatibility between all minor and patch releases for the major version of the library. When necessary, the compatibility can be explicitly defined using `cnum` parameter: + + def build(bld): + bld.shlib(source='a.c', target='foo', vnum='14.15.16', cnum='14.15') + + In this case, the assigned SONAME will be ``libfoo.so.14.15`` with ABI compatibility only between path releases for a specific major and minor version of the library. - * ``libfoo.so → libfoo.so.1.2.3`` - * ``libfoo.so.1 → libfoo.so.1.2.3`` + On OS X platform, install-name parameter will follow the above logic for SONAME with exception that it also specifies an absolute path (based on install_path) of the library. """ if not getattr(self, 'vnum', '') or os.name != 'posix' or self.env.DEST_BINFMT not in ('elf', 'mac-o'): return @@ -503,13 +589,18 @@ def build(bld): nums = self.vnum.split('.') node = link.outputs[0] + cnum = getattr(self, 'cnum', str(nums[0])) + cnums = cnum.split('.') + if len(cnums)>len(nums) or nums[0:len(cnums)] != cnums: + raise Errors.WafError('invalid compatibility version %s' % cnum) + libname = node.name if libname.endswith('.dylib'): name3 = libname.replace('.dylib', '.%s.dylib' % self.vnum) - name2 = libname.replace('.dylib', '.%s.dylib' % nums[0]) + name2 = libname.replace('.dylib', '.%s.dylib' % cnum) else: name3 = libname + '.' + self.vnum - name2 = libname + '.' + nums[0] + name2 = libname + '.' + cnum # add the so name for the ld linker - to disable, just unset env.SONAME_ST if self.env.SONAME_ST: @@ -518,45 +609,46 @@ def build(bld): # the following task is just to enable execution from the build dir :-/ if self.env.DEST_OS != 'openbsd': - outs = [node.parent.find_or_declare(name3)] + outs = [node.parent.make_node(name3)] if name2 != name3: - outs.append(node.parent.find_or_declare(name2)) + outs.append(node.parent.make_node(name2)) self.create_task('vnum', node, outs) if getattr(self, 'install_task', None): - self.install_task.hasrun = Task.SKIP_ME - bld = self.bld - path = self.install_task.dest + self.install_task.hasrun = Task.SKIPPED + self.install_task.no_errcheck_out = True + path = self.install_task.install_to if self.env.DEST_OS == 'openbsd': libname = self.link_task.outputs[0].name - t1 = bld.install_as('%s%s%s' % (path, os.sep, libname), node, env=self.env, chmod=self.link_task.chmod) + t1 = self.add_install_as(install_to='%s/%s' % (path, libname), install_from=node, chmod=self.link_task.chmod) self.vnum_install_task = (t1,) else: - t1 = bld.install_as(path + os.sep + name3, node, env=self.env, chmod=self.link_task.chmod) - t3 = bld.symlink_as(path + os.sep + libname, name3) + t1 = self.add_install_as(install_to=path + os.sep + name3, install_from=node, chmod=self.link_task.chmod) + t3 = self.add_symlink_as(install_to=path + os.sep + libname, install_from=name3) if name2 != name3: - t2 = bld.symlink_as(path + os.sep + name2, name3) + t2 = self.add_symlink_as(install_to=path + os.sep + name2, install_from=name3) self.vnum_install_task = (t1, t2, t3) else: self.vnum_install_task = (t1, t3) - if '-dynamiclib' in self.env['LINKFLAGS']: + if '-dynamiclib' in self.env.LINKFLAGS: # this requires after(propagate_uselib_vars) try: inst_to = self.install_path except AttributeError: - inst_to = self.link_task.__class__.inst_to + inst_to = self.link_task.inst_to if inst_to: p = Utils.subst_vars(inst_to, self.env) - path = os.path.join(p, self.link_task.outputs[0].name) + path = 
os.path.join(p, name2) self.env.append_value('LINKFLAGS', ['-install_name', path]) + self.env.append_value('LINKFLAGS', '-Wl,-compatibility_version,%s' % cnum) + self.env.append_value('LINKFLAGS', '-Wl,-current_version,%s' % self.vnum) class vnum(Task.Task): """ Create the symbolic links for a versioned shared library. Instances are created by :py:func:`waflib.Tools.ccroot.apply_vnum` """ color = 'CYAN' - quient = True ext_in = ['.bin'] def keyword(self): return 'Symlinking' @@ -581,9 +673,6 @@ def runnable_status(self): for t in self.run_after: if not t.hasrun: return Task.ASK_LATER - - for x in self.outputs: - x.sig = Utils.h_file(x.abspath()) return Task.SKIP_ME class fake_stlib(stlink_task): @@ -594,9 +683,6 @@ def runnable_status(self): for t in self.run_after: if not t.hasrun: return Task.ASK_LATER - - for x in self.outputs: - x.sig = Utils.h_file(x.abspath()) return Task.SKIP_ME @conf @@ -639,7 +725,10 @@ def process_lib(self): for y in names: node = x.find_node(y) if node: - node.sig = Utils.h_file(node.abspath()) + try: + Utils.h_file(node.abspath()) + except EnvironmentError: + raise ValueError('Could not read %r' % y) break else: continue @@ -686,6 +775,9 @@ def read_object(self, obj): @feature('cxxprogram', 'cprogram') @after_method('apply_link', 'process_use') def set_full_paths_hpux(self): + """ + On hp-ux, extend the libpaths and static library paths to absolute paths + """ if self.env.DEST_OS != 'hp-ux': return base = self.bld.bldnode.abspath() diff --git a/waflib/Tools/clang.py b/waflib/Tools/clang.py index 999c604cbb..3828e39118 100644 --- a/waflib/Tools/clang.py +++ b/waflib/Tools/clang.py @@ -6,14 +6,13 @@ Detect the Clang C compiler """ -import os, sys from waflib.Tools import ccroot, ar, gcc from waflib.Configure import conf @conf def find_clang(conf): """ - Find the program clang and execute it to ensure it really is clang + Finds the program clang and executes it to ensure it really is clang """ cc = conf.find_program('clang', var='CC') conf.get_cc_version(cc, clang=True) @@ -21,6 +20,7 @@ def find_clang(conf): def configure(conf): conf.find_clang() + conf.find_program(['llvm-ar', 'ar'], var='AR') conf.find_ar() conf.gcc_common_flags() conf.gcc_modifier_platform() diff --git a/waflib/Tools/clangxx.py b/waflib/Tools/clangxx.py index c8d34a71b0..152013ce7a 100644 --- a/waflib/Tools/clangxx.py +++ b/waflib/Tools/clangxx.py @@ -1,19 +1,18 @@ #!/usr/bin/env python # encoding: utf-8 -# Thomas Nagy 2009-2010 (ita) +# Thomas Nagy 2009-2018 (ita) """ Detect the Clang++ C++ compiler """ -import os, sys from waflib.Tools import ccroot, ar, gxx from waflib.Configure import conf @conf def find_clangxx(conf): """ - Find the program clang++, and execute it to ensure it really is clang++ + Finds the program clang++, and executes it to ensure it really is clang++ """ cxx = conf.find_program('clang++', var='CXX') conf.get_cc_version(cxx, clang=True) @@ -21,6 +20,7 @@ def find_clangxx(conf): def configure(conf): conf.find_clangxx() + conf.find_program(['llvm-ar', 'ar'], var='AR') conf.find_ar() conf.gxx_common_flags() conf.gxx_modifier_platform() diff --git a/waflib/Tools/compiler_c.py b/waflib/Tools/compiler_c.py index 69c808f2cb..e033ce6c5c 100644 --- a/waflib/Tools/compiler_c.py +++ b/waflib/Tools/compiler_c.py @@ -30,27 +30,28 @@ def build(bld): $ CC=clang waf configure """ -import os, sys, imp, types, re +import re from waflib.Tools import ccroot -from waflib import Utils, Configure +from waflib import Utils from waflib.Logs import debug c_compiler = { -'win32': ['msvc', 'gcc', 
'clang'], -'cygwin': ['gcc'], -'darwin': ['clang', 'gcc'], -'aix': ['xlc', 'gcc', 'clang'], -'linux': ['gcc', 'clang', 'icc'], -'sunos': ['suncc', 'gcc'], -'irix': ['gcc', 'irixcc'], -'hpux': ['gcc'], -'osf1V': ['gcc'], -'gnu': ['gcc', 'clang'], -'java': ['gcc', 'msvc', 'clang', 'icc'], -'default':['gcc', 'clang'], +'win32': ['msvc', 'gcc', 'clang'], +'cygwin': ['gcc', 'clang'], +'darwin': ['clang', 'gcc'], +'aix': ['xlc', 'gcc', 'clang'], +'linux': ['gcc', 'clang', 'icc'], +'sunos': ['suncc', 'gcc'], +'irix': ['gcc', 'irixcc'], +'hpux': ['gcc'], +'osf1V': ['gcc'], +'gnu': ['gcc', 'clang'], +'java': ['gcc', 'msvc', 'clang', 'icc'], +'gnukfreebsd': ['gcc', 'clang'], +'default': ['clang', 'gcc'], } """ -Dict mapping the platform names to Waf tools finding specific C compilers:: +Dict mapping platform names to Waf tools finding specific C compilers:: from waflib.Tools.compiler_c import c_compiler c_compiler['linux'] = ['gcc', 'icc', 'suncc'] @@ -63,10 +64,14 @@ def default_compilers(): def configure(conf): """ - Try to find a suitable C compiler or raise a :py:class:`waflib.Errors.ConfigurationError`. + Detects a suitable C compiler + + :raises: :py:class:`waflib.Errors.ConfigurationError` when no suitable compiler is found """ - try: test_for_compiler = conf.options.check_c_compiler or default_compilers() - except AttributeError: conf.fatal("Add options(opt): opt.load('compiler_c')") + try: + test_for_compiler = conf.options.check_c_compiler or default_compilers() + except AttributeError: + conf.fatal("Add options(opt): opt.load('compiler_c')") for compiler in re.split('[ ,]+', test_for_compiler): conf.env.stash() @@ -76,19 +81,21 @@ def configure(conf): except conf.errors.ConfigurationError as e: conf.env.revert() conf.end_msg(False) - debug('compiler_c: %r' % e) + debug('compiler_c: %r', e) else: - if conf.env['CC']: + if conf.env.CC: conf.end_msg(conf.env.get_flat('CC')) - conf.env['COMPILER_CC'] = compiler + conf.env.COMPILER_CC = compiler + conf.env.commit() break + conf.env.revert() conf.end_msg(False) else: conf.fatal('could not configure a C compiler!') def options(opt): """ - Restrict the compiler detection from the command-line:: + This is how to provide compiler preferences on the command-line:: $ waf configure --check-c-compiler=gcc """ diff --git a/waflib/Tools/compiler_cxx.py b/waflib/Tools/compiler_cxx.py index 4b15b9e3a6..42658c5847 100644 --- a/waflib/Tools/compiler_cxx.py +++ b/waflib/Tools/compiler_cxx.py @@ -31,24 +31,25 @@ def build(bld): """ -import os, sys, imp, types, re +import re from waflib.Tools import ccroot -from waflib import Utils, Configure +from waflib import Utils from waflib.Logs import debug cxx_compiler = { -'win32': ['msvc', 'g++', 'clang++'], -'cygwin': ['g++'], -'darwin': ['clang++', 'g++'], -'aix': ['xlc++', 'g++', 'clang++'], -'linux': ['g++', 'clang++', 'icpc'], -'sunos': ['sunc++', 'g++'], -'irix': ['g++'], -'hpux': ['g++'], -'osf1V': ['g++'], -'gnu': ['g++', 'clang++'], -'java': ['g++', 'msvc', 'clang++', 'icpc'], -'default': ['g++', 'clang++'] +'win32': ['msvc', 'g++', 'clang++'], +'cygwin': ['g++', 'clang++'], +'darwin': ['clang++', 'g++'], +'aix': ['xlc++', 'g++', 'clang++'], +'linux': ['g++', 'clang++', 'icpc'], +'sunos': ['sunc++', 'g++'], +'irix': ['g++'], +'hpux': ['g++'], +'osf1V': ['g++'], +'gnu': ['g++', 'clang++'], +'java': ['g++', 'msvc', 'clang++', 'icpc'], +'gnukfreebsd': ['g++', 'clang++'], +'default': ['clang++', 'g++'] } """ Dict mapping the platform names to Waf tools finding specific C++ compilers:: @@ -64,10 +65,14 @@ def 
default_compilers(): def configure(conf): """ - Try to find a suitable C++ compiler or raise a :py:class:`waflib.Errors.ConfigurationError`. + Detects a suitable C++ compiler + + :raises: :py:class:`waflib.Errors.ConfigurationError` when no suitable compiler is found """ - try: test_for_compiler = conf.options.check_cxx_compiler or default_compilers() - except AttributeError: conf.fatal("Add options(opt): opt.load('compiler_cxx')") + try: + test_for_compiler = conf.options.check_cxx_compiler or default_compilers() + except AttributeError: + conf.fatal("Add options(opt): opt.load('compiler_cxx')") for compiler in re.split('[ ,]+', test_for_compiler): conf.env.stash() @@ -77,19 +82,21 @@ def configure(conf): except conf.errors.ConfigurationError as e: conf.env.revert() conf.end_msg(False) - debug('compiler_cxx: %r' % e) + debug('compiler_cxx: %r', e) else: - if conf.env['CXX']: + if conf.env.CXX: conf.end_msg(conf.env.get_flat('CXX')) - conf.env['COMPILER_CXX'] = compiler + conf.env.COMPILER_CXX = compiler + conf.env.commit() break + conf.env.revert() conf.end_msg(False) else: conf.fatal('could not configure a C++ compiler!') def options(opt): """ - Restrict the compiler detection from the command-line:: + This is how to provide compiler preferences on the command-line:: $ waf configure --check-cxx-compiler=gxx """ diff --git a/waflib/Tools/compiler_d.py b/waflib/Tools/compiler_d.py index 8b07c7da89..43bb1f646a 100644 --- a/waflib/Tools/compiler_d.py +++ b/waflib/Tools/compiler_d.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # encoding: utf-8 # Carlos Rafael Giani, 2007 (dv) -# Thomas Nagy, 2010 (ita) +# Thomas Nagy, 2016-2018 (ita) """ Try to detect a D compiler from the list of supported compilers:: @@ -20,8 +20,8 @@ def build(bld): * ldc2 """ -import os, sys, imp, types, re -from waflib import Utils, Configure, Options, Logs +import re +from waflib import Utils, Logs d_compiler = { 'default' : ['gdc', 'dmd', 'ldc2'] @@ -40,10 +40,14 @@ def default_compilers(): def configure(conf): """ - Try to find a suitable D compiler or raise a :py:class:`waflib.Errors.ConfigurationError`. 
+ Detects a suitable D compiler + + :raises: :py:class:`waflib.Errors.ConfigurationError` when no suitable compiler is found """ - try: test_for_compiler = conf.options.check_d_compiler or default_compilers() - except AttributeError: conf.fatal("Add options(opt): opt.load('compiler_d')") + try: + test_for_compiler = conf.options.check_d_compiler or default_compilers() + except AttributeError: + conf.fatal("Add options(opt): opt.load('compiler_d')") for compiler in re.split('[ ,]+', test_for_compiler): conf.env.stash() @@ -53,19 +57,21 @@ def configure(conf): except conf.errors.ConfigurationError as e: conf.env.revert() conf.end_msg(False) - Logs.debug('compiler_d: %r' % e) + Logs.debug('compiler_d: %r', e) else: if conf.env.D: conf.end_msg(conf.env.get_flat('D')) - conf.env['COMPILER_D'] = compiler + conf.env.COMPILER_D = compiler + conf.env.commit() break + conf.env.revert() conf.end_msg(False) else: conf.fatal('could not configure a D compiler!') def options(opt): """ - Restrict the compiler detection from the command-line:: + This is how to provide compiler preferences on the command-line:: $ waf configure --check-d-compiler=dmd """ diff --git a/waflib/Tools/compiler_fc.py b/waflib/Tools/compiler_fc.py index 5643f5e02a..96b58e706f 100644 --- a/waflib/Tools/compiler_fc.py +++ b/waflib/Tools/compiler_fc.py @@ -1,8 +1,8 @@ #!/usr/bin/env python # encoding: utf-8 -import os, sys, imp, types, re -from waflib import Utils, Configure, Options, Logs, Errors +import re +from waflib import Utils, Logs from waflib.Tools import fc fc_compiler = { @@ -27,10 +27,14 @@ def default_compilers(): def configure(conf): """ - Try to find a suitable Fortran compiler or raise a :py:class:`waflib.Errors.ConfigurationError`. + Detects a suitable Fortran compiler + + :raises: :py:class:`waflib.Errors.ConfigurationError` when no suitable compiler is found """ - try: test_for_compiler = conf.options.check_fortran_compiler or default_compilers() - except AttributeError: conf.fatal("Add options(opt): opt.load('compiler_fc')") + try: + test_for_compiler = conf.options.check_fortran_compiler or default_compilers() + except AttributeError: + conf.fatal("Add options(opt): opt.load('compiler_fc')") for compiler in re.split('[ ,]+', test_for_compiler): conf.env.stash() conf.start_msg('Checking for %r (Fortran compiler)' % compiler) @@ -39,19 +43,21 @@ def configure(conf): except conf.errors.ConfigurationError as e: conf.env.revert() conf.end_msg(False) - Logs.debug('compiler_fortran: %r' % e) + Logs.debug('compiler_fortran: %r', e) else: - if conf.env['FC']: + if conf.env.FC: conf.end_msg(conf.env.get_flat('FC')) conf.env.COMPILER_FORTRAN = compiler + conf.env.commit() break + conf.env.revert() conf.end_msg(False) else: conf.fatal('could not configure a Fortran compiler!') def options(opt): """ - Restrict the compiler detection from the command-line:: + This is how to provide compiler preferences on the command-line:: $ waf configure --check-fortran-compiler=ifort """ diff --git a/waflib/Tools/cs.py b/waflib/Tools/cs.py index 59917e5aff..aecca6da13 100644 --- a/waflib/Tools/cs.py +++ b/waflib/Tools/cs.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # encoding: utf-8 -# Thomas Nagy, 2006-2010 (ita) +# Thomas Nagy, 2006-2018 (ita) """ C# support. 
A simple example:: @@ -21,11 +21,10 @@ def configure(conf): bintype='exe', csflags=['-pkg:gtk-sharp-2.0'], msg='Checking for Gtksharp support') """ -from waflib import Utils, Task, Options, Logs, Errors +from waflib import Utils, Task, Options, Errors from waflib.TaskGen import before_method, after_method, feature from waflib.Tools import ccroot from waflib.Configure import conf -import os, tempfile ccroot.USELIB_VARS['cs'] = set(['CSFLAGS', 'ASSEMBLIES', 'RESOURCES']) ccroot.lib_patterns['csshlib'] = ['%s'] @@ -55,7 +54,7 @@ def apply_cs(self): if inst_to: # note: we are making a copy, so the files added to cs_task.outputs won't be installed automatically mod = getattr(self, 'chmod', bintype=='exe' and Utils.O755 or Utils.O644) - self.install_task = self.bld.install_files(inst_to, self.cs_task.outputs[:], env=self.env, chmod=mod) + self.install_task = self.add_install_files(install_to=inst_to, install_from=self.cs_task.outputs[:], chmod=mod) @feature('cs') @after_method('apply_cs') @@ -81,7 +80,7 @@ def build(bld): if not tsk: self.bld.fatal('cs task has no link task for use %r' % self) self.cs_task.dep_nodes.extend(tsk.outputs) # dependency - self.cs_task.set_run_after(tsk) # order (redundant, the order is infered from the nodes inputs/outputs) + self.cs_task.set_run_after(tsk) # order (redundant, the order is inferred from the nodes inputs/outputs) self.env.append_value('CSFLAGS', '/reference:%s' % tsk.outputs[0].abspath()) @feature('cs') @@ -104,10 +103,10 @@ def build(bld): else: out = node.change_ext('.pdb') self.cs_task.outputs.append(out) - try: - self.install_task.source.append(out) - except AttributeError: - pass + + if getattr(self, 'install_task', None): + self.pdb_install_task = self.add_install_files( + install_to=self.install_task.install_to, install_from=out) if csdebug == 'pdbonly': val = ['/debug+', '/debug:pdbonly'] @@ -117,6 +116,29 @@ def build(bld): val = ['/debug-'] self.env.append_value('CSFLAGS', val) +@feature('cs') +@after_method('debug_cs') +def doc_cs(self): + """ + The C# targets may create .xml documentation files:: + + def build(bld): + bld(features='cs', source='My.cs', bintype='library', gen='my.dll', csdoc=True) + # csdoc is a boolean value + """ + csdoc = getattr(self, 'csdoc', self.env.CSDOC) + if not csdoc: + return + + node = self.cs_task.outputs[0] + out = node.change_ext('.xml') + self.cs_task.outputs.append(out) + + if getattr(self, 'install_task', None): + self.doc_install_task = self.add_install_files( + install_to=self.install_task.install_to, install_from=out) + + self.env.append_value('CSFLAGS', '/doc:%s' % out.abspath()) class mcs(Task.Task): """ @@ -125,47 +147,16 @@ class mcs(Task.Task): color = 'YELLOW' run_str = '${MCS} ${CSTYPE} ${CSFLAGS} ${ASS_ST:ASSEMBLIES} ${RES_ST:RESOURCES} ${OUT} ${SRC}' - def exec_command(self, cmd, **kw): - bld = self.generator.bld - - try: - if not kw.get('cwd', None): - kw['cwd'] = bld.cwd - except AttributeError: - bld.cwd = kw['cwd'] = bld.variant_dir - - try: - tmp = None - if isinstance(cmd, list) and len(' '.join(cmd)) >= 8192: - program = cmd[0] #unquoted program name, otherwise exec_command will fail - cmd = [self.quote_response_command(x) for x in cmd] - (fd, tmp) = tempfile.mkstemp() - os.write(fd, '\r\n'.join(i.replace('\\', '\\\\') for i in cmd[1:]).encode()) - os.close(fd) - cmd = [program, '@' + tmp] - # no return here, that's on purpose - ret = self.generator.bld.exec_command(cmd, **kw) - finally: - if tmp: - try: - os.remove(tmp) - except OSError: - pass # anti-virus and indexers can keep the 
files open -_- - return ret - - def quote_response_command(self, flag): - # /noconfig is not allowed when using response files - if flag.lower() == '/noconfig': - return '' - - if flag.find(' ') > -1: - for x in ('/r:', '/reference:', '/resource:', '/lib:', '/out:'): - if flag.startswith(x): - flag = '%s"%s"' % (x, '","'.join(flag[len(x):].split(','))) - break + def split_argfile(self, cmd): + inline = [cmd[0]] + infile = [] + for x in cmd[1:]: + # csc doesn't want /noconfig in @file + if x.lower() == '/noconfig': + inline.append(x) else: - flag = '"%s"' % flag - return flag + infile.append(self.quote_flag(x)) + return (inline, infile) def configure(conf): """ @@ -198,8 +189,6 @@ class fake_csshlib(Task.Task): inst_to = None def runnable_status(self): - for x in self.outputs: - x.sig = Utils.h_file(x.abspath()) return Task.SKIP_ME @conf diff --git a/waflib/Tools/cxx.py b/waflib/Tools/cxx.py index 9e1777d46e..705ec74d06 100644 --- a/waflib/Tools/cxx.py +++ b/waflib/Tools/cxx.py @@ -1,40 +1,40 @@ #!/usr/bin/env python # encoding: utf-8 -# Thomas Nagy, 2005-2010 (ita) +# Thomas Nagy, 2005-2018 (ita) "Base for c++ programs and libraries" -from waflib import TaskGen, Task, Utils +from waflib import TaskGen, Task from waflib.Tools import c_preproc from waflib.Tools.ccroot import link_task, stlink_task @TaskGen.extension('.cpp','.cc','.cxx','.C','.c++') def cxx_hook(self, node): - "Bind the c++ file extensions to the creation of a :py:class:`waflib.Tools.cxx.cxx` instance" + "Binds c++ file extensions to create :py:class:`waflib.Tools.cxx.cxx` instances" return self.create_compiled_task('cxx', node) if not '.c' in TaskGen.task_gen.mappings: TaskGen.task_gen.mappings['.c'] = TaskGen.task_gen.mappings['.cpp'] class cxx(Task.Task): - "Compile C++ files into object files" - run_str = '${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CXX_SRC_F}${SRC} ${CXX_TGT_F}${TGT[0].abspath()}' + "Compiles C++ files into object files" + run_str = '${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CXX_SRC_F}${SRC} ${CXX_TGT_F}${TGT[0].relpath()} ${CPPFLAGS}' vars = ['CXXDEPS'] # unused variable to depend on, just in case ext_in = ['.h'] # set the build order easily by using ext_out=['.h'] scan = c_preproc.scan class cxxprogram(link_task): - "Link object files into a c++ program" - run_str = '${LINK_CXX} ${LINKFLAGS} ${CXXLNK_SRC_F}${SRC} ${CXXLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB}' + "Links object files into c++ programs" + run_str = '${LINK_CXX} ${LINKFLAGS} ${CXXLNK_SRC_F}${SRC} ${CXXLNK_TGT_F}${TGT[0].relpath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB} ${LDFLAGS}' vars = ['LINKDEPS'] ext_out = ['.bin'] inst_to = '${BINDIR}' class cxxshlib(cxxprogram): - "Link object files into a c++ shared library" + "Links object files into c++ shared libraries" inst_to = '${LIBDIR}' class cxxstlib(stlink_task): - "Link object files into a c++ static library" + "Links object files into c++ static libraries" pass # do not remove diff --git a/waflib/Tools/d.py b/waflib/Tools/d.py index 14a3114bd9..e4cf73bb4a 100644 --- 
a/waflib/Tools/d.py +++ b/waflib/Tools/d.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # encoding: utf-8 # Carlos Rafael Giani, 2007 (dv) -# Thomas Nagy, 2007-2010 (ita) +# Thomas Nagy, 2007-2018 (ita) from waflib import Utils, Task, Errors from waflib.TaskGen import taskgen_method, feature, extension @@ -57,7 +57,7 @@ def create_compiled_task(self, name, node): if getattr(self, 'generate_headers', None): tsk = create_compiled_task(self, 'd_with_header', node) - tsk.outputs.append(node.change_ext(self.env['DHEADER_ext'])) + tsk.outputs.append(node.change_ext(self.env.DHEADER_ext)) else: tsk = create_compiled_task(self, 'd', node) return tsk diff --git a/waflib/Tools/d_config.py b/waflib/Tools/d_config.py index c181d64e53..6637556524 100644 --- a/waflib/Tools/d_config.py +++ b/waflib/Tools/d_config.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # encoding: utf-8 -# Thomas Nagy, 2010 (ita) +# Thomas Nagy, 2016-2018 (ita) from waflib import Utils from waflib.Configure import conf @@ -8,24 +8,24 @@ @conf def d_platform_flags(self): """ - Set the extensions dll/so for d programs and libraries + Sets the extensions dll/so for d programs and libraries """ v = self.env if not v.DEST_OS: v.DEST_OS = Utils.unversioned_sys_platform() binfmt = Utils.destos_to_binfmt(self.env.DEST_OS) if binfmt == 'pe': - v['dprogram_PATTERN'] = '%s.exe' - v['dshlib_PATTERN'] = 'lib%s.dll' - v['dstlib_PATTERN'] = 'lib%s.a' + v.dprogram_PATTERN = '%s.exe' + v.dshlib_PATTERN = 'lib%s.dll' + v.dstlib_PATTERN = 'lib%s.a' elif binfmt == 'mac-o': - v['dprogram_PATTERN'] = '%s' - v['dshlib_PATTERN'] = 'lib%s.dylib' - v['dstlib_PATTERN'] = 'lib%s.a' + v.dprogram_PATTERN = '%s' + v.dshlib_PATTERN = 'lib%s.dylib' + v.dstlib_PATTERN = 'lib%s.a' else: - v['dprogram_PATTERN'] = '%s' - v['dshlib_PATTERN'] = 'lib%s.so' - v['dstlib_PATTERN'] = 'lib%s.a' + v.dprogram_PATTERN = '%s' + v.dshlib_PATTERN = 'lib%s.so' + v.dstlib_PATTERN = 'lib%s.a' DLIB = ''' version(D_Version2) { @@ -55,7 +55,8 @@ def d_platform_flags(self): @conf def check_dlibrary(self, execute=True): """ - Detect the kind of standard library that comes with the compiler, will set conf.env.DLIBRARY to tango, phobos1 or phobos2. 
+ Detects the kind of standard library that comes with the compiler, + and sets conf.env.DLIBRARY to tango, phobos1 or phobos2 """ ret = self.check_cc(features='d dprogram', fragment=DLIB, compile_filename='test.d', execute=execute, define_ret=True) if execute: diff --git a/waflib/Tools/d_scan.py b/waflib/Tools/d_scan.py index dc4049e88a..4e807a6b9f 100644 --- a/waflib/Tools/d_scan.py +++ b/waflib/Tools/d_scan.py @@ -1,13 +1,13 @@ #!/usr/bin/env python # encoding: utf-8 -# Thomas Nagy, 2010 (ita) +# Thomas Nagy, 2016-2018 (ita) """ Provide a scanner for finding dependencies on d files """ import re -from waflib import Utils, Logs +from waflib import Utils def filter_comments(filename): """ @@ -29,7 +29,8 @@ def filter_comments(filename): i += 1 while i < max: c = txt[i] - if c == delim: break + if c == delim: + break elif c == '\\': # skip the character following backslash i += 1 i += 1 @@ -38,7 +39,8 @@ def filter_comments(filename): elif c == '/': # try to replace a comment with whitespace buf.append(txt[begin:i]) i += 1 - if i == max: break + if i == max: + break c = txt[i] if c == '+': # eat nesting /+ +/ comment i += 1 @@ -52,7 +54,8 @@ def filter_comments(filename): c = None elif prev == '+' and c == '/': nesting -= 1 - if nesting == 0: break + if nesting == 0: + break c = None i += 1 elif c == '*': # eat /* */ comment @@ -61,7 +64,8 @@ def filter_comments(filename): while i < max: prev = c c = txt[i] - if prev == '*' and c == '/': break + if prev == '*' and c == '/': + break i += 1 elif c == '/': # eat // comment i += 1 @@ -89,8 +93,8 @@ def __init__(self, env, incpaths): self.allnames = [] - self.re_module = re.compile("module\s+([^;]+)") - self.re_import = re.compile("import\s+([^;]+)") + self.re_module = re.compile(r"module\s+([^;]+)") + self.re_import = re.compile(r"import\s+([^;]+)") self.re_import_bindings = re.compile("([^:]+):(.*)") self.re_import_alias = re.compile("[^=]+=(.+)") @@ -134,7 +138,7 @@ def get_strings(self, code): mod_name = self.re_module.search(code) if mod_name: - self.module = re.sub('\s+', '', mod_name.group(1)) # strip all whitespaces + self.module = re.sub(r'\s+', '', mod_name.group(1)) # strip all whitespaces # go through the code, have a look at all import occurrences @@ -142,7 +146,7 @@ def get_strings(self, code): import_iterator = self.re_import.finditer(code) if import_iterator: for import_match in import_iterator: - import_match_str = re.sub('\s+', '', import_match.group(1)) # strip all whitespaces + import_match_str = re.sub(r'\s+', '', import_match.group(1)) # strip all whitespaces # does this end with an import bindings declaration? 
# (import bindings always terminate the list of imports) @@ -188,7 +192,8 @@ def iter(self, node): names = self.get_strings(code) # obtain the import strings for x in names: # optimization - if x in self.allnames: continue + if x in self.allnames: + continue self.allnames.append(x) # for each name, see if it is like a node or not @@ -202,8 +207,5 @@ def scan(self): gruik.start(node) nodes = gruik.nodes names = gruik.names - - if Logs.verbose: - Logs.debug('deps: deps for %s: %r; unresolved %r' % (str(node), nodes, names)) return (nodes, names) diff --git a/waflib/Tools/dbus.py b/waflib/Tools/dbus.py index b460633076..d520f1c0f2 100644 --- a/waflib/Tools/dbus.py +++ b/waflib/Tools/dbus.py @@ -3,7 +3,7 @@ # Ali Sabil, 2007 """ -Compile dbus files with **dbus-binding-tool** +Compiles dbus files with **dbus-binding-tool** Typical usage:: @@ -25,7 +25,7 @@ def build(bld): @taskgen_method def add_dbus_file(self, filename, prefix, mode): """ - Add a dbus file to the list of dbus files to process. Store them in the attribute *dbus_lst*. + Adds a dbus file to the list of dbus files to process. Store them in the attribute *dbus_lst*. :param filename: xml file to compile :type filename: string @@ -40,10 +40,10 @@ def add_dbus_file(self, filename, prefix, mode): self.meths.append('process_dbus') self.dbus_lst.append([filename, prefix, mode]) -@before_method('apply_core') +@before_method('process_source') def process_dbus(self): """ - Process the dbus files stored in the attribute *dbus_lst* to create :py:class:`waflib.Tools.dbus.dbus_binding_tool` instances. + Processes the dbus files stored in the attribute *dbus_lst* to create :py:class:`waflib.Tools.dbus.dbus_binding_tool` instances. """ for filename, prefix, mode in getattr(self, 'dbus_lst', []): node = self.path.find_resource(filename) @@ -55,7 +55,7 @@ def process_dbus(self): class dbus_binding_tool(Task.Task): """ - Compile a dbus file + Compiles a dbus file """ color = 'BLUE' ext_out = ['.h'] @@ -64,7 +64,7 @@ class dbus_binding_tool(Task.Task): def configure(conf): """ - Detect the program dbus-binding-tool and set the *conf.env.DBUS_BINDING_TOOL* + Detects the program dbus-binding-tool and sets ``conf.env.DBUS_BINDING_TOOL`` """ - dbus_binding_tool = conf.find_program('dbus-binding-tool', var='DBUS_BINDING_TOOL') + conf.find_program('dbus-binding-tool', var='DBUS_BINDING_TOOL') diff --git a/waflib/Tools/dmd.py b/waflib/Tools/dmd.py index 64bc43689d..8917ca1b3f 100644 --- a/waflib/Tools/dmd.py +++ b/waflib/Tools/dmd.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # encoding: utf-8 # Carlos Rafael Giani, 2007 (dv) -# Thomas Nagy, 2008-2010 (ita) +# Thomas Nagy, 2008-2018 (ita) import sys from waflib.Tools import ar, d @@ -10,7 +10,7 @@ @conf def find_dmd(conf): """ - Find the program *dmd*, *dmd2*, or *ldc* and set the variable *D* + Finds the program *dmd*, *dmd2*, or *ldc* and set the variable *D* """ conf.find_program(['dmd', 'dmd2', 'ldc'], var='D') @@ -24,48 +24,40 @@ def find_dmd(conf): @conf def common_flags_ldc(conf): """ - Set the D flags required by *ldc* + Sets the D flags required by *ldc* """ v = conf.env - v['DFLAGS'] = ['-d-version=Posix'] - v['LINKFLAGS'] = [] - v['DFLAGS_dshlib'] = ['-relocation-model=pic'] + v.DFLAGS = ['-d-version=Posix'] + v.LINKFLAGS = [] + v.DFLAGS_dshlib = ['-relocation-model=pic'] @conf def common_flags_dmd(conf): """ Set the flags required by *dmd* or *dmd2* """ - v = conf.env - # _DFLAGS _DIMPORTFLAGS - - # Compiler is dmd so 'gdc' part will be ignored, just - # ensure key is there, so wscript can append 
flags to it - #v['DFLAGS'] = ['-version=Posix'] - - v['D_SRC_F'] = ['-c'] - v['D_TGT_F'] = '-of%s' + v.D_SRC_F = ['-c'] + v.D_TGT_F = '-of%s' - # linker - v['D_LINKER'] = v['D'] - v['DLNK_SRC_F'] = '' - v['DLNK_TGT_F'] = '-of%s' - v['DINC_ST'] = '-I%s' + v.D_LINKER = v.D + v.DLNK_SRC_F = '' + v.DLNK_TGT_F = '-of%s' + v.DINC_ST = '-I%s' - v['DSHLIB_MARKER'] = v['DSTLIB_MARKER'] = '' - v['DSTLIB_ST'] = v['DSHLIB_ST'] = '-L-l%s' - v['DSTLIBPATH_ST'] = v['DLIBPATH_ST'] = '-L-L%s' + v.DSHLIB_MARKER = v.DSTLIB_MARKER = '' + v.DSTLIB_ST = v.DSHLIB_ST = '-L-l%s' + v.DSTLIBPATH_ST = v.DLIBPATH_ST = '-L-L%s' - v['LINKFLAGS_dprogram']= ['-quiet'] + v.LINKFLAGS_dprogram= ['-quiet'] - v['DFLAGS_dshlib'] = ['-fPIC'] - v['LINKFLAGS_dshlib'] = ['-L-shared'] + v.DFLAGS_dshlib = ['-fPIC'] + v.LINKFLAGS_dshlib = ['-L-shared'] - v['DHEADER_ext'] = '.di' + v.DHEADER_ext = '.di' v.DFLAGS_d_with_header = ['-H', '-Hf'] - v['D_HDR_F'] = '%s' + v.D_HDR_F = '%s' def configure(conf): """ @@ -75,7 +67,7 @@ def configure(conf): if sys.platform == 'win32': out = conf.cmd_and_log(conf.env.D + ['--help']) - if out.find("D Compiler v2.") > -1: + if out.find('D Compiler v2.') > -1: conf.fatal('dmd2 on Windows is not supported, use gdc or ldc2 instead') conf.load('ar') diff --git a/waflib/Tools/errcheck.py b/waflib/Tools/errcheck.py index 9ac0cafe6c..de8d75a42b 100644 --- a/waflib/Tools/errcheck.py +++ b/waflib/Tools/errcheck.py @@ -3,9 +3,9 @@ # Thomas Nagy, 2011 (ita) """ -errcheck: highlight common mistakes +Common mistakes highlighting. -There is a performance hit, so this tool is only loaded when running "waf -v" +There is a performance impact, so this tool is only loaded when running ``waf -v`` """ typos = { @@ -18,12 +18,14 @@ 'importpath':'includes', 'installpath':'install_path', 'iscopy':'is_copy', +'uses':'use', } meths_typos = ['__call__', 'program', 'shlib', 'stlib', 'objects'] +import sys from waflib import Logs, Build, Node, Task, TaskGen, ConfigSet, Errors, Utils -import waflib.Tools.ccroot +from waflib.Tools import ccroot def check_same_targets(self): mp = Utils.defaultdict(list) @@ -32,6 +34,8 @@ def check_same_targets(self): def check_task(tsk): if not isinstance(tsk, Task.Task): return + if hasattr(tsk, 'no_errcheck_out'): + return for node in tsk.outputs: mp[node].append(tsk) @@ -57,44 +61,51 @@ def check_task(tsk): Logs.error(msg) for x in v: if Logs.verbose > 1: - Logs.error(' %d. %r' % (1 + v.index(x), x.generator)) + Logs.error(' %d. %r', 1 + v.index(x), x.generator) else: - Logs.error(' %d. %r in %r' % (1 + v.index(x), x.generator.name, getattr(x.generator, 'path', None))) + Logs.error(' %d. %r in %r', 1 + v.index(x), x.generator.name, getattr(x.generator, 'path', None)) + Logs.error('If you think that this is an error, set no_errcheck_out on the task instance') if not dupe: for (k, v) in uids.items(): if len(v) > 1: - Logs.error('* Several tasks use the same identifier. Please check the information on\n http://docs.waf.googlecode.com/git/apidocs_16/Task.html#waflib.Task.Task.uid') + Logs.error('* Several tasks use the same identifier. 
Please check the information on\n https://waf.io/apidocs/Task.html?highlight=uid#waflib.Task.Task.uid') + tg_details = tsk.generator.name + if Logs.verbose > 2: + tg_details = tsk.generator for tsk in v: - Logs.error(' - object %r (%r) defined in %r' % (tsk.__class__.__name__, tsk, tsk.generator)) + Logs.error(' - object %r (%r) defined in %r', tsk.__class__.__name__, tsk, tg_details) def check_invalid_constraints(self): - feat = set([]) + feat = set() for x in list(TaskGen.feats.values()): feat.union(set(x)) for (x, y) in TaskGen.task_gen.prec.items(): feat.add(x) feat.union(set(y)) - ext = set([]) + ext = set() for x in TaskGen.task_gen.mappings.values(): ext.add(x.__name__) invalid = ext & feat if invalid: - Logs.error('The methods %r have invalid annotations: @extension <-> @feature/@before_method/@after_method' % list(invalid)) + Logs.error('The methods %r have invalid annotations: @extension <-> @feature/@before_method/@after_method', list(invalid)) # the build scripts have been read, so we can check for invalid after/before attributes on task classes for cls in list(Task.classes.values()): + if sys.hexversion > 0x3000000 and issubclass(cls, Task.Task) and isinstance(cls.hcode, str): + raise Errors.WafError('Class %r has hcode value %r of type , expecting (use Utils.h_cmd() ?)' % (cls, cls.hcode)) + for x in ('before', 'after'): for y in Utils.to_list(getattr(cls, x, [])): - if not Task.classes.get(y, None): - Logs.error('Erroneous order constraint %r=%r on task class %r' % (x, y, cls.__name__)) + if not Task.classes.get(y): + Logs.error('Erroneous order constraint %r=%r on task class %r', x, y, cls.__name__) if getattr(cls, 'rule', None): - Logs.error('Erroneous attribute "rule" on task class %r (rename to "run_str")' % cls.__name__) + Logs.error('Erroneous attribute "rule" on task class %r (rename to "run_str")', cls.__name__) def replace(m): """ - We could add properties, but they would not work in some cases: - bld.program(...) requires 'source' in the attributes + Replaces existing BuildContext methods to verify parameter names, + for example ``bld(source=)`` has no ending *s* """ oldcall = getattr(Build.BuildContext, m) def call(self, *k, **kw): @@ -103,14 +114,13 @@ def call(self, *k, **kw): if x in kw: if x == 'iscopy' and 'subst' in getattr(self, 'features', ''): continue - err = True - Logs.error('Fix the typo %r -> %r on %r' % (x, typos[x], ret)) + Logs.error('Fix the typo %r -> %r on %r', x, typos[x], ret) return ret setattr(Build.BuildContext, m, call) def enhance_lib(): """ - modify existing classes and methods + Modifies existing classes and methods to enable error verification """ for m in meths_typos: replace(m) @@ -118,26 +128,36 @@ def enhance_lib(): # catch '..' in ant_glob patterns def ant_glob(self, *k, **kw): if k: - lst=Utils.to_list(k[0]) + lst = Utils.to_list(k[0]) for pat in lst: - if '..' in pat.split('/'): - Logs.error("In ant_glob pattern %r: '..' means 'two dots', not 'parent directory'" % k[0]) - if kw.get('remove', True): - try: - if self.is_child_of(self.ctx.bldnode) and not kw.get('quiet', False): - Logs.error('Using ant_glob on the build folder (%r) is dangerous (quiet=True to disable this warning)' % self) - except AttributeError: - pass + sp = pat.split('/') + if '..' in sp: + Logs.error("In ant_glob pattern %r: '..' means 'two dots', not 'parent directory'", k[0]) + if '.' in sp: + Logs.error("In ant_glob pattern %r: '.' 
means 'one dot', not 'current directory'", k[0]) return self.old_ant_glob(*k, **kw) Node.Node.old_ant_glob = Node.Node.ant_glob Node.Node.ant_glob = ant_glob + # catch ant_glob on build folders + def ant_iter(self, accept=None, maxdepth=25, pats=[], dir=False, src=True, remove=True, quiet=False): + if remove: + try: + if self.is_child_of(self.ctx.bldnode) and not quiet: + quiet = True + Logs.error('Calling ant_glob on build folders (%r) is dangerous: add quiet=True / remove=False', self) + except AttributeError: + pass + return self.old_ant_iter(accept, maxdepth, pats, dir, src, remove, quiet) + Node.Node.old_ant_iter = Node.Node.ant_iter + Node.Node.ant_iter = ant_iter + # catch conflicting ext_in/ext_out/before/after declarations old = Task.is_before def is_before(t1, t2): ret = old(t1, t2) if ret and old(t2, t1): - Logs.error('Contradictory order constraints in classes %r %r' % (t1, t2)) + Logs.error('Contradictory order constraints in classes %r %r', t1, t2) return ret Task.is_before = is_before @@ -149,7 +169,7 @@ def check_err_features(self): Logs.error('feature shlib -> cshlib, dshlib or cxxshlib') for x in ('c', 'cxx', 'd', 'fc'): if not x in lst and lst and lst[0] in [x+y for y in ('program', 'shlib', 'stlib')]: - Logs.error('%r features is probably missing %r' % (self, x)) + Logs.error('%r features is probably missing %r', self, x) TaskGen.feature('*')(check_err_features) # check for erroneous order constraints @@ -157,12 +177,12 @@ def check_err_order(self): if not hasattr(self, 'rule') and not 'subst' in Utils.to_list(self.features): for x in ('before', 'after', 'ext_in', 'ext_out'): if hasattr(self, x): - Logs.warn('Erroneous order constraint %r on non-rule based task generator %r' % (x, self)) + Logs.warn('Erroneous order constraint %r on non-rule based task generator %r', x, self) else: for x in ('before', 'after'): for y in self.to_list(getattr(self, x, [])): - if not Task.classes.get(y, None): - Logs.error('Erroneous order constraint %s=%r on %r (no such class)' % (x, y, self)) + if not Task.classes.get(y): + Logs.error('Erroneous order constraint %s=%r on %r (no such class)', x, y, self) TaskGen.feature('*')(check_err_order) # check for @extension used with @feature/@before_method/@after_method @@ -197,24 +217,21 @@ def use_rec(self, name, **kw): TaskGen.task_gen.use_rec = use_rec # check for env.append - def getattri(self, name, default=None): + def _getattr(self, name, default=None): if name == 'append' or name == 'add': raise Errors.WafError('env.append and env.add do not exist: use env.append_value/env.append_unique') elif name == 'prepend': raise Errors.WafError('env.prepend does not exist: use env.prepend_value') if name in self.__slots__: - return object.__getattr__(self, name, default) + return super(ConfigSet.ConfigSet, self).__getattr__(name, default) else: return self[name] - ConfigSet.ConfigSet.__getattr__ = getattri + ConfigSet.ConfigSet.__getattr__ = _getattr def options(opt): """ - Add a few methods + Error verification can be enabled by default (not just on ``waf -v``) by adding to the user script options """ enhance_lib() -def configure(conf): - pass - diff --git a/waflib/Tools/fc.py b/waflib/Tools/fc.py index 829b335a2a..7fbd76d365 100644 --- a/waflib/Tools/fc.py +++ b/waflib/Tools/fc.py @@ -1,48 +1,58 @@ #! 
/usr/bin/env python # encoding: utf-8 # DC 2008 -# Thomas Nagy 2010 (ita) +# Thomas Nagy 2016-2018 (ita) """ -fortran support +Fortran support """ -import re - -from waflib import Utils, Task, TaskGen, Logs +from waflib import Utils, Task, Errors from waflib.Tools import ccroot, fc_config, fc_scan -from waflib.TaskGen import feature, before_method, after_method, extension +from waflib.TaskGen import extension from waflib.Configure import conf -ccroot.USELIB_VARS['fc'] = set(['FCFLAGS', 'DEFINES', 'INCLUDES']) -ccroot.USELIB_VARS['fcprogram_test'] = ccroot.USELIB_VARS['fcprogram'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS']) -ccroot.USELIB_VARS['fcshlib'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS']) +ccroot.USELIB_VARS['fc'] = set(['FCFLAGS', 'DEFINES', 'INCLUDES', 'FCPPFLAGS']) +ccroot.USELIB_VARS['fcprogram_test'] = ccroot.USELIB_VARS['fcprogram'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS', 'LDFLAGS']) +ccroot.USELIB_VARS['fcshlib'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS', 'LDFLAGS']) ccroot.USELIB_VARS['fcstlib'] = set(['ARFLAGS', 'LINKDEPS']) -@feature('fcprogram', 'fcshlib', 'fcstlib', 'fcprogram_test') -def dummy(self): - pass - -@extension('.f', '.f90', '.F', '.F90', '.for', '.FOR') +@extension('.f','.F','.f90','.F90','.for','.FOR','.f95','.F95','.f03','.F03','.f08','.F08') def fc_hook(self, node): - "Bind the typical Fortran file extensions to the creation of a :py:class:`waflib.Tools.fc.fc` instance" + "Binds the Fortran file extensions create :py:class:`waflib.Tools.fc.fc` instances" return self.create_compiled_task('fc', node) @conf def modfile(conf, name): """ - Turn a module name into the right module file name. + Turns a module name into the right module file name. Defaults to all lower case. """ - return {'lower' :name.lower() + '.mod', - 'lower.MOD' :name.upper() + '.MOD', - 'UPPER.mod' :name.upper() + '.mod', - 'UPPER' :name.upper() + '.MOD'}[conf.env.FC_MOD_CAPITALIZATION or 'lower'] + if name.find(':') >= 0: + # Depending on a submodule! + separator = conf.env.FC_SUBMOD_SEPARATOR or '@' + # Ancestors of the submodule will be prefixed to the + # submodule name, separated by a colon. + modpath = name.split(':') + # Only the ancestor (actual) module and the submodule name + # will be used for the filename. + modname = modpath[0] + separator + modpath[-1] + suffix = conf.env.FC_SUBMOD_SUFFIX or '.smod' + else: + modname = name + suffix = '.mod' + + return {'lower' :modname.lower() + suffix.lower(), + 'lower.MOD' :modname.lower() + suffix.upper(), + 'UPPER.mod' :modname.upper() + suffix.lower(), + 'UPPER' :modname.upper() + suffix.upper()}[conf.env.FC_MOD_CAPITALIZATION or 'lower'] def get_fortran_tasks(tsk): """ - Obtain all other fortran tasks from the same build group. Those tasks must not have + Obtains all fortran tasks from the same build group. 
Those tasks must not have the attribute 'nomod' or 'mod_fortran_done' + + :return: a list of :py:class:`waflib.Tools.fc.fc` instances """ bld = tsk.generator.bld tasks = bld.get_tasks_group(bld.get_group_idx(tsk.generator)) @@ -50,27 +60,24 @@ def get_fortran_tasks(tsk): class fc(Task.Task): """ - The fortran tasks can only run when all fortran tasks in the current group are ready to be executed - This may cause a deadlock if another fortran task is waiting for something that cannot happen (circular dependency) - in this case, set the 'nomod=True' on those tasks instances to break the loop + Fortran tasks can only run when all fortran tasks in a current task group are ready to be executed + This may cause a deadlock if some fortran task is waiting for something that cannot happen (circular dependency) + Should this ever happen, set the 'nomod=True' on those tasks instances to break the loop """ - color = 'GREEN' - run_str = '${FC} ${FCFLAGS} ${FCINCPATH_ST:INCPATHS} ${FCDEFINES_ST:DEFINES} ${_FCMODOUTFLAGS} ${FC_TGT_F}${TGT[0].abspath()} ${FC_SRC_F}${SRC[0].abspath()}' + run_str = '${FC} ${FCFLAGS} ${FCINCPATH_ST:INCPATHS} ${FCDEFINES_ST:DEFINES} ${_FCMODOUTFLAGS} ${FC_TGT_F}${TGT[0].abspath()} ${FC_SRC_F}${SRC[0].abspath()} ${FCPPFLAGS}' vars = ["FORTRANMODPATHFLAG"] def scan(self): - """scanner for fortran dependencies""" + """Fortran dependency scanner""" tmp = fc_scan.fortran_parser(self.generator.includes_nodes) tmp.task = self tmp.start(self.inputs[0]) - if Logs.verbose: - Logs.debug('deps: deps for %r: %r; unresolved %r' % (self.inputs, tmp.nodes, tmp.names)) return (tmp.nodes, tmp.names) def runnable_status(self): """ - Set the mod file outputs and the dependencies on the mod files over all the fortran tasks + Sets the mod file outputs and the dependencies on the mod files over all Fortran tasks executed by the main thread so there are no concurrency issues """ if getattr(self, 'mod_fortran_done', None): @@ -92,12 +99,11 @@ def runnable_status(self): ret = tsk.runnable_status() if ret == Task.ASK_LATER: # we have to wait for one of the other fortran tasks to be ready - # this may deadlock if there are dependencies between the fortran tasks + # this may deadlock if there are dependencies between fortran tasks # but this should not happen (we are setting them here!) 
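# A minimal sketch of the modfile() mapping defined above (an assumed configure
# context 'conf' with the fc tool loaded and FC_MOD_CAPITALIZATION / FC_SUBMOD_*
# left at their defaults; not part of this patch):
def show_module_names(conf):
	assert conf.modfile('parser') == 'parser.mod'             # plain module
	assert conf.modfile('parser:util') == 'parser@util.smod'  # submodule 'util' of module 'parser'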
for x in lst: x.mod_fortran_done = None - # TODO sort the list of tasks in bld.producer.outstanding to put all fortran tasks at the end return Task.ASK_LATER ins = Utils.defaultdict(set) @@ -110,10 +116,8 @@ def runnable_status(self): if x.startswith('MOD@'): name = bld.modfile(x.replace('MOD@', '')) node = bld.srcnode.find_or_declare(name) - if not hasattr(node, 'sig'): - node.sig = Utils.SIG_NIL tsk.set_outputs(node) - outs[id(node)].add(tsk) + outs[node].add(tsk) # the .mod files to use for tsk in lst: @@ -125,12 +129,14 @@ def runnable_status(self): if node and node not in tsk.outputs: if not node in bld.node_deps[key]: bld.node_deps[key].append(node) - ins[id(node)].add(tsk) + ins[node].add(tsk) # if the intersection matches, set the order for k in ins.keys(): for a in ins[k]: a.run_after.update(outs[k]) + for x in outs[k]: + self.generator.bld.producer.revdeps[x].add(a) # the scanner cannot output nodes, so we have to set them # ourselves as task.dep_nodes (additional input nodes) @@ -150,17 +156,21 @@ def runnable_status(self): return super(fc, self).runnable_status() class fcprogram(ccroot.link_task): - """Link fortran programs""" + """Links Fortran programs""" color = 'YELLOW' - run_str = '${FC} ${LINKFLAGS} ${FCLNK_SRC_F}${SRC} ${FCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FCSTLIB_MARKER} ${FCSTLIBPATH_ST:STLIBPATH} ${FCSTLIB_ST:STLIB} ${FCSHLIB_MARKER} ${FCLIBPATH_ST:LIBPATH} ${FCLIB_ST:LIB}' + run_str = '${FC} ${LINKFLAGS} ${FCLNK_SRC_F}${SRC} ${FCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FCSTLIB_MARKER} ${FCSTLIBPATH_ST:STLIBPATH} ${FCSTLIB_ST:STLIB} ${FCSHLIB_MARKER} ${FCLIBPATH_ST:LIBPATH} ${FCLIB_ST:LIB} ${LDFLAGS}' inst_to = '${BINDIR}' class fcshlib(fcprogram): - """Link fortran libraries""" + """Links Fortran libraries""" inst_to = '${LIBDIR}' +class fcstlib(ccroot.stlink_task): + """Links Fortran static libraries (uses ar by default)""" + pass # do not remove the pass statement + class fcprogram_test(fcprogram): - """Custom link task to obtain the compiler outputs for fortran configuration tests""" + """Custom link task to obtain compiler outputs for Fortran configuration tests""" def runnable_status(self): """This task is always executed""" @@ -170,12 +180,12 @@ def runnable_status(self): return ret def exec_command(self, cmd, **kw): - """Store the compiler std our/err onto the build context, to bld.out + bld.err""" + """Stores the compiler std our/err onto the build context, to bld.out + bld.err""" bld = self.generator.bld kw['shell'] = isinstance(cmd, str) kw['stdout'] = kw['stderr'] = Utils.subprocess.PIPE - kw['cwd'] = bld.variant_dir + kw['cwd'] = self.get_cwd() bld.out = bld.err = '' bld.to_log('command: %s\n' % cmd) @@ -183,15 +193,11 @@ def exec_command(self, cmd, **kw): kw['output'] = 0 try: (bld.out, bld.err) = bld.cmd_and_log(cmd, **kw) - except Exception as e: + except Errors.WafError: return -1 if bld.out: - bld.to_log("out: %s\n" % bld.out) + bld.to_log('out: %s\n' % bld.out) if bld.err: - bld.to_log("err: %s\n" % bld.err) - -class fcstlib(ccroot.stlink_task): - """Link fortran static libraries (uses ar by default)""" - pass # do not remove the pass statement + bld.to_log('err: %s\n' % bld.err) diff --git a/waflib/Tools/fc_config.py b/waflib/Tools/fc_config.py index d8c9ac9339..dc5e5c9e9a 100644 --- a/waflib/Tools/fc_config.py +++ b/waflib/Tools/fc_config.py @@ -1,7 +1,7 @@ #! 
/usr/bin/env python # encoding: utf-8 # DC 2008 -# Thomas Nagy 2010 (ita) +# Thomas Nagy 2016-2018 (ita) """ Fortran configuration helpers @@ -10,7 +10,6 @@ import re, os, sys, shlex from waflib.Configure import conf from waflib.TaskGen import feature, before_method -from waflib import Utils FC_FRAGMENT = ' program main\n end program main\n' FC_FRAGMENT2 = ' PROGRAM MAIN\n END\n' # what's the actual difference between these? @@ -18,45 +17,51 @@ @conf def fc_flags(conf): """ - Define common fortran configuration flags and file extensions + Defines common fortran configuration flags and file extensions """ v = conf.env - v['FC_SRC_F'] = [] - v['FC_TGT_F'] = ['-c', '-o'] - v['FCINCPATH_ST'] = '-I%s' - v['FCDEFINES_ST'] = '-D%s' + v.FC_SRC_F = [] + v.FC_TGT_F = ['-c', '-o'] + v.FCINCPATH_ST = '-I%s' + v.FCDEFINES_ST = '-D%s' - if not v['LINK_FC']: v['LINK_FC'] = v['FC'] - v['FCLNK_SRC_F'] = [] - v['FCLNK_TGT_F'] = ['-o'] + if not v.LINK_FC: + v.LINK_FC = v.FC - v['FCFLAGS_fcshlib'] = ['-fpic'] - v['LINKFLAGS_fcshlib'] = ['-shared'] - v['fcshlib_PATTERN'] = 'lib%s.so' + v.FCLNK_SRC_F = [] + v.FCLNK_TGT_F = ['-o'] - v['fcstlib_PATTERN'] = 'lib%s.a' + v.FCFLAGS_fcshlib = ['-fpic'] + v.LINKFLAGS_fcshlib = ['-shared'] + v.fcshlib_PATTERN = 'lib%s.so' - v['FCLIB_ST'] = '-l%s' - v['FCLIBPATH_ST'] = '-L%s' - v['FCSTLIB_ST'] = '-l%s' - v['FCSTLIBPATH_ST'] = '-L%s' - v['FCSTLIB_MARKER'] = '-Wl,-Bstatic' - v['FCSHLIB_MARKER'] = '-Wl,-Bdynamic' + v.fcstlib_PATTERN = 'lib%s.a' - v['SONAME_ST'] = '-Wl,-h,%s' + v.FCLIB_ST = '-l%s' + v.FCLIBPATH_ST = '-L%s' + v.FCSTLIB_ST = '-l%s' + v.FCSTLIBPATH_ST = '-L%s' + v.FCSTLIB_MARKER = '-Wl,-Bstatic' + v.FCSHLIB_MARKER = '-Wl,-Bdynamic' + + v.SONAME_ST = '-Wl,-h,%s' @conf def fc_add_flags(conf): """ - FCFLAGS? + Adds FCFLAGS / LDFLAGS / LINKFLAGS from os.environ to conf.env """ - conf.add_os_flags('FCFLAGS') - conf.add_os_flags('LDFLAGS', 'LINKFLAGS') + conf.add_os_flags('FCPPFLAGS', dup=False) + conf.add_os_flags('FCFLAGS', dup=False) + conf.add_os_flags('LINKFLAGS', dup=False) + conf.add_os_flags('LDFLAGS', dup=False) @conf def check_fortran(self, *k, **kw): - """See if the fortran compiler works by compiling a simple fortran program""" + """ + Compiles a Fortran program to ensure that the settings are correct + """ self.check_cc( fragment = FC_FRAGMENT, compile_filename = 'test.f', @@ -66,8 +71,8 @@ def check_fortran(self, *k, **kw): @conf def check_fc(self, *k, **kw): """ - Same as :py:func:`waflib.Tools.c_config.check` but default to the *Fortran* programming language - (Overriding the C defaults in :py:func:`waflib.Tools.c_config.validate_c` here) + Same as :py:func:`waflib.Tools.c_config.check` but defaults to the *Fortran* programming language + (this overrides the C defaults in :py:func:`waflib.Tools.c_config.validate_c`) """ kw['compiler'] = 'fc' if not 'compile_mode' in kw: @@ -88,35 +93,34 @@ def check_fc(self, *k, **kw): @conf def fortran_modifier_darwin(conf): """ - Define fortran flags and extensions for the OSX systems + Defines Fortran flags and extensions for OSX systems """ v = conf.env - v['FCFLAGS_fcshlib'] = ['-fPIC'] - v['LINKFLAGS_fcshlib'] = ['-dynamiclib', '-Wl,-compatibility_version,1', '-Wl,-current_version,1'] - v['fcshlib_PATTERN'] = 'lib%s.dylib' - v['FRAMEWORKPATH_ST'] = '-F%s' - v['FRAMEWORK_ST'] = '-framework %s' + v.FCFLAGS_fcshlib = ['-fPIC'] + v.LINKFLAGS_fcshlib = ['-dynamiclib'] + v.fcshlib_PATTERN = 'lib%s.dylib' + v.FRAMEWORKPATH_ST = '-F%s' + v.FRAMEWORK_ST = ['-framework'] - v['LINKFLAGS_fcstlib'] = [] - - 
v['FCSHLIB_MARKER'] = '' - v['FCSTLIB_MARKER'] = '' - v['SONAME_ST'] = '' + v.LINKFLAGS_fcstlib = [] + v.FCSHLIB_MARKER = '' + v.FCSTLIB_MARKER = '' + v.SONAME_ST = '' @conf def fortran_modifier_win32(conf): - """Define fortran flags for the windows platforms""" + """ + Defines Fortran flags for Windows platforms + """ v = conf.env - v['fcprogram_PATTERN'] = v['fcprogram_test_PATTERN'] = '%s.exe' - - v['fcshlib_PATTERN'] = '%s.dll' - v['implib_PATTERN'] = 'lib%s.dll.a' - v['IMPLIB_ST'] = '-Wl,--out-implib,%s' + v.fcprogram_PATTERN = v.fcprogram_test_PATTERN = '%s.exe' - v['FCFLAGS_fcshlib'] = [] + v.fcshlib_PATTERN = '%s.dll' + v.implib_PATTERN = '%s.dll.a' + v.IMPLIB_ST = '-Wl,--out-implib,%s' - v.append_value('FCFLAGS_fcshlib', ['-DDLL_EXPORT']) # TODO adding nonstandard defines like this DLL_EXPORT is not a good idea + v.FCFLAGS_fcshlib = [] # Auto-import is enabled by default even without this option, # but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages @@ -125,27 +129,23 @@ def fortran_modifier_win32(conf): @conf def fortran_modifier_cygwin(conf): - """Define fortran flags for use on cygwin""" + """ + Defines Fortran flags for use on cygwin + """ fortran_modifier_win32(conf) v = conf.env - v['fcshlib_PATTERN'] = 'cyg%s.dll' + v.fcshlib_PATTERN = 'cyg%s.dll' v.append_value('LINKFLAGS_fcshlib', ['-Wl,--enable-auto-image-base']) - v['FCFLAGS_fcshlib'] = [] + v.FCFLAGS_fcshlib = [] + # ------------------------------------------------------------------------ @conf def check_fortran_dummy_main(self, *k, **kw): """ - Guess if a main function is needed by compiling a code snippet with - the C compiler and link with the Fortran compiler - - TODO: (DC) - - handling dialects (F77, F90, etc... -> needs core support first) - - fix dummy main check (AC_FC_DUMMY_MAIN vs AC_FC_MAIN) - - TODO: what does the above mean? 
(ita) + Determines if a main function is needed by compiling a code snippet with + the C compiler and linking it with the Fortran compiler (useful on unix-like systems) """ - if not self.env.CC: self.fatal('A c compiler is required for check_fortran_dummy_main') @@ -178,12 +178,12 @@ def check_fortran_dummy_main(self, *k, **kw): # ------------------------------------------------------------------------ GCC_DRIVER_LINE = re.compile('^Driving:') -POSIX_STATIC_EXT = re.compile('\S+\.a') -POSIX_LIB_FLAGS = re.compile('-l\S+') +POSIX_STATIC_EXT = re.compile(r'\S+\.a') +POSIX_LIB_FLAGS = re.compile(r'-l\S+') @conf def is_link_verbose(self, txt): - """Return True if 'useful' link options can be found in txt""" + """Returns True if 'useful' link options can be found in txt""" assert isinstance(txt, str) for line in txt.splitlines(): if not GCC_DRIVER_LINE.search(line): @@ -194,7 +194,7 @@ def is_link_verbose(self, txt): @conf def check_fortran_verbose_flag(self, *k, **kw): """ - Check what kind of verbose (-v) flag works, then set it to env.FC_VERBOSE_FLAG + Checks what kind of verbose (-v) flag works, then sets it to env.FC_VERBOSE_FLAG """ self.start_msg('fortran link verbose flag') for x in ('-v', '--verbose', '-verbose', '-V'): @@ -204,8 +204,7 @@ def check_fortran_verbose_flag(self, *k, **kw): fragment = FC_FRAGMENT2, compile_filename = 'test.f', linkflags = [x], - mandatory=True - ) + mandatory=True) except self.errors.ConfigurationError: pass else: @@ -231,7 +230,7 @@ def check_fortran_verbose_flag(self, *k, **kw): RLINKFLAGS_IGNORED = [re.compile(f) for f in LINKFLAGS_IGNORED] def _match_ignore(line): - """Returns True if the line should be ignored (fortran test for verbosity).""" + """Returns True if the line should be ignored (Fortran verbose flag test)""" for i in RLINKFLAGS_IGNORED: if i.match(line): return True @@ -240,7 +239,6 @@ def _match_ignore(line): def parse_fortran_link(lines): """Given the output of verbose link of Fortran compiler, this returns a list of flags necessary for linking using the standard linker.""" - # TODO: On windows ? 
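# A minimal configuration sketch (an assumed user wscript, not part of this
# module) tying this file's helpers together: check_fortran compiles FC_FRAGMENT,
# check_fortran_verbose_flag sets FC_VERBOSE_FLAG, and check_fortran_clib stores
# the C-library link flags under the 'CLIB' uselib name:
def configure(conf):
	conf.load('compiler_fc')            # or a specific tool such as 'gfortran'
	conf.check_fortran()                # sanity-check the Fortran compiler
	conf.check_fortran_verbose_flag()   # needed by the C-library detection below
	conf.check_fortran_clib()           # then use uselib='CLIB' on task generators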
final_flags = [] for line in lines: if not GCC_DRIVER_LINE.match(line): @@ -283,7 +281,7 @@ def _parse_flink_token(lexer, token, tmp_flags): elif POSIX_LIB_FLAGS.match(token): tmp_flags.append(token) else: - # ignore anything not explicitely taken into account + # ignore anything not explicitly taken into account pass t = lexer.get_token() @@ -305,7 +303,7 @@ def _parse_flink_line(line, final_flags): @conf def check_fortran_clib(self, autoadd=True, *k, **kw): """ - Obtain the flags for linking with the C library + Obtains the flags for linking with the C library if this check works, add uselib='CLIB' to your task generators """ if not self.env.FC_VERBOSE_FLAG: @@ -333,24 +331,30 @@ def check_fortran_clib(self, autoadd=True, *k, **kw): def getoutput(conf, cmd, stdin=False): """ - TODO a bit redundant, can be removed anytime + Obtains Fortran command outputs """ - if stdin: - stdin = Utils.subprocess.PIPE + from waflib import Errors + if conf.env.env: + env = conf.env.env else: - stdin = None - env = conf.env.env or None + env = dict(os.environ) + env['LANG'] = 'C' + input = stdin and '\n'.encode() or None try: - p = Utils.subprocess.Popen(cmd, stdin=stdin, stdout=Utils.subprocess.PIPE, stderr=Utils.subprocess.PIPE, env=env) - if stdin: - p.stdin.write('\n'.encode()) - out, err = p.communicate() + out, err = conf.cmd_and_log(cmd, env=env, output=0, input=input) + except Errors.WafError as e: + # An WafError might indicate an error code during the command + # execution, in this case we still obtain the stderr and stdout, + # which we can use to find the version string. + if not (hasattr(e, 'stderr') and hasattr(e, 'stdout')): + raise e + else: + # Ignore the return code and return the original + # stdout and stderr. + out = e.stdout + err = e.stderr except Exception: conf.fatal('could not determine the compiler version %r' % cmd) - if not isinstance(out, str): - out = out.decode(sys.stdout.encoding or 'iso8859-1') - if not isinstance(err, str): - err = err.decode(sys.stdout.encoding or 'iso8859-1') return (out, err) # ------------------------------------------------------------------------ @@ -422,13 +426,12 @@ def check_fortran_mangling(self, *k, **kw): for (u, du, c) in mangling_schemes(): try: self.check_cc( - compile_filename = [], - features = 'link_main_routines_func', - msg = 'nomsg', - errmsg = 'nomsg', - mandatory=True, - dummy_func_nounder = mangle_name(u, du, c, "foobar"), - dummy_func_under = mangle_name(u, du, c, "foo_bar"), + compile_filename = [], + features = 'link_main_routines_func', + msg = 'nomsg', + errmsg = 'nomsg', + dummy_func_nounder = mangle_name(u, du, c, 'foobar'), + dummy_func_under = mangle_name(u, du, c, 'foo_bar'), main_func_name = self.env.FC_MAIN ) except self.errors.ConfigurationError: @@ -440,25 +443,27 @@ def check_fortran_mangling(self, *k, **kw): else: self.end_msg(False) self.fatal('mangler not found') - return (u, du, c) @feature('pyext') @before_method('propagate_uselib_vars', 'apply_link') def set_lib_pat(self): - """Set the fortran flags for linking with the python library""" - self.env['fcshlib_PATTERN'] = self.env['pyext_PATTERN'] + """Sets the Fortran flags for linking with Python""" + self.env.fcshlib_PATTERN = self.env.pyext_PATTERN @conf def detect_openmp(self): + """ + Detects openmp flags and sets the OPENMP ``FCFLAGS``/``LINKFLAGS`` + """ for x in ('-fopenmp','-openmp','-mp','-xopenmp','-omp','-qsmp=omp'): try: self.check_fc( - msg='Checking for OpenMP flag %s' % x, - fragment='program main\n call omp_get_num_threads()\nend program main', - 
fcflags=x, - linkflags=x, - uselib_store='OPENMP' + msg = 'Checking for OpenMP flag %s' % x, + fragment = 'program main\n call omp_get_num_threads()\nend program main', + fcflags = x, + linkflags = x, + uselib_store = 'OPENMP' ) except self.errors.ConfigurationError: pass @@ -467,3 +472,17 @@ def detect_openmp(self): else: self.fatal('Could not find OpenMP') +@conf +def check_gfortran_o_space(self): + if self.env.FC_NAME != 'GFORTRAN' or int(self.env.FC_VERSION[0]) > 4: + # This is for old compilers and only for gfortran. + # No idea how other implementations handle this. Be safe and bail out. + return + self.env.stash() + self.env.FCLNK_TGT_F = ['-o', ''] + try: + self.check_fc(msg='Checking if the -o link must be split from arguments', fragment=FC_FRAGMENT, features='fc fcshlib') + except self.errors.ConfigurationError: + self.env.revert() + else: + self.env.commit() diff --git a/waflib/Tools/fc_scan.py b/waflib/Tools/fc_scan.py index 6d04401735..0824c92b7e 100644 --- a/waflib/Tools/fc_scan.py +++ b/waflib/Tools/fc_scan.py @@ -1,31 +1,28 @@ #! /usr/bin/env python # encoding: utf-8 # DC 2008 -# Thomas Nagy 2010 (ita) +# Thomas Nagy 2016-2018 (ita) import re -from waflib import Utils, Task, TaskGen, Logs -from waflib.TaskGen import feature, before_method, after_method, extension -from waflib.Configure import conf - -INC_REGEX = """(?:^|['">]\s*;)\s*(?:|#\s*)INCLUDE\s+(?:\w+_)?[<"'](.+?)(?=["'>])""" -USE_REGEX = """(?:^|;)\s*USE(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)""" -MOD_REGEX = """(?:^|;)\s*MODULE(?!\s*PROCEDURE)(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)""" +INC_REGEX = r"""(?:^|['">]\s*;)\s*(?:|#\s*)INCLUDE\s+(?:\w+_)?[<"'](.+?)(?=["'>])""" +USE_REGEX = r"""(?:^|;)\s*USE(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)""" +MOD_REGEX = r"""(?:^|;)\s*MODULE(?!\s+(?:PROCEDURE|SUBROUTINE|FUNCTION))\s+(\w+)""" +SMD_REGEX = r"""(?:^|;)\s*SUBMODULE\s*\(([\w:]+)\)\s*(\w+)""" re_inc = re.compile(INC_REGEX, re.I) re_use = re.compile(USE_REGEX, re.I) re_mod = re.compile(MOD_REGEX, re.I) +re_smd = re.compile(SMD_REGEX, re.I) class fortran_parser(object): """ - This parser will return: + This parser returns: - * the nodes corresponding to the module names that will be produced + * the nodes corresponding to the module names to produce * the nodes corresponding to the include files used - * the module names used by the fortran file + * the module names used by the fortran files """ - def __init__(self, incpaths): self.seen = [] """Files already parsed""" @@ -41,7 +38,7 @@ def __init__(self, incpaths): def find_deps(self, node): """ - Parse a fortran file to read the dependencies used and provided + Parses a Fortran file to obtain the dependencies used/provided :param node: fortran file to read :type node: :py:class:`waflib.Node.Node` @@ -63,11 +60,15 @@ def find_deps(self, node): m = re_mod.search(line) if m: mods.append(m.group(1)) + m = re_smd.search(line) + if m: + uses.append(m.group(1)) + mods.append('{0}:{1}'.format(m.group(1),m.group(2))) return (incs, uses, mods) def start(self, node): """ - Start the parsing. Use the stack self.waiting to hold the nodes to iterate on + Start parsing. Use the stack ``self.waiting`` to hold nodes to iterate on :param node: fortran file :type node: :py:class:`waflib.Node.Node` @@ -79,10 +80,9 @@ def start(self, node): def iter(self, node): """ - Process a single file in the search for dependencies, extract the files used - the modules used, and the modules provided. + Processes a single file during dependency parsing. 
Extracts files used + modules used and modules provided. """ - path = node.abspath() incs, uses, mods = self.find_deps(node) for x in incs: if x in self.seen: @@ -102,7 +102,7 @@ def iter(self, node): def tryfind_header(self, filename): """ - Try to find an include and add it the nodes to process + Adds an include file to the list of nodes to process :param filename: file name :type filename: string @@ -118,4 +118,3 @@ def tryfind_header(self, filename): if not filename in self.names: self.names.append(filename) - diff --git a/waflib/Tools/flex.py b/waflib/Tools/flex.py index 4bdc208eca..2256657b6a 100644 --- a/waflib/Tools/flex.py +++ b/waflib/Tools/flex.py @@ -1,14 +1,16 @@ #!/usr/bin/env python # encoding: utf-8 # John O'Meara, 2006 -# Thomas Nagy, 2006-2010 (ita) +# Thomas Nagy, 2006-2018 (ita) """ The **flex** program is a code generator which creates C or C++ files. The generated files are compiled into object files. """ -import waflib.TaskGen, os, re +import os, re +from waflib import Task, TaskGen +from waflib.Tools import ccroot def decide_ext(self, node): if 'cxx' in self.features: @@ -20,12 +22,13 @@ def flexfun(tsk): bld = tsk.generator.bld wd = bld.variant_dir def to_list(xx): - if isinstance(xx, str): return [xx] + if isinstance(xx, str): + return [xx] return xx tsk.last_cmd = lst = [] - lst.extend(to_list(env['FLEX'])) - lst.extend(to_list(env['FLEXFLAGS'])) - inputs = [a.path_from(bld.bldnode) for a in tsk.inputs] + lst.extend(to_list(env.FLEX)) + lst.extend(to_list(env.FLEXFLAGS)) + inputs = [a.path_from(tsk.get_cwd()) for a in tsk.inputs] if env.FLEX_MSYS: inputs = [x.replace(os.sep, '/') for x in inputs] lst.extend(inputs) @@ -33,13 +36,19 @@ def to_list(xx): txt = bld.cmd_and_log(lst, cwd=wd, env=env.env or None, quiet=0) tsk.outputs[0].write(txt.replace('\r\n', '\n').replace('\r', '\n')) # issue #1207 -waflib.TaskGen.declare_chain( +TaskGen.declare_chain( name = 'flex', rule = flexfun, # issue #854 ext_in = '.l', decider = decide_ext, ) +# To support the following: +# bld(features='c', flexflags='-P/foo') +Task.classes['flex'].vars = ['FLEXFLAGS', 'FLEX'] +ccroot.USELIB_VARS['c'].add('FLEXFLAGS') +ccroot.USELIB_VARS['cxx'].add('FLEXFLAGS') + def configure(conf): """ Detect the *flex* program diff --git a/waflib/Tools/g95.py b/waflib/Tools/g95.py index 027be39e47..f69ba4f367 100644 --- a/waflib/Tools/g95.py +++ b/waflib/Tools/g95.py @@ -1,7 +1,7 @@ #! 
/usr/bin/env python # encoding: utf-8 # KWS 2010 -# Thomas Nagy 2010 (ita) +# Thomas Nagy 2016-2018 (ita) import re from waflib import Utils @@ -17,9 +17,9 @@ def find_g95(conf): @conf def g95_flags(conf): v = conf.env - v['FCFLAGS_fcshlib'] = ['-fPIC'] - v['FORTRANMODFLAG'] = ['-fmod=', ''] # template for module path - v['FCFLAGS_DEBUG'] = ['-Werror'] # why not + v.FCFLAGS_fcshlib = ['-fPIC'] + v.FORTRANMODFLAG = ['-fmod=', ''] # template for module path + v.FCFLAGS_DEBUG = ['-Werror'] # why not @conf def g95_modifier_win32(conf): @@ -35,7 +35,7 @@ def g95_modifier_darwin(conf): @conf def g95_modifier_platform(conf): - dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform() + dest_os = conf.env.DEST_OS or Utils.unversioned_sys_platform() g95_modifier_func = getattr(conf, 'g95_modifier_' + dest_os, None) if g95_modifier_func: g95_modifier_func() @@ -54,7 +54,7 @@ def get_g95_version(conf, fc): if not match: conf.fatal('cannot determine g95 version') k = match.groupdict() - conf.env['FC_VERSION'] = (k['major'], k['minor']) + conf.env.FC_VERSION = (k['major'], k['minor']) def configure(conf): conf.find_g95() diff --git a/waflib/Tools/gas.py b/waflib/Tools/gas.py index c97b373fe8..4a8745afd7 100644 --- a/waflib/Tools/gas.py +++ b/waflib/Tools/gas.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # encoding: utf-8 -# Thomas Nagy, 2008-2010 (ita) +# Thomas Nagy, 2008-2018 (ita) "Detect as/gas/gcc for compiling assembly files" @@ -16,3 +16,4 @@ def configure(conf): conf.env.ASLNK_TGT_F = ['-o'] conf.find_ar() conf.load('asm') + conf.env.ASM_NAME = 'gas' diff --git a/waflib/Tools/gcc.py b/waflib/Tools/gcc.py index 0de826a220..2bda5f5242 100644 --- a/waflib/Tools/gcc.py +++ b/waflib/Tools/gcc.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # encoding: utf-8 -# Thomas Nagy, 2006-2010 (ita) +# Thomas Nagy, 2006-2018 (ita) # Ralf Habacker, 2006 (rh) # Yinon Ehrlich, 2009 @@ -8,8 +8,6 @@ gcc/llvm detection. 
""" -import os, sys -from waflib import Configure, Options, Utils from waflib.Tools import ccroot, ar from waflib.Configure import conf @@ -29,101 +27,101 @@ def gcc_common_flags(conf): """ v = conf.env - v['CC_SRC_F'] = [] - v['CC_TGT_F'] = ['-c', '-o'] + v.CC_SRC_F = [] + v.CC_TGT_F = ['-c', '-o'] - # linker - if not v['LINK_CC']: v['LINK_CC'] = v['CC'] - v['CCLNK_SRC_F'] = [] - v['CCLNK_TGT_F'] = ['-o'] - v['CPPPATH_ST'] = '-I%s' - v['DEFINES_ST'] = '-D%s' + if not v.LINK_CC: + v.LINK_CC = v.CC - v['LIB_ST'] = '-l%s' # template for adding libs - v['LIBPATH_ST'] = '-L%s' # template for adding libpaths - v['STLIB_ST'] = '-l%s' - v['STLIBPATH_ST'] = '-L%s' - v['RPATH_ST'] = '-Wl,-rpath,%s' + v.CCLNK_SRC_F = [] + v.CCLNK_TGT_F = ['-o'] + v.CPPPATH_ST = '-I%s' + v.DEFINES_ST = '-D%s' - v['SONAME_ST'] = '-Wl,-h,%s' - v['SHLIB_MARKER'] = '-Wl,-Bdynamic' - v['STLIB_MARKER'] = '-Wl,-Bstatic' + v.LIB_ST = '-l%s' # template for adding libs + v.LIBPATH_ST = '-L%s' # template for adding libpaths + v.STLIB_ST = '-l%s' + v.STLIBPATH_ST = '-L%s' + v.RPATH_ST = '-Wl,-rpath,%s' - # program - v['cprogram_PATTERN'] = '%s' + v.SONAME_ST = '-Wl,-h,%s' + v.SHLIB_MARKER = '-Wl,-Bdynamic' + v.STLIB_MARKER = '-Wl,-Bstatic' - # shared librar - v['CFLAGS_cshlib'] = ['-fPIC'] - v['LINKFLAGS_cshlib'] = ['-shared'] - v['cshlib_PATTERN'] = 'lib%s.so' + v.cprogram_PATTERN = '%s' - # static lib - v['LINKFLAGS_cstlib'] = ['-Wl,-Bstatic'] - v['cstlib_PATTERN'] = 'lib%s.a' + v.CFLAGS_cshlib = ['-fPIC'] + v.LINKFLAGS_cshlib = ['-shared'] + v.cshlib_PATTERN = 'lib%s.so' - # osx stuff - v['LINKFLAGS_MACBUNDLE'] = ['-bundle', '-undefined', 'dynamic_lookup'] - v['CFLAGS_MACBUNDLE'] = ['-fPIC'] - v['macbundle_PATTERN'] = '%s.bundle' + v.LINKFLAGS_cstlib = ['-Wl,-Bstatic'] + v.cstlib_PATTERN = 'lib%s.a' + + v.LINKFLAGS_MACBUNDLE = ['-bundle', '-undefined', 'dynamic_lookup'] + v.CFLAGS_MACBUNDLE = ['-fPIC'] + v.macbundle_PATTERN = '%s.bundle' @conf def gcc_modifier_win32(conf): """Configuration flags for executing gcc on Windows""" v = conf.env - v['cprogram_PATTERN'] = '%s.exe' + v.cprogram_PATTERN = '%s.exe' - v['cshlib_PATTERN'] = '%s.dll' - v['implib_PATTERN'] = 'lib%s.dll.a' - v['IMPLIB_ST'] = '-Wl,--out-implib,%s' + v.cshlib_PATTERN = '%s.dll' + v.implib_PATTERN = '%s.dll.a' + v.IMPLIB_ST = '-Wl,--out-implib,%s' - v['CFLAGS_cshlib'] = [] + v.CFLAGS_cshlib = [] # Auto-import is enabled by default even without this option, # but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages # that the linker emits otherwise. 
- v.append_value('LINKFLAGS', ['-Wl,--enable-auto-import']) + import sys + if sys.platform != "cygwin": + # disabled on cygwin as it breaks build with arm cross compiler + v.append_value('LINKFLAGS', ['-Wl,--enable-auto-import']) @conf def gcc_modifier_cygwin(conf): """Configuration flags for executing gcc on Cygwin""" gcc_modifier_win32(conf) v = conf.env - v['cshlib_PATTERN'] = 'cyg%s.dll' + v.cshlib_PATTERN = 'cyg%s.dll' v.append_value('LINKFLAGS_cshlib', ['-Wl,--enable-auto-image-base']) - v['CFLAGS_cshlib'] = [] + v.CFLAGS_cshlib = [] @conf def gcc_modifier_darwin(conf): """Configuration flags for executing gcc on MacOS""" v = conf.env - v['CFLAGS_cshlib'] = ['-fPIC'] - v['LINKFLAGS_cshlib'] = ['-dynamiclib', '-Wl,-compatibility_version,1', '-Wl,-current_version,1'] - v['cshlib_PATTERN'] = 'lib%s.dylib' - v['FRAMEWORKPATH_ST'] = '-F%s' - v['FRAMEWORK_ST'] = ['-framework'] - v['ARCH_ST'] = ['-arch'] + v.CFLAGS_cshlib = ['-fPIC'] + v.LINKFLAGS_cshlib = ['-dynamiclib'] + v.cshlib_PATTERN = 'lib%s.dylib' + v.FRAMEWORKPATH_ST = '-F%s' + v.FRAMEWORK_ST = ['-framework'] + v.ARCH_ST = ['-arch'] - v['LINKFLAGS_cstlib'] = [] + v.LINKFLAGS_cstlib = [] - v['SHLIB_MARKER'] = [] - v['STLIB_MARKER'] = [] - v['SONAME_ST'] = [] + v.SHLIB_MARKER = [] + v.STLIB_MARKER = [] + v.SONAME_ST = [] @conf def gcc_modifier_aix(conf): """Configuration flags for executing gcc on AIX""" v = conf.env - v['LINKFLAGS_cprogram'] = ['-Wl,-brtl'] - v['LINKFLAGS_cshlib'] = ['-shared','-Wl,-brtl,-bexpfull'] - v['SHLIB_MARKER'] = [] + v.LINKFLAGS_cprogram = ['-Wl,-brtl'] + v.LINKFLAGS_cshlib = ['-shared','-Wl,-brtl,-bexpfull'] + v.SHLIB_MARKER = [] @conf def gcc_modifier_hpux(conf): v = conf.env - v['SHLIB_MARKER'] = [] - v['STLIB_MARKER'] = [] - v['CFLAGS_cshlib'] = ['-fPIC','-DPIC'] - v['cshlib_PATTERN'] = 'lib%s.sl' + v.SHLIB_MARKER = [] + v.STLIB_MARKER = [] + v.CFLAGS_cshlib = ['-fPIC','-DPIC'] + v.cshlib_PATTERN = 'lib%s.sl' @conf def gcc_modifier_openbsd(conf): @@ -131,9 +129,10 @@ def gcc_modifier_openbsd(conf): @conf def gcc_modifier_osf1V(conf): - v['SHLIB_MARKER'] = [] - v['STLIB_MARKER'] = [] - v['SONAME_ST'] = [] + v = conf.env + v.SHLIB_MARKER = [] + v.STLIB_MARKER = [] + v.SONAME_ST = [] @conf def gcc_modifier_platform(conf): @@ -156,5 +155,5 @@ def configure(conf): conf.cc_load_tools() conf.cc_add_flags() conf.link_add_flags() - + conf.check_gcc_o_space() diff --git a/waflib/Tools/gdc.py b/waflib/Tools/gdc.py index 61c1cce3e5..d89a66d306 100644 --- a/waflib/Tools/gdc.py +++ b/waflib/Tools/gdc.py @@ -2,14 +2,13 @@ # encoding: utf-8 # Carlos Rafael Giani, 2007 (dv) -import sys from waflib.Tools import ar, d from waflib.Configure import conf @conf def find_gdc(conf): """ - Find the program gdc and set the variable *D* + Finds the program gdc and set the variable *D* """ conf.find_program('gdc', var='D') @@ -20,33 +19,29 @@ def find_gdc(conf): @conf def common_flags_gdc(conf): """ - Set the flags required by *gdc* + Sets the flags required by *gdc* """ v = conf.env - # _DFLAGS _DIMPORTFLAGS + v.DFLAGS = [] - # for mory info about the meaning of this dict see dmd.py - v['DFLAGS'] = [] + v.D_SRC_F = ['-c'] + v.D_TGT_F = '-o%s' - v['D_SRC_F'] = ['-c'] - v['D_TGT_F'] = '-o%s' + v.D_LINKER = v.D + v.DLNK_SRC_F = '' + v.DLNK_TGT_F = '-o%s' + v.DINC_ST = '-I%s' - # linker - v['D_LINKER'] = v['D'] - v['DLNK_SRC_F'] = '' - v['DLNK_TGT_F'] = '-o%s' - v['DINC_ST'] = '-I%s' + v.DSHLIB_MARKER = v.DSTLIB_MARKER = '' + v.DSTLIB_ST = v.DSHLIB_ST = '-l%s' + v.DSTLIBPATH_ST = v.DLIBPATH_ST = '-L%s' - v['DSHLIB_MARKER'] = 
v['DSTLIB_MARKER'] = '' - v['DSTLIB_ST'] = v['DSHLIB_ST'] = '-l%s' - v['DSTLIBPATH_ST'] = v['DLIBPATH_ST'] = '-L%s' + v.LINKFLAGS_dshlib = ['-shared'] - v['LINKFLAGS_dshlib'] = ['-shared'] - - v['DHEADER_ext'] = '.di' + v.DHEADER_ext = '.di' v.DFLAGS_d_with_header = '-fintfc' - v['D_HDR_F'] = '-fintfc-file=%s' + v.D_HDR_F = '-fintfc-file=%s' def configure(conf): """ diff --git a/waflib/Tools/gfortran.py b/waflib/Tools/gfortran.py index 06d039c254..1050667315 100644 --- a/waflib/Tools/gfortran.py +++ b/waflib/Tools/gfortran.py @@ -1,7 +1,7 @@ #! /usr/bin/env python # encoding: utf-8 # DC 2008 -# Thomas Nagy 2010 (ita) +# Thomas Nagy 2016-2018 (ita) import re from waflib import Utils @@ -19,9 +19,9 @@ def find_gfortran(conf): @conf def gfortran_flags(conf): v = conf.env - v['FCFLAGS_fcshlib'] = ['-fPIC'] - v['FORTRANMODFLAG'] = ['-J', ''] # template for module path - v['FCFLAGS_DEBUG'] = ['-Werror'] # why not + v.FCFLAGS_fcshlib = ['-fPIC'] + v.FORTRANMODFLAG = ['-J', ''] # template for module path + v.FCFLAGS_DEBUG = ['-Werror'] # why not @conf def gfortran_modifier_win32(conf): @@ -37,7 +37,7 @@ def gfortran_modifier_darwin(conf): @conf def gfortran_modifier_platform(conf): - dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform() + dest_os = conf.env.DEST_OS or Utils.unversioned_sys_platform() gfortran_modifier_func = getattr(conf, 'gfortran_modifier_' + dest_os, None) if gfortran_modifier_func: gfortran_modifier_func() @@ -50,8 +50,10 @@ def get_gfortran_version(conf, fc): version_re = re.compile(r"GNU\s*Fortran", re.I).search cmd = fc + ['--version'] out, err = fc_config.getoutput(conf, cmd, stdin=False) - if out: match = version_re(out) - else: match = version_re(err) + if out: + match = version_re(out) + else: + match = version_re(err) if not match: conf.fatal('Could not determine the compiler type') @@ -79,7 +81,7 @@ def isD(var): def isT(var): return var in k and k[var] != '0' - conf.env['FC_VERSION'] = (k['__GNUC__'], k['__GNUC_MINOR__'], k['__GNUC_PATCHLEVEL__']) + conf.env.FC_VERSION = (k['__GNUC__'], k['__GNUC_MINOR__'], k['__GNUC_PATCHLEVEL__']) def configure(conf): conf.find_gfortran() @@ -88,3 +90,4 @@ def configure(conf): conf.fc_add_flags() conf.gfortran_flags() conf.gfortran_modifier_platform() + conf.check_gfortran_o_space() diff --git a/waflib/Tools/glib2.py b/waflib/Tools/glib2.py index 15dcf7cce0..1589a61c87 100644 --- a/waflib/Tools/glib2.py +++ b/waflib/Tools/glib2.py @@ -1,6 +1,6 @@ #! /usr/bin/env python # encoding: utf-8 -# Thomas Nagy, 2006-2010 (ita) +# Thomas Nagy, 2006-2018 (ita) """ Support for GLib2 tools: @@ -12,8 +12,9 @@ """ import os +import functools from waflib import Context, Task, Utils, Options, Errors, Logs -from waflib.TaskGen import taskgen_method, before_method, after_method, feature, extension +from waflib.TaskGen import taskgen_method, before_method, feature, extension from waflib.Configure import conf ################## marshal files @@ -21,7 +22,7 @@ @taskgen_method def add_marshal_file(self, filename, prefix): """ - Add a file to the list of marshal files to process. Store them in the attribute *marshal_list*. + Adds a file to the list of marshal files to process. Store them in the attribute *marshal_list*. :param filename: xml file to compile :type filename: string @@ -36,8 +37,8 @@ def add_marshal_file(self, filename, prefix): @before_method('process_source') def process_marshal(self): """ - Process the marshal files stored in the attribute *marshal_list* to create :py:class:`waflib.Tools.glib2.glib_genmarshal` instances. 
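# A minimal usage sketch (an assumed user wscript; the GLIB/GOBJECT uselib names
# would come from a separate pkg-config check) for the marshal support above:
def build(bld):
	tg = bld(features='c glib2 cprogram', source='main.c', target='app', use='GLIB GOBJECT')
	tg.add_marshal_file('marshal.list', 'app_marshal')   # runs glib-genmarshal and compiles the generated .c file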
- Add the c file created to the list of source to process. + Processes the marshal files stored in the attribute *marshal_list* to create :py:class:`waflib.Tools.glib2.glib_genmarshal` instances. + Adds the c file created to the list of source to process. """ for f, prefix in getattr(self, 'marshal_list', []): node = self.path.find_resource(f) @@ -54,10 +55,11 @@ def process_marshal(self): self.source.append(c_node) class glib_genmarshal(Task.Task): - + vars = ['GLIB_GENMARSHAL_PREFIX', 'GLIB_GENMARSHAL'] + color = 'BLUE' + ext_out = ['.h'] def run(self): - - bld = self.inputs[0].__class__.ctx + bld = self.generator.bld get = self.env.get_flat cmd1 = "%s %s --prefix=%s --header > %s" % ( @@ -68,7 +70,8 @@ def run(self): ) ret = bld.exec_command(cmd1) - if ret: return ret + if ret: + return ret #print self.outputs[1].abspath() c = '''#include "%s"\n''' % self.outputs[0].name @@ -82,16 +85,12 @@ def run(self): ) return bld.exec_command(cmd2) - vars = ['GLIB_GENMARSHAL_PREFIX', 'GLIB_GENMARSHAL'] - color = 'BLUE' - ext_out = ['.h'] - ########################## glib-mkenums @taskgen_method def add_enums_from_template(self, source='', target='', template='', comments=''): """ - Add a file to the list of enum files to process. Store them in the attribute *enums_list*. + Adds a file to the list of enum files to process. Stores them in the attribute *enums_list*. :param source: enum file to process :type source: string @@ -122,7 +121,7 @@ def add_enums(self, source='', target='', file_head='', file_prod='', file_tail='', enum_prod='', value_head='', value_prod='', value_tail='', comments=''): """ - Add a file to the list of enum files to process. Store them in the attribute *enums_list*. + Adds a file to the list of enum files to process. Stores them in the attribute *enums_list*. :param source: enum file to process :type source: string @@ -156,7 +155,7 @@ def add_enums(self, source='', target='', @before_method('process_source') def process_enums(self): """ - Process the enum files stored in the attribute *enum_list* to create :py:class:`waflib.Tools.glib2.glib_mkenums` instances. + Processes the enum files stored in the attribute *enum_list* to create :py:class:`waflib.Tools.glib2.glib_mkenums` instances. 
""" for enum in getattr(self, 'enums_list', []): task = self.create_task('glib_mkenums') @@ -170,7 +169,7 @@ def process_enums(self): raise Errors.WafError('missing source ' + str(enum)) source_list = [self.path.find_resource(k) for k in source_list] inputs += source_list - env['GLIB_MKENUMS_SOURCE'] = [k.abspath() for k in source_list] + env.GLIB_MKENUMS_SOURCE = [k.abspath() for k in source_list] # find the target if not enum['target']: @@ -178,7 +177,7 @@ def process_enums(self): tgt_node = self.path.find_or_declare(enum['target']) if tgt_node.name.endswith('.c'): self.source.append(tgt_node) - env['GLIB_MKENUMS_TARGET'] = tgt_node.abspath() + env.GLIB_MKENUMS_TARGET = tgt_node.abspath() options = [] @@ -199,7 +198,7 @@ def process_enums(self): if enum[param]: options.append('%s %r' % (option, enum[param])) - env['GLIB_MKENUMS_OPTIONS'] = ' '.join(options) + env.GLIB_MKENUMS_OPTIONS = ' '.join(options) # update the task instance task.set_inputs(inputs) @@ -207,7 +206,7 @@ def process_enums(self): class glib_mkenums(Task.Task): """ - Process enum files + Processes enum files """ run_str = '${GLIB_MKENUMS} ${GLIB_MKENUMS_OPTIONS} ${GLIB_MKENUMS_SOURCE} > ${GLIB_MKENUMS_TARGET}' color = 'PINK' @@ -218,7 +217,7 @@ class glib_mkenums(Task.Task): @taskgen_method def add_settings_schemas(self, filename_list): """ - Add settings files to process, add them to *settings_schema_files* + Adds settings files to process to *settings_schema_files* :param filename_list: files :type filename_list: list of string @@ -234,7 +233,7 @@ def add_settings_schemas(self, filename_list): @taskgen_method def add_settings_enums(self, namespace, filename_list): """ - This function may be called only once by task generator to set the enums namespace. + Called only once by task generator to set the enums namespace. :param namespace: namespace :type namespace: string @@ -242,17 +241,17 @@ def add_settings_enums(self, namespace, filename_list): :type filename_list: file list """ if hasattr(self, 'settings_enum_namespace'): - raise Errors.WafError("Tried to add gsettings enums to '%s' more than once" % self.name) + raise Errors.WafError("Tried to add gsettings enums to %r more than once" % self.name) self.settings_enum_namespace = namespace - if type(filename_list) != 'list': + if not isinstance(filename_list, list): filename_list = [filename_list] self.settings_enum_files = filename_list @feature('glib2') def process_settings(self): """ - Process the schema files in *settings_schema_files* to create :py:class:`waflib.Tools.glib2.glib_mkenums` instances. The + Processes the schema files in *settings_schema_files* to create :py:class:`waflib.Tools.glib2.glib_mkenums` instances. The same files are validated through :py:class:`waflib.Tools.glib2.glib_validate_schema` tasks. """ @@ -260,7 +259,7 @@ def process_settings(self): install_files = [] settings_schema_files = getattr(self, 'settings_schema_files', []) - if settings_schema_files and not self.env['GLIB_COMPILE_SCHEMAS']: + if settings_schema_files and not self.env.GLIB_COMPILE_SCHEMAS: raise Errors.WafError ("Unable to process GSettings schemas - glib-compile-schemas was not found during configure") # 1. 
process gsettings_enum_files (generate .enums.xml) @@ -271,18 +270,18 @@ def process_settings(self): source_list = self.settings_enum_files source_list = [self.path.find_resource(k) for k in source_list] enums_task.set_inputs(source_list) - enums_task.env['GLIB_MKENUMS_SOURCE'] = [k.abspath() for k in source_list] + enums_task.env.GLIB_MKENUMS_SOURCE = [k.abspath() for k in source_list] target = self.settings_enum_namespace + '.enums.xml' tgt_node = self.path.find_or_declare(target) enums_task.set_outputs(tgt_node) - enums_task.env['GLIB_MKENUMS_TARGET'] = tgt_node.abspath() + enums_task.env.GLIB_MKENUMS_TARGET = tgt_node.abspath() enums_tgt_node = [tgt_node] - install_files.append (tgt_node) + install_files.append(tgt_node) options = '--comments "" --fhead "" --vhead " <@type@ id=\\"%s.@EnumName@\\">" --vprod " " --vtail " " --ftail "" ' % (self.settings_enum_namespace) - enums_task.env['GLIB_MKENUMS_OPTIONS'] = options + enums_task.env.GLIB_MKENUMS_OPTIONS = options # 2. process gsettings_schema_files (validate .gschema.xml files) # @@ -291,38 +290,47 @@ def process_settings(self): schema_node = self.path.find_resource(schema) if not schema_node: - raise Errors.WafError("Cannot find the schema file '%s'" % schema) + raise Errors.WafError("Cannot find the schema file %r" % schema) install_files.append(schema_node) source_list = enums_tgt_node + [schema_node] schema_task.set_inputs (source_list) - schema_task.env['GLIB_COMPILE_SCHEMAS_OPTIONS'] = [("--schema-file=" + k.abspath()) for k in source_list] + schema_task.env.GLIB_COMPILE_SCHEMAS_OPTIONS = [("--schema-file=" + k.abspath()) for k in source_list] target_node = schema_node.change_ext('.xml.valid') schema_task.set_outputs (target_node) - schema_task.env['GLIB_VALIDATE_SCHEMA_OUTPUT'] = target_node.abspath() + schema_task.env.GLIB_VALIDATE_SCHEMA_OUTPUT = target_node.abspath() # 3. 
schemas install task def compile_schemas_callback(bld): - if not bld.is_install: return - Logs.pprint ('YELLOW','Updating GSettings schema cache') - command = Utils.subst_vars("${GLIB_COMPILE_SCHEMAS} ${GSETTINGSSCHEMADIR}", bld.env) - ret = self.bld.exec_command(command) + if not bld.is_install: + return + compile_schemas = Utils.to_list(bld.env.GLIB_COMPILE_SCHEMAS) + destdir = Options.options.destdir + paths = bld._compile_schemas_registered + if destdir: + paths = (os.path.join(destdir, path.lstrip(os.sep)) for path in paths) + for path in paths: + Logs.pprint('YELLOW', 'Updating GSettings schema cache %r' % path) + if self.bld.exec_command(compile_schemas + [path]): + Logs.warn('Could not update GSettings schema cache %r' % path) if self.bld.is_install: - if not self.env['GSETTINGSSCHEMADIR']: + schemadir = self.env.GSETTINGSSCHEMADIR + if not schemadir: raise Errors.WafError ('GSETTINGSSCHEMADIR not defined (should have been set up automatically during configure)') if install_files: - self.bld.install_files (self.env['GSETTINGSSCHEMADIR'], install_files) - - if not hasattr(self.bld, '_compile_schemas_registered'): - self.bld.add_post_fun (compile_schemas_callback) - self.bld._compile_schemas_registered = True + self.add_install_files(install_to=schemadir, install_from=install_files) + registered_schemas = getattr(self.bld, '_compile_schemas_registered', None) + if not registered_schemas: + registered_schemas = self.bld._compile_schemas_registered = set() + self.bld.add_post_fun(compile_schemas_callback) + registered_schemas.add(schemadir) class glib_validate_schema(Task.Task): """ - Validate schema files + Validates schema files """ run_str = 'rm -f ${GLIB_VALIDATE_SCHEMA_OUTPUT} && ${GLIB_COMPILE_SCHEMAS} --dry-run ${GLIB_COMPILE_SCHEMAS_OPTIONS} && touch ${GLIB_VALIDATE_SCHEMA_OUTPUT}' color = 'PINK' @@ -332,9 +340,9 @@ class glib_validate_schema(Task.Task): @extension('.gresource.xml') def process_gresource_source(self, node): """ - Hook to process .gresource.xml to generate C source files + Creates tasks that turn ``.gresource.xml`` files to C code """ - if not self.env['GLIB_COMPILE_RESOURCES']: + if not self.env.GLIB_COMPILE_RESOURCES: raise Errors.WafError ("Unable to process GResource file - glib-compile-resources was not found during configure") if 'gresource' in self.features: @@ -348,7 +356,7 @@ def process_gresource_source(self, node): @feature('gresource') def process_gresource_bundle(self): """ - Generate a binary .gresource files from .gresource.xml files:: + Creates tasks to turn ``.gresource`` files from ``.gresource.xml`` files:: def build(bld): bld( @@ -368,26 +376,22 @@ def build(bld): task = self.create_task('glib_gresource_bundle', node, node.change_ext('')) inst_to = getattr(self, 'install_path', None) if inst_to: - self.bld.install_files(inst_to, task.outputs) + self.add_install_files(install_to=inst_to, install_from=task.outputs) class glib_gresource_base(Task.Task): """ - Base class for gresource based tasks, it implements the implicit dependencies scan. 
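# A minimal usage sketch (an assumed user wscript) for the GSettings support
# above: the schema is validated at build time, installed to GSETTINGSSCHEMADIR,
# and the schema cache is refreshed after installation:
def build(bld):
	tg = bld(features='glib2')
	tg.add_settings_schemas(['org.example.app.gschema.xml'])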
+ Base class for gresource based tasks """ color = 'BLUE' base_cmd = '${GLIB_COMPILE_RESOURCES} --sourcedir=${SRC[0].parent.srcpath()} --sourcedir=${SRC[0].bld_dir()}' def scan(self): """ - Scan gresource dependencies through ``glib-compile-resources --generate-dependencies command`` + Scans gresource dependencies through ``glib-compile-resources --generate-dependencies command`` """ bld = self.generator.bld kw = {} - try: - if not kw.get('cwd', None): - kw['cwd'] = bld.cwd - except AttributeError: - bld.cwd = kw['cwd'] = bld.variant_dir + kw['cwd'] = self.get_cwd() kw['quiet'] = Context.BOTH cmd = Utils.subst_vars('${GLIB_COMPILE_RESOURCES} --sourcedir=%s --sourcedir=%s --generate-dependencies %s' % ( @@ -435,9 +439,7 @@ def find_glib_genmarshal(conf): @conf def find_glib_mkenums(conf): - if not conf.env.PERL: - conf.find_program('perl', var='PERL') - conf.find_program('glib-mkenums', interpreter='PERL', var='GLIB_MKENUMS') + conf.find_program('glib-mkenums', var='GLIB_MKENUMS') @conf def find_glib_compile_schemas(conf): @@ -448,16 +450,15 @@ def find_glib_compile_schemas(conf): def getstr(varname): return getattr(Options.options, varname, getattr(conf.env,varname, '')) - # TODO make this dependent on the gnu_dirs tool? gsettingsschemadir = getstr('GSETTINGSSCHEMADIR') if not gsettingsschemadir: datadir = getstr('DATADIR') if not datadir: - prefix = conf.env['PREFIX'] + prefix = conf.env.PREFIX datadir = os.path.join(prefix, 'share') gsettingsschemadir = os.path.join(datadir, 'glib-2.0', 'schemas') - conf.env['GSETTINGSSCHEMADIR'] = gsettingsschemadir + conf.env.GSETTINGSSCHEMADIR = gsettingsschemadir @conf def find_glib_compile_resources(conf): @@ -465,14 +466,12 @@ def find_glib_compile_resources(conf): def configure(conf): """ - Find the following programs: + Finds the following programs: * *glib-genmarshal* and set *GLIB_GENMARSHAL* * *glib-mkenums* and set *GLIB_MKENUMS* * *glib-compile-schemas* and set *GLIB_COMPILE_SCHEMAS* (not mandatory) * *glib-compile-resources* and set *GLIB_COMPILE_RESOURCES* (not mandatory) - - And set the variable *GSETTINGSSCHEMADIR* """ conf.find_glib_genmarshal() conf.find_glib_mkenums() @@ -481,8 +480,7 @@ def configure(conf): def options(opt): """ - Add the ``--gsettingsschemadir`` command-line option + Adds the ``--gsettingsschemadir`` command-line option """ gr = opt.add_option_group('Installation directories') gr.add_option('--gsettingsschemadir', help='GSettings schema location [DATADIR/glib-2.0/schemas]', default='', dest='GSETTINGSSCHEMADIR') - diff --git a/waflib/Tools/gnu_dirs.py b/waflib/Tools/gnu_dirs.py index 987c4bcfc9..2847071d17 100644 --- a/waflib/Tools/gnu_dirs.py +++ b/waflib/Tools/gnu_dirs.py @@ -71,7 +71,7 @@ def configure(conf): """ - Read the command-line options to set lots of variables in *conf.env*. The variables + Reads the command-line options to set lots of variables in *conf.env*. The variables BINDIR and LIBDIR will be overwritten. """ def get_param(varname, default): @@ -101,7 +101,7 @@ def get_param(varname, default): def options(opt): """ - Add lots of command-line options, for example:: + Adds lots of command-line options, for example:: --exec-prefix: EXEC_PREFIX """ diff --git a/waflib/Tools/gxx.py b/waflib/Tools/gxx.py index b6f692cb0a..2667258197 100644 --- a/waflib/Tools/gxx.py +++ b/waflib/Tools/gxx.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # encoding: utf-8 -# Thomas Nagy, 2006-2010 (ita) +# Thomas Nagy, 2006-2018 (ita) # Ralf Habacker, 2006 (rh) # Yinon Ehrlich, 2009 @@ -8,15 +8,13 @@ g++/llvm detection. 
""" -import os, sys -from waflib import Configure, Options, Utils from waflib.Tools import ccroot, ar from waflib.Configure import conf @conf def find_gxx(conf): """ - Find the program g++, and if present, try to detect its version number + Finds the program g++, and if present, try to detect its version number """ cxx = conf.find_program(['g++', 'c++'], var='CXX') conf.get_cc_version(cxx, gcc=True) @@ -29,102 +27,102 @@ def gxx_common_flags(conf): """ v = conf.env - v['CXX_SRC_F'] = [] - v['CXX_TGT_F'] = ['-c', '-o'] + v.CXX_SRC_F = [] + v.CXX_TGT_F = ['-c', '-o'] - # linker - if not v['LINK_CXX']: v['LINK_CXX'] = v['CXX'] - v['CXXLNK_SRC_F'] = [] - v['CXXLNK_TGT_F'] = ['-o'] - v['CPPPATH_ST'] = '-I%s' - v['DEFINES_ST'] = '-D%s' + if not v.LINK_CXX: + v.LINK_CXX = v.CXX - v['LIB_ST'] = '-l%s' # template for adding libs - v['LIBPATH_ST'] = '-L%s' # template for adding libpaths - v['STLIB_ST'] = '-l%s' - v['STLIBPATH_ST'] = '-L%s' - v['RPATH_ST'] = '-Wl,-rpath,%s' + v.CXXLNK_SRC_F = [] + v.CXXLNK_TGT_F = ['-o'] + v.CPPPATH_ST = '-I%s' + v.DEFINES_ST = '-D%s' - v['SONAME_ST'] = '-Wl,-h,%s' - v['SHLIB_MARKER'] = '-Wl,-Bdynamic' - v['STLIB_MARKER'] = '-Wl,-Bstatic' + v.LIB_ST = '-l%s' # template for adding libs + v.LIBPATH_ST = '-L%s' # template for adding libpaths + v.STLIB_ST = '-l%s' + v.STLIBPATH_ST = '-L%s' + v.RPATH_ST = '-Wl,-rpath,%s' - # program - v['cxxprogram_PATTERN'] = '%s' + v.SONAME_ST = '-Wl,-h,%s' + v.SHLIB_MARKER = '-Wl,-Bdynamic' + v.STLIB_MARKER = '-Wl,-Bstatic' - # shared library - v['CXXFLAGS_cxxshlib'] = ['-fPIC'] - v['LINKFLAGS_cxxshlib'] = ['-shared'] - v['cxxshlib_PATTERN'] = 'lib%s.so' + v.cxxprogram_PATTERN = '%s' - # static lib - v['LINKFLAGS_cxxstlib'] = ['-Wl,-Bstatic'] - v['cxxstlib_PATTERN'] = 'lib%s.a' + v.CXXFLAGS_cxxshlib = ['-fPIC'] + v.LINKFLAGS_cxxshlib = ['-shared'] + v.cxxshlib_PATTERN = 'lib%s.so' - # osx stuff - v['LINKFLAGS_MACBUNDLE'] = ['-bundle', '-undefined', 'dynamic_lookup'] - v['CXXFLAGS_MACBUNDLE'] = ['-fPIC'] - v['macbundle_PATTERN'] = '%s.bundle' + v.LINKFLAGS_cxxstlib = ['-Wl,-Bstatic'] + v.cxxstlib_PATTERN = 'lib%s.a' + + v.LINKFLAGS_MACBUNDLE = ['-bundle', '-undefined', 'dynamic_lookup'] + v.CXXFLAGS_MACBUNDLE = ['-fPIC'] + v.macbundle_PATTERN = '%s.bundle' @conf def gxx_modifier_win32(conf): """Configuration flags for executing gcc on Windows""" v = conf.env - v['cxxprogram_PATTERN'] = '%s.exe' + v.cxxprogram_PATTERN = '%s.exe' - v['cxxshlib_PATTERN'] = '%s.dll' - v['implib_PATTERN'] = 'lib%s.dll.a' - v['IMPLIB_ST'] = '-Wl,--out-implib,%s' + v.cxxshlib_PATTERN = '%s.dll' + v.implib_PATTERN = '%s.dll.a' + v.IMPLIB_ST = '-Wl,--out-implib,%s' - v['CXXFLAGS_cxxshlib'] = [] + v.CXXFLAGS_cxxshlib = [] # Auto-import is enabled by default even without this option, # but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages # that the linker emits otherwise. 
- v.append_value('LINKFLAGS', ['-Wl,--enable-auto-import']) + import sys + if sys.platform != "cygwin": + # disabled on cygwin as it breaks build with arm cross compiler + v.append_value('LINKFLAGS', ['-Wl,--enable-auto-import']) @conf def gxx_modifier_cygwin(conf): """Configuration flags for executing g++ on Cygwin""" gxx_modifier_win32(conf) v = conf.env - v['cxxshlib_PATTERN'] = 'cyg%s.dll' + v.cxxshlib_PATTERN = 'cyg%s.dll' v.append_value('LINKFLAGS_cxxshlib', ['-Wl,--enable-auto-image-base']) - v['CXXFLAGS_cxxshlib'] = [] + v.CXXFLAGS_cxxshlib = [] @conf def gxx_modifier_darwin(conf): """Configuration flags for executing g++ on MacOS""" v = conf.env - v['CXXFLAGS_cxxshlib'] = ['-fPIC'] - v['LINKFLAGS_cxxshlib'] = ['-dynamiclib', '-Wl,-compatibility_version,1', '-Wl,-current_version,1'] - v['cxxshlib_PATTERN'] = 'lib%s.dylib' - v['FRAMEWORKPATH_ST'] = '-F%s' - v['FRAMEWORK_ST'] = ['-framework'] - v['ARCH_ST'] = ['-arch'] + v.CXXFLAGS_cxxshlib = ['-fPIC'] + v.LINKFLAGS_cxxshlib = ['-dynamiclib'] + v.cxxshlib_PATTERN = 'lib%s.dylib' + v.FRAMEWORKPATH_ST = '-F%s' + v.FRAMEWORK_ST = ['-framework'] + v.ARCH_ST = ['-arch'] - v['LINKFLAGS_cxxstlib'] = [] + v.LINKFLAGS_cxxstlib = [] - v['SHLIB_MARKER'] = [] - v['STLIB_MARKER'] = [] - v['SONAME_ST'] = [] + v.SHLIB_MARKER = [] + v.STLIB_MARKER = [] + v.SONAME_ST = [] @conf def gxx_modifier_aix(conf): """Configuration flags for executing g++ on AIX""" v = conf.env - v['LINKFLAGS_cxxprogram']= ['-Wl,-brtl'] + v.LINKFLAGS_cxxprogram= ['-Wl,-brtl'] - v['LINKFLAGS_cxxshlib'] = ['-shared', '-Wl,-brtl,-bexpfull'] - v['SHLIB_MARKER'] = [] + v.LINKFLAGS_cxxshlib = ['-shared', '-Wl,-brtl,-bexpfull'] + v.SHLIB_MARKER = [] @conf def gxx_modifier_hpux(conf): v = conf.env - v['SHLIB_MARKER'] = [] - v['STLIB_MARKER'] = [] - v['CFLAGS_cxxshlib'] = ['-fPIC','-DPIC'] - v['cxxshlib_PATTERN'] = 'lib%s.sl' + v.SHLIB_MARKER = [] + v.STLIB_MARKER = [] + v.CFLAGS_cxxshlib = ['-fPIC','-DPIC'] + v.cxxshlib_PATTERN = 'lib%s.sl' @conf def gxx_modifier_openbsd(conf): @@ -132,9 +130,10 @@ def gxx_modifier_openbsd(conf): @conf def gcc_modifier_osf1V(conf): - v['SHLIB_MARKER'] = [] - v['STLIB_MARKER'] = [] - v['SONAME_ST'] = [] + v = conf.env + v.SHLIB_MARKER = [] + v.STLIB_MARKER = [] + v.SONAME_ST = [] @conf def gxx_modifier_platform(conf): @@ -157,4 +156,5 @@ def configure(conf): conf.cxx_load_tools() conf.cxx_add_flags() conf.link_add_flags() + conf.check_gcc_o_space('cxx') diff --git a/waflib/Tools/icc.py b/waflib/Tools/icc.py index c50d4995c9..1b41ffcf70 100644 --- a/waflib/Tools/icc.py +++ b/waflib/Tools/icc.py @@ -1,33 +1,56 @@ #!/usr/bin/env python # encoding: utf-8 # Stian Selnes 2008 -# Thomas Nagy 2009-2010 (ita) +# Thomas Nagy 2009-2018 (ita) """ -Detect the Intel C compiler +Detects the Intel C compiler """ -import os, sys +from waflib import Utils from waflib.Tools import ccroot, ar, gcc from waflib.Configure import conf +from waflib.Tools import msvc @conf def find_icc(conf): """ - Find the program icc and execute it to ensure it really is icc + Finds the program icc and execute it to ensure it really is icc """ - if sys.platform == 'cygwin': - conf.fatal('The Intel compiler does not work on Cygwin') + if Utils.is_win32: + conf.find_program(['icx-cl'], var='ICXCL', mandatory=False) + if conf.env.ICXCL: + conf.env.INTEL_CLANG_COMPILER = True + conf.env.CC = conf.env.ICXCL + + if not conf.env.ICXCL: + cc = conf.find_program(['icx', 'icc', 'ICL'], var='CC') + conf.get_cc_version(cc, icc=True) - cc = conf.find_program(['icc', 'ICL'], var='CC') - 
conf.get_cc_version(cc, icc=True) conf.env.CC_NAME = 'icc' def configure(conf): conf.find_icc() - conf.find_ar() - conf.gcc_common_flags() - conf.gcc_modifier_platform() - conf.cc_load_tools() - conf.cc_add_flags() - conf.link_add_flags() + if conf.env.ICXCL and Utils.is_win32: + conf.find_msvc() + conf.find_program('MT', var='MT') + conf.env.MTFLAGS = ['/nologo'] + conf.env.MSVC_MANIFEST = True + + conf.msvc_common_flags() + + conf.env.CFLAGS = [] + conf.cc_load_tools() + conf.cc_add_flags() + conf.link_add_flags() + + conf.visual_studio_add_flags() + conf.env.CC_TGT_F = ['/FC', '/c', '/Fo'] + conf.env.CPPPATH_ST = '/I%s' + else: + conf.find_ar() + conf.gcc_common_flags() + conf.gcc_modifier_platform() + conf.cc_load_tools() + conf.cc_add_flags() + conf.link_add_flags() diff --git a/waflib/Tools/icpc.py b/waflib/Tools/icpc.py index ec1320edd2..04ec692177 100644 --- a/waflib/Tools/icpc.py +++ b/waflib/Tools/icpc.py @@ -1,33 +1,55 @@ #!/usr/bin/env python # encoding: utf-8 -# Thomas Nagy 2009-2010 (ita) +# Thomas Nagy 2009-2018 (ita) """ -Detect the Intel C++ compiler +Detects the Intel C++ compiler """ -import os, sys +from waflib import Utils from waflib.Tools import ccroot, ar, gxx from waflib.Configure import conf +from waflib.Tools import msvc @conf def find_icpc(conf): """ - Find the program icpc, and execute it to ensure it really is icpc + Finds the program icpc, and execute it to ensure it really is icpc """ - if sys.platform == 'cygwin': - conf.fatal('The Intel compiler does not work on Cygwin') + if Utils.is_win32: + conf.find_program(['icx-cl'], var='ICPXCL', mandatory=False) + if conf.env.ICPXCL: + conf.env.INTEL_CLANG_COMPILER = True + conf.env.CXX = conf.env.ICPXCL + + if not conf.env.ICPXCL: + cc = conf.find_program(['icpx', 'icpc', 'ICL'], var='CXX') + conf.get_cc_version(cc, icc=True) - cxx = conf.find_program('icpc', var='CXX') - conf.get_cc_version(cxx, icc=True) conf.env.CXX_NAME = 'icc' def configure(conf): conf.find_icpc() - conf.find_ar() - conf.gxx_common_flags() - conf.gxx_modifier_platform() - conf.cxx_load_tools() - conf.cxx_add_flags() - conf.link_add_flags() + if conf.env.ICPXCL and Utils.is_win32: + conf.find_msvc() + conf.find_program('MT', var='MT') + conf.env.MTFLAGS = ['/nologo'] + conf.env.MSVC_MANIFEST = True + + conf.msvc_common_flags() + + conf.env.CXXFLAGS = [] + conf.cc_load_tools() + conf.cc_add_flags() + conf.link_add_flags() + conf.visual_studio_add_flags() + conf.env.CXX_TGT_F = ['/c', '/Fo'] + conf.env.CPPPATH_ST = '/I%s' + else: + conf.find_ar() + conf.gxx_common_flags() + conf.gxx_modifier_platform() + conf.cc_load_tools() + conf.cc_add_flags() + conf.link_add_flags() diff --git a/waflib/Tools/ifort.py b/waflib/Tools/ifort.py index 8d00a01317..30230f5c38 100644 --- a/waflib/Tools/ifort.py +++ b/waflib/Tools/ifort.py @@ -1,26 +1,46 @@ #! 
/usr/bin/env python # encoding: utf-8 # DC 2008 -# Thomas Nagy 2010 (ita) +# Thomas Nagy 2016-2018 (ita) -import re -from waflib import Utils -from waflib.Tools import fc, fc_config, fc_scan, ar +import os, re, traceback +from waflib import Utils, Logs, Errors +from waflib.Tools import fc, fc_config, fc_scan, ar, ccroot from waflib.Configure import conf +from waflib.TaskGen import after_method, feature @conf def find_ifort(conf): - fc = conf.find_program('ifort', var='FC') + fc = conf.find_program(['ifx', 'ifort'], var='FC') conf.get_ifort_version(fc) conf.env.FC_NAME = 'IFORT' @conf -def ifort_modifier_cygwin(conf): - raise NotImplementedError("Ifort on cygwin not yet implemented") +def ifort_modifier_win32(self): + v = self.env + v.IFORT_WIN32 = True + v.FCSTLIB_MARKER = '' + v.FCSHLIB_MARKER = '' -@conf -def ifort_modifier_win32(conf): - fc_config.fortran_modifier_win32(conf) + v.FCLIB_ST = v.FCSTLIB_ST = '%s.lib' + v.FCLIBPATH_ST = v.STLIBPATH_ST = '/LIBPATH:%s' + v.FCINCPATH_ST = '/I%s' + v.FCDEFINES_ST = '/D%s' + + v.fcprogram_PATTERN = v.fcprogram_test_PATTERN = '%s.exe' + v.fcshlib_PATTERN = '%s.dll' + v.fcstlib_PATTERN = v.implib_PATTERN = '%s.lib' + + v.FCLNK_TGT_F = '/out:' + v.FC_TGT_F = ['/c', '/o', ''] + v.FCFLAGS_fcshlib = '' + v.LINKFLAGS_fcshlib = '/DLL' + v.AR_TGT_F = '/out:' + v.IMPLIB_ST = '/IMPLIB:%s' + + v.append_value('LINKFLAGS', '/subsystem:console') + if v.IFORT_MANIFEST: + v.append_value('LINKFLAGS', ['/MANIFEST']) @conf def ifort_modifier_darwin(conf): @@ -28,16 +48,17 @@ def ifort_modifier_darwin(conf): @conf def ifort_modifier_platform(conf): - dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform() + dest_os = conf.env.DEST_OS or Utils.unversioned_sys_platform() ifort_modifier_func = getattr(conf, 'ifort_modifier_' + dest_os, None) if ifort_modifier_func: ifort_modifier_func() @conf def get_ifort_version(conf, fc): - """get the compiler version""" - - version_re = re.compile(r"Intel[\sa-zA-Z()0-9,-]*Version\s*(?P\d*)\.(?P\d*)",re.I).search + """ + Detects the compiler version and sets ``conf.env.FC_VERSION`` + """ + version_re = re.compile(r"\bIntel\b.*\bVersion\s*(?P\d*)\.(?P\d*)",re.I).search if Utils.is_win32: cmd = fc else: @@ -48,13 +69,345 @@ def get_ifort_version(conf, fc): if not match: conf.fatal('cannot determine ifort version.') k = match.groupdict() - conf.env['FC_VERSION'] = (k['major'], k['minor']) + conf.env.FC_VERSION = (k['major'], k['minor']) def configure(conf): - conf.find_ifort() - conf.find_program('xiar', var='AR') - conf.env.ARFLAGS = 'rcs' - conf.fc_flags() - conf.fc_add_flags() - conf.ifort_modifier_platform() + """ + Detects the Intel Fortran compilers + """ + if Utils.is_win32: + compiler, version, path, includes, libdirs, arch = conf.detect_ifort() + v = conf.env + v.DEST_CPU = arch + v.PATH = path + v.INCLUDES = includes + v.LIBPATH = libdirs + v.MSVC_COMPILER = compiler + try: + v.MSVC_VERSION = float(version) + except ValueError: + v.MSVC_VERSION = float(version[:-3]) + + conf.find_ifort_win32() + conf.ifort_modifier_win32() + else: + conf.find_ifort() + conf.find_program('xiar', var='AR') + conf.find_ar() + conf.fc_flags() + conf.fc_add_flags() + conf.ifort_modifier_platform() + + +all_ifort_platforms = [ ('intel64', 'amd64'), ('em64t', 'amd64'), ('ia32', 'x86'), ('Itanium', 'ia64')] +"""List of icl platforms""" + +@conf +def gather_ifort_versions(conf, versions): + """ + List compiler versions by looking up registry keys + """ + version_pattern = re.compile(r'^...?.?\....?.?') + try: + all_versions = 
Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Intel\\Compilers\\Fortran') + except OSError: + try: + all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Intel\\Compilers\\Fortran') + except OSError: + return + index = 0 + while 1: + try: + version = Utils.winreg.EnumKey(all_versions, index) + except OSError: + break + index += 1 + if not version_pattern.match(version): + continue + targets = {} + for target,arch in all_ifort_platforms: + if target=='intel64': + targetDir='EM64T_NATIVE' + else: + targetDir=target + try: + Utils.winreg.OpenKey(all_versions,version+'\\'+targetDir) + icl_version=Utils.winreg.OpenKey(all_versions,version) + path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir') + except OSError: + pass + else: + batch_file=os.path.join(path,'bin','ifortvars.bat') + if os.path.isfile(batch_file): + targets[target] = target_compiler(conf, 'intel', arch, version, target, batch_file) + + for target,arch in all_ifort_platforms: + try: + icl_version = Utils.winreg.OpenKey(all_versions, version+'\\'+target) + path,type = Utils.winreg.QueryValueEx(icl_version,'ProductDir') + except OSError: + continue + else: + batch_file=os.path.join(path,'bin','ifortvars.bat') + if os.path.isfile(batch_file): + targets[target] = target_compiler(conf, 'intel', arch, version, target, batch_file) + major = version[0:2] + versions['intel ' + major] = targets + +@conf +def setup_ifort(conf, versiondict): + """ + Checks installed compilers and targets and returns the first combination from the user's + options, env, or the global supported lists that checks. + + :param versiondict: dict(platform -> dict(architecture -> configuration)) + :type versiondict: dict(string -> dict(string -> target_compiler) + :return: the compiler, revision, path, include dirs, library paths and target architecture + :rtype: tuple of strings + """ + platforms = Utils.to_list(conf.env.MSVC_TARGETS) or [i for i,j in all_ifort_platforms] + desired_versions = conf.env.MSVC_VERSIONS or list(reversed(list(versiondict.keys()))) + for version in desired_versions: + try: + targets = versiondict[version] + except KeyError: + continue + for arch in platforms: + try: + cfg = targets[arch] + except KeyError: + continue + cfg.evaluate() + if cfg.is_valid: + compiler,revision = version.rsplit(' ', 1) + return compiler,revision,cfg.bindirs,cfg.incdirs,cfg.libdirs,cfg.cpu + conf.fatal('ifort: Impossible to find a valid architecture for building %r - %r' % (desired_versions, list(versiondict.keys()))) + +@conf +def get_ifort_version_win32(conf, compiler, version, target, vcvars): + # FIXME hack + try: + conf.msvc_cnt += 1 + except AttributeError: + conf.msvc_cnt = 1 + batfile = conf.bldnode.make_node('waf-print-msvc-%d.bat' % conf.msvc_cnt) + batfile.write("""@echo off +set INCLUDE= +set LIB= +call "%s" %s +echo PATH=%%PATH%% +echo INCLUDE=%%INCLUDE%% +echo LIB=%%LIB%%;%%LIBPATH%% +""" % (vcvars,target)) + sout = conf.cmd_and_log(['cmd.exe', '/E:on', '/V:on', '/C', batfile.abspath()]) + batfile.delete() + lines = sout.splitlines() + + if not lines[0]: + lines.pop(0) + + MSVC_PATH = MSVC_INCDIR = MSVC_LIBDIR = None + for line in lines: + if line.startswith('PATH='): + path = line[5:] + MSVC_PATH = path.split(';') + elif line.startswith('INCLUDE='): + MSVC_INCDIR = [i for i in line[8:].split(';') if i] + elif line.startswith('LIB='): + MSVC_LIBDIR = [i for i in line[4:].split(';') if i] + if None in (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR): + conf.fatal('ifort: Could not find a valid 
architecture for building (get_ifort_version_win32)') + + # Check if the compiler is usable at all. + # The detection may return 64-bit versions even on 32-bit systems, and these would fail to run. + env = dict(os.environ) + env.update(PATH = path) + compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler) + fc = conf.find_program(compiler_name, path_list=MSVC_PATH) + + # delete CL if exists. because it could contain parameters which can change cl's behaviour rather catastrophically. + if 'CL' in env: + del(env['CL']) + + try: + conf.cmd_and_log(fc + ['/help'], env=env) + except UnicodeError: + st = traceback.format_exc() + if conf.logger: + conf.logger.error(st) + conf.fatal('ifort: Unicode error - check the code page?') + except Exception as e: + Logs.debug('ifort: get_ifort_version: %r %r %r -> failure %s', compiler, version, target, str(e)) + conf.fatal('ifort: cannot run the compiler in get_ifort_version (run with -v to display errors)') + else: + Logs.debug('ifort: get_ifort_version: %r %r %r -> OK', compiler, version, target) + finally: + conf.env[compiler_name] = '' + + return (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR) + +class target_compiler(object): + """ + Wraps a compiler configuration; call evaluate() to determine + whether the configuration is usable. + """ + def __init__(self, ctx, compiler, cpu, version, bat_target, bat, callback=None): + """ + :param ctx: configuration context to use to eventually get the version environment + :param compiler: compiler name + :param cpu: target cpu + :param version: compiler version number + :param bat_target: ? + :param bat: path to the batch file to run + :param callback: optional function to take the realized environment variables tup and map it (e.g. to combine other constant paths) + """ + self.conf = ctx + self.name = None + self.is_valid = False + self.is_done = False + + self.compiler = compiler + self.cpu = cpu + self.version = version + self.bat_target = bat_target + self.bat = bat + self.callback = callback + + def evaluate(self): + if self.is_done: + return + self.is_done = True + try: + vs = self.conf.get_ifort_version_win32(self.compiler, self.version, self.bat_target, self.bat) + except Errors.ConfigurationError: + self.is_valid = False + return + if self.callback: + vs = self.callback(self, vs) + self.is_valid = True + (self.bindirs, self.incdirs, self.libdirs) = vs + + def __str__(self): + return str((self.bindirs, self.incdirs, self.libdirs)) + + def __repr__(self): + return repr((self.bindirs, self.incdirs, self.libdirs)) + +@conf +def detect_ifort(self): + return self.setup_ifort(self.get_ifort_versions(False)) + +@conf +def get_ifort_versions(self, eval_and_save=True): + """ + :return: platforms to compiler configurations + :rtype: dict + """ + dct = {} + self.gather_ifort_versions(dct) + return dct + +def _get_prog_names(self, compiler): + if compiler=='intel': + compiler_name = 'ifort' + linker_name = 'XILINK' + lib_name = 'XILIB' + else: + # assumes CL.exe + compiler_name = 'CL' + linker_name = 'LINK' + lib_name = 'LIB' + return compiler_name, linker_name, lib_name + +@conf +def find_ifort_win32(conf): + # the autodetection is supposed to be performed before entering in this method + v = conf.env + path = v.PATH + compiler = v.MSVC_COMPILER + version = v.MSVC_VERSION + + compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler) + v.IFORT_MANIFEST = (compiler == 'intel' and version >= 11) + + # compiler + fc = conf.find_program(compiler_name, var='FC', path_list=path) + + # before setting 
anything, check if the compiler is really intel fortran + env = dict(conf.environ) + if path: + env.update(PATH = ';'.join(path)) + if not conf.cmd_and_log(fc + ['/nologo', '/help'], env=env): + conf.fatal('not intel fortran compiler could not be identified') + + v.FC_NAME = 'IFORT' + + if not v.LINK_FC: + conf.find_program(linker_name, var='LINK_FC', path_list=path, mandatory=True) + + if not v.AR: + conf.find_program(lib_name, path_list=path, var='AR', mandatory=True) + v.ARFLAGS = ['/nologo'] + + # manifest tool. Not required for VS 2003 and below. Must have for VS 2005 and later + if v.IFORT_MANIFEST: + conf.find_program('MT', path_list=path, var='MT') + v.MTFLAGS = ['/nologo'] + + try: + conf.load('winres') + except Errors.WafError: + Logs.warn('Resource compiler not found. Compiling resource file is disabled') + +####################################################################################################### +##### conf above, build below + +@after_method('apply_link') +@feature('fc') +def apply_flags_ifort(self): + """ + Adds additional flags implied by msvc, such as subsystems and pdb files:: + + def build(bld): + bld.stlib(source='main.c', target='bar', subsystem='gruik') + """ + if not self.env.IFORT_WIN32 or not getattr(self, 'link_task', None): + return + + is_static = isinstance(self.link_task, ccroot.stlink_task) + + subsystem = getattr(self, 'subsystem', '') + if subsystem: + subsystem = '/subsystem:%s' % subsystem + flags = is_static and 'ARFLAGS' or 'LINKFLAGS' + self.env.append_value(flags, subsystem) + + if not is_static: + for f in self.env.LINKFLAGS: + d = f.lower() + if d[1:] == 'debug': + pdbnode = self.link_task.outputs[0].change_ext('.pdb') + self.link_task.outputs.append(pdbnode) + + if getattr(self, 'install_task', None): + self.pdb_install_task = self.add_install_files(install_to=self.install_task.install_to, install_from=pdbnode) + + break + +@feature('fcprogram', 'fcshlib', 'fcprogram_test') +@after_method('apply_link') +def apply_manifest_ifort(self): + """ + Enables manifest embedding in Fortran DLLs when using ifort on Windows + See: http://msdn2.microsoft.com/en-us/library/ms235542(VS.80).aspx + """ + if self.env.IFORT_WIN32 and getattr(self, 'link_task', None): + # it seems ifort.exe cannot be called for linking + self.link_task.env.FC = self.env.LINK_FC + + if self.env.IFORT_WIN32 and self.env.IFORT_MANIFEST and getattr(self, 'link_task', None): + out_node = self.link_task.outputs[0] + man_node = out_node.parent.find_or_declare(out_node.name + '.manifest') + self.link_task.outputs.append(man_node) + self.env.DO_MANIFEST = True diff --git a/waflib/Tools/intltool.py b/waflib/Tools/intltool.py index 3ea9f4461e..af95ba80ed 100644 --- a/waflib/Tools/intltool.py +++ b/waflib/Tools/intltool.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # encoding: utf-8 -# Thomas Nagy, 2006-2010 (ita) +# Thomas Nagy, 2006-2018 (ita) """ Support for translation tools such as msgfmt and intltool @@ -27,8 +27,10 @@ def build(bld): Usage of the :py:mod:`waflib.Tools.gnu_dirs` is recommended, but not obligatory. 
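A combined sketch of both features below (attribute values are illustrative, not taken from a real project; ``DATAROOTDIR`` assumes the gnu_dirs tool is loaded)::

	def configure(conf):
		conf.load('intltool')

	def build(bld):
		# merge translations into a .desktop file with intltool-merge
		bld(features='intltool_in', source='data/myapp.desktop.in', style='desktop',
			install_path='${DATAROOTDIR}/applications')
		# compile the catalogues listed in po/LINGUAS with msgfmt
		bld(features='intltool_po', appname='myapp', podir='po', install_path='${LOCALEDIR}')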
""" +from __future__ import with_statement + import os, re -from waflib import Configure, Context, TaskGen, Task, Utils, Runner, Options, Build, Logs +from waflib import Context, Task, Utils, Logs import waflib.Tools.ccroot from waflib.TaskGen import feature, before_method, taskgen_method from waflib.Logs import error @@ -47,6 +49,9 @@ def build(bld): @taskgen_method def ensure_localedir(self): + """ + Expands LOCALEDIR from DATAROOTDIR/locale if possible, or falls back to PREFIX/share/locale + """ # use the tool gnu_dirs to provide options to define this if not self.env.LOCALEDIR: if self.env.DATAROOTDIR: @@ -58,7 +63,7 @@ def ensure_localedir(self): @feature('intltool_in') def apply_intltool_in_f(self): """ - Create tasks to translate files by intltool-merge:: + Creates tasks to translate files by intltool-merge:: def build(bld): bld( @@ -75,16 +80,18 @@ def build(bld): :param source: source files to process :type source: list of string :param style: the intltool-merge mode of operation, can be one of the following values: - ``ba``, ``desktop``, ``keys``, ``quoted``, ``quotedxml``, ``rfc822deb``, ``schemas`` and ``xml``. - See the ``intltool-merge`` man page for more information about supported modes of operation. + ``ba``, ``desktop``, ``keys``, ``quoted``, ``quotedxml``, ``rfc822deb``, ``schemas`` and ``xml``. + See the ``intltool-merge`` man page for more information about supported modes of operation. :type style: string :param flags: compilation flags ("-quc" by default) :type flags: list of string :param install_path: installation path :type install_path: string """ - try: self.meths.remove('process_source') - except ValueError: pass + try: + self.meths.remove('process_source') + except ValueError: + pass self.ensure_localedir() @@ -117,12 +124,12 @@ def build(bld): task = self.create_task('intltool', node, node.change_ext('')) inst = getattr(self, 'install_path', None) if inst: - self.bld.install_files(inst, task.outputs) + self.add_install_files(install_to=inst, install_from=task.outputs) @feature('intltool_po') def apply_intltool_po(self): """ - Create tasks to process po files:: + Creates tasks to process po files:: def build(bld): bld(features='intltool_po', appname='myapp', podir='po', install_path="${LOCALEDIR}") @@ -138,8 +145,10 @@ def build(bld): The file LINGUAS must be present in the directory pointed by *podir* and list the translation files to process. 
""" - try: self.meths.remove('process_source') - except ValueError: pass + try: + self.meths.remove('process_source') + except ValueError: + pass self.ensure_localedir() @@ -150,13 +159,12 @@ def build(bld): linguas = self.path.find_node(os.path.join(podir, 'LINGUAS')) if linguas: # scan LINGUAS file for locales to process - file = open(linguas.abspath()) - langs = [] - for line in file.readlines(): - # ignore lines containing comments - if not line.startswith('#'): - langs += line.split() - file.close() + with open(linguas.abspath()) as f: + langs = [] + for line in f.readlines(): + # ignore lines containing comments + if not line.startswith('#'): + langs += line.split() re_linguas = re.compile('[-a-zA-Z_@.]+') for lang in langs: # Make sure that we only process lines which contain locales @@ -168,31 +176,38 @@ def build(bld): filename = task.outputs[0].name (langname, ext) = os.path.splitext(filename) inst_file = inst + os.sep + langname + os.sep + 'LC_MESSAGES' + os.sep + appname + '.mo' - self.bld.install_as(inst_file, task.outputs[0], chmod=getattr(self, 'chmod', Utils.O644), env=task.env) + self.add_install_as(install_to=inst_file, install_from=task.outputs[0], + chmod=getattr(self, 'chmod', Utils.O644)) else: Logs.pprint('RED', "Error no LINGUAS file found in po directory") class po(Task.Task): """ - Compile .po files into .gmo files + Compiles .po files into .gmo files """ run_str = '${MSGFMT} -o ${TGT} ${SRC}' color = 'BLUE' class intltool(Task.Task): """ - Let intltool-merge translate an input file + Calls intltool-merge to update translation files """ run_str = '${INTLTOOL} ${INTLFLAGS} ${INTLCACHE_ST:INTLCACHE} ${INTLPODIR} ${SRC} ${TGT}' color = 'BLUE' @conf def find_msgfmt(conf): + """ + Detects msgfmt and sets the ``MSGFMT`` variable + """ conf.find_program('msgfmt', var='MSGFMT') @conf def find_intltool_merge(conf): + """ + Detects intltool-merge + """ if not conf.env.PERL: conf.find_program('perl', var='PERL') conf.env.INTLCACHE_ST = '--cache=%s' @@ -201,8 +216,8 @@ def find_intltool_merge(conf): def configure(conf): """ - Detect the program *msgfmt* and set *conf.env.MSGFMT*. - Detect the program *intltool-merge* and set *conf.env.INTLTOOL*. + Detects the program *msgfmt* and set *conf.env.MSGFMT*. + Detects the program *intltool-merge* and set *conf.env.INTLTOOL*. It is possible to set INTLTOOL in the environment, but it must not have spaces in it:: $ INTLTOOL="/path/to/the program/intltool" waf configure @@ -211,7 +226,6 @@ def configure(conf): """ conf.find_msgfmt() conf.find_intltool_merge() - if conf.env.CC or conf.env.CXX: conf.check(header_name='locale.h') diff --git a/waflib/Tools/irixcc.py b/waflib/Tools/irixcc.py index be90eee0b9..0335c13cb6 100644 --- a/waflib/Tools/irixcc.py +++ b/waflib/Tools/irixcc.py @@ -1,59 +1,51 @@ #! 
/usr/bin/env python +# encoding: utf-8 # imported from samba """ -compiler definition for irix/MIPSpro cc compiler -based on suncc.py from waf +Compiler definition for irix/MIPSpro cc compiler """ -import os -from waflib import Utils +from waflib import Errors from waflib.Tools import ccroot, ar from waflib.Configure import conf @conf def find_irixcc(conf): v = conf.env - cc = None - if v['CC']: cc = v['CC'] - elif 'CC' in conf.environ: cc = conf.environ['CC'] - if not cc: cc = conf.find_program('cc', var='CC') - if not cc: conf.fatal('irixcc was not found') - + cc = conf.find_program('cc', var='CC') try: conf.cmd_and_log(cc + ['-version']) - except Exception: + except Errors.WafError: conf.fatal('%r -version could not be executed' % cc) - - v['CC'] = cc - v['CC_NAME'] = 'irix' + v.CC_NAME = 'irix' @conf def irixcc_common_flags(conf): v = conf.env - v['CC_SRC_F'] = '' - v['CC_TGT_F'] = ['-c', '-o'] - v['CPPPATH_ST'] = '-I%s' - v['DEFINES_ST'] = '-D%s' + v.CC_SRC_F = '' + v.CC_TGT_F = ['-c', '-o'] + v.CPPPATH_ST = '-I%s' + v.DEFINES_ST = '-D%s' + + if not v.LINK_CC: + v.LINK_CC = v.CC - # linker - if not v['LINK_CC']: v['LINK_CC'] = v['CC'] - v['CCLNK_SRC_F'] = '' - v['CCLNK_TGT_F'] = ['-o'] + v.CCLNK_SRC_F = '' + v.CCLNK_TGT_F = ['-o'] - v['LIB_ST'] = '-l%s' # template for adding libs - v['LIBPATH_ST'] = '-L%s' # template for adding libpaths - v['STLIB_ST'] = '-l%s' - v['STLIBPATH_ST'] = '-L%s' + v.LIB_ST = '-l%s' # template for adding libs + v.LIBPATH_ST = '-L%s' # template for adding libpaths + v.STLIB_ST = '-l%s' + v.STLIBPATH_ST = '-L%s' - v['cprogram_PATTERN'] = '%s' - v['cshlib_PATTERN'] = 'lib%s.so' - v['cstlib_PATTERN'] = 'lib%s.a' + v.cprogram_PATTERN = '%s' + v.cshlib_PATTERN = 'lib%s.so' + v.cstlib_PATTERN = 'lib%s.a' def configure(conf): conf.find_irixcc() - conf.find_cpp() conf.find_ar() conf.irixcc_common_flags() conf.cc_load_tools() diff --git a/waflib/Tools/javaw.py b/waflib/Tools/javaw.py index ce9a812635..b7f5dd1f87 100644 --- a/waflib/Tools/javaw.py +++ b/waflib/Tools/javaw.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # encoding: utf-8 -# Thomas Nagy, 2006-2010 (ita) +# Thomas Nagy, 2006-2018 (ita) """ Java support @@ -24,17 +24,99 @@ java -jar /path/to/jython.jar waf configure [1] http://www.jython.org/ + +Usage +===== + +Load the "java" tool. + +def configure(conf): + conf.load('java') + +Java tools will be autodetected and eventually, if present, the quite +standard JAVA_HOME environment variable will be used. The also standard +CLASSPATH variable is used for library searching. + +In configuration phase checks can be done on the system environment, for +example to check if a class is known in the classpath:: + + conf.check_java_class('java.io.FileOutputStream') + +or if the system supports JNI applications building:: + + conf.check_jni_headers() + + +The java tool supports compiling java code, creating jar files and +creating javadoc documentation. This can be either done separately or +together in a single definition. 
For example to manage them separately:: + + bld(features = 'javac', + srcdir = 'src', + compat = '1.7', + use = 'animals', + name = 'cats-src', + ) + + bld(features = 'jar', + basedir = '.', + destfile = '../cats.jar', + name = 'cats', + use = 'cats-src' + ) + + +Or together by defining all the needed attributes:: + + bld(features = 'javac jar javadoc', + srcdir = 'src/', # folder containing the sources to compile + outdir = 'src', # folder where to output the classes (in the build directory) + compat = '1.6', # java compatibility version number + classpath = ['.', '..'], + + # jar + basedir = 'src', # folder containing the classes and other files to package (must match outdir) + destfile = 'foo.jar', # do not put the destfile in the folder of the java classes! + use = 'NNN', + jaropts = ['-C', 'default/src/', '.'], # can be used to give files + manifest = 'src/Manifest.mf', # Manifest file to include + + # javadoc + javadoc_package = ['com.meow' , 'com.meow.truc.bar', 'com.meow.truc.foo'], + javadoc_output = 'javadoc', + ) + +External jar dependencies can be mapped to a standard waf "use" dependency by +setting an environment variable with a CLASSPATH prefix in the configuration, +for example:: + + conf.env.CLASSPATH_NNN = ['aaaa.jar', 'bbbb.jar'] + +and then NNN can be freely used in rules as:: + + use = 'NNN', + +In the java tool the dependencies via use are not transitive by default, as +this necessity depends on the code. To enable recursive dependency scanning +use on a specific rule: + + recurse_use = True + +Or build-wise by setting RECURSE_JAVA: + + bld.env.RECURSE_JAVA = True + +Unit tests can be integrated in the waf unit test environment using the javatest extra. """ -import os, re, tempfile, shutil -from waflib import TaskGen, Task, Utils, Options, Build, Errors, Node, Logs +import os, shutil +from waflib import Task, Utils, Errors, Node from waflib.Configure import conf -from waflib.TaskGen import feature, before_method, after_method +from waflib.TaskGen import feature, before_method, after_method, taskgen_method from waflib.Tools import ccroot ccroot.USELIB_VARS['javac'] = set(['CLASSPATH', 'JAVACFLAGS']) - SOURCE_RE = '**/*.java' JAR_RE = '**/*' @@ -69,8 +151,6 @@ def apply_java(self): sourcepath='.', srcdir='.', jar_mf_attributes={}, jar_mf_classpath=[]) - nodes_lst = [] - outdir = getattr(self, 'outdir', None) if outdir: if not isinstance(outdir, Node.Node): @@ -79,7 +159,7 @@ def apply_java(self): outdir = self.path.get_bld() outdir.mkdir() self.outdir = outdir - self.env['OUTDIR'] = outdir.abspath() + self.env.OUTDIR = outdir.abspath() self.javac_task = tsk = self.create_task('javac') tmp = [] @@ -95,10 +175,11 @@ def apply_java(self): if not y: self.bld.fatal('Could not find the folder %s from %s' % (x, self.path)) tmp.append(y) + tsk.srcdir = tmp if getattr(self, 'compat', None): - tsk.env.append_value('JAVACFLAGS', ['-source', self.compat]) + tsk.env.append_value('JAVACFLAGS', ['-source', str(self.compat)]) if hasattr(self, 'sourcepath'): fold = [isinstance(x, Node.Node) and x or self.path.find_dir(x) for x in self.to_list(self.sourcepath)] @@ -109,36 +190,86 @@ def apply_java(self): if names: tsk.env.append_value('JAVACFLAGS', ['-sourcepath', names]) + +@taskgen_method +def java_use_rec(self, name, **kw): + """ + Processes recursively the *use* attribute for each referred java compilation + """ + if name in self.tmp_use_seen: + return + + self.tmp_use_seen.append(name) + + try: + y = self.bld.get_tgen_by_name(name) + except Errors.WafError: + 
self.uselib.append(name) + return + else: + y.post() + # Add generated JAR name for CLASSPATH. Task ordering (set_run_after) + # is already guaranteed by ordering done between the single tasks + if hasattr(y, 'jar_task'): + self.use_lst.append(y.jar_task.outputs[0].abspath()) + else: + if hasattr(y,'outdir'): + self.use_lst.append(y.outdir.abspath()) + else: + self.use_lst.append(y.path.get_bld().abspath()) + + for x in self.to_list(getattr(y, 'use', [])): + self.java_use_rec(x) + @feature('javac') +@before_method('propagate_uselib_vars') @after_method('apply_java') def use_javac_files(self): """ - Process the *use* attribute referring to other java compilations + Processes the *use* attribute referring to other java compilations """ - lst = [] + self.use_lst = [] + self.tmp_use_seen = [] self.uselib = self.to_list(getattr(self, 'uselib', [])) names = self.to_list(getattr(self, 'use', [])) get = self.bld.get_tgen_by_name for x in names: try: - y = get(x) - except Exception: + tg = get(x) + except Errors.WafError: self.uselib.append(x) else: - y.post() - lst.append(y.jar_task.outputs[0].abspath()) - self.javac_task.set_run_after(y.jar_task) + tg.post() + if hasattr(tg, 'jar_task'): + self.use_lst.append(tg.jar_task.outputs[0].abspath()) + self.javac_task.set_run_after(tg.jar_task) + self.javac_task.dep_nodes.extend(tg.jar_task.outputs) + else: + if hasattr(tg, 'outdir'): + base_node = tg.outdir + else: + base_node = tg.path.get_bld() - if lst: - self.env.append_value('CLASSPATH', lst) + self.use_lst.append(base_node.abspath()) + self.javac_task.dep_nodes.extend([dx for dx in base_node.ant_glob(JAR_RE, remove=False, quiet=True)]) + + for tsk in tg.tasks: + self.javac_task.set_run_after(tsk) + + # If recurse use scan is enabled recursively add use attribute for each used one + if getattr(self, 'recurse_use', False) or self.bld.env.RECURSE_JAVA: + self.java_use_rec(x) + + self.env.append_value('CLASSPATH', self.use_lst) @feature('javac') @after_method('apply_java', 'propagate_uselib_vars', 'use_javac_files') def set_classpath(self): """ - Set the CLASSPATH value on the *javac* task previously created. + Sets the CLASSPATH value on the *javac* task previously created. """ - self.env.append_value('CLASSPATH', getattr(self, 'classpath', [])) + if getattr(self, 'classpath', None): + self.env.append_unique('CLASSPATH', getattr(self, 'classpath', [])) for x in self.tasks: x.env.CLASSPATH = os.pathsep.join(self.env.CLASSPATH) + os.pathsep @@ -147,7 +278,7 @@ def set_classpath(self): @before_method('process_source') def jar_files(self): """ - Create a jar task. There can be only one jar task by task generator. 
+ Creates a jar task (one maximum per task generator) """ destfile = getattr(self, 'destfile', 'test.jar') jaropts = getattr(self, 'jaropts', []) @@ -165,7 +296,12 @@ def jar_files(self): self.jar_task = tsk = self.create_task('jar_create') if manifest: jarcreate = getattr(self, 'jarcreate', 'cfm') - node = self.path.find_node(manifest) + if not isinstance(manifest,Node.Node): + node = self.path.find_resource(manifest) + else: + node = manifest + if not node: + self.bld.fatal('invalid manifest file %r for %r' % (manifest, self)) tsk.dep_nodes.append(node) jaropts.insert(0, node.abspath()) else: @@ -181,8 +317,8 @@ def jar_files(self): jaropts.append(basedir.bldpath()) jaropts.append('.') - tsk.env['JAROPTS'] = jaropts - tsk.env['JARCREATE'] = jarcreate + tsk.env.JAROPTS = jaropts + tsk.env.JARCREATE = jarcreate if getattr(self, 'javac_task', None): tsk.set_run_after(self.javac_task) @@ -191,25 +327,39 @@ def jar_files(self): @after_method('jar_files') def use_jar_files(self): """ - Process the *use* attribute to set the build order on the + Processes the *use* attribute to set the build order on the tasks created by another task generator. """ - lst = [] self.uselib = self.to_list(getattr(self, 'uselib', [])) names = self.to_list(getattr(self, 'use', [])) get = self.bld.get_tgen_by_name for x in names: try: y = get(x) - except Exception: + except Errors.WafError: self.uselib.append(x) else: y.post() self.jar_task.run_after.update(y.tasks) -class jar_create(Task.Task): +class JTask(Task.Task): + """ + Base class for java and jar tasks; provides functionality to run long commands + """ + def split_argfile(self, cmd): + inline = [cmd[0]] + infile = [] + for x in cmd[1:]: + # jar and javac do not want -J flags in @file + if x.startswith('-J'): + inline.append(x) + else: + infile.append(self.quote_flag(x)) + return (inline, infile) + +class jar_create(JTask): """ - Create a jar file + Creates a jar file """ color = 'GREEN' run_str = '${JAR} ${JARCREATE} ${TGT} ${JAROPTS}' @@ -223,93 +373,58 @@ def runnable_status(self): if not t.hasrun: return Task.ASK_LATER if not self.inputs: - global JAR_RE try: - self.inputs = [x for x in self.basedir.ant_glob(JAR_RE, remove=False) if id(x) != id(self.outputs[0])] + self.inputs = [x for x in self.basedir.ant_glob(JAR_RE, remove=False, quiet=True) if id(x) != id(self.outputs[0])] except Exception: raise Errors.WafError('Could not find the basedir %r for %r' % (self.basedir, self)) return super(jar_create, self).runnable_status() -class javac(Task.Task): +class javac(JTask): """ - Compile java files + Compiles java files """ color = 'BLUE' - + run_str = '${JAVAC} -classpath ${CLASSPATH} -d ${OUTDIR} ${JAVACFLAGS} ${SRC}' vars = ['CLASSPATH', 'JAVACFLAGS', 'JAVAC', 'OUTDIR'] """ The javac task will be executed again if the variables CLASSPATH, JAVACFLAGS, JAVAC or OUTDIR change. """ + def uid(self): + """Identify java tasks by input&output folder""" + lst = [self.__class__.__name__, self.generator.outdir.abspath()] + for x in self.srcdir: + lst.append(x.abspath()) + return Utils.h_list(lst) def runnable_status(self): """ - Wait for dependent tasks to be complete, then read the file system to find the input nodes. + Waits for dependent tasks to be complete, then read the file system to find the input nodes. 
""" for t in self.run_after: if not t.hasrun: return Task.ASK_LATER if not self.inputs: - global SOURCE_RE self.inputs = [] for x in self.srcdir: - self.inputs.extend(x.ant_glob(SOURCE_RE, remove=False)) + if x.exists(): + self.inputs.extend(x.ant_glob(SOURCE_RE, remove=False, quiet=True)) return super(javac, self).runnable_status() - def run(self): - """ - Execute the javac compiler - """ - env = self.env - gen = self.generator - bld = gen.bld - wd = bld.bldnode.abspath() - def to_list(xx): - if isinstance(xx, str): return [xx] - return xx - cmd = [] - cmd.extend(to_list(env['JAVAC'])) - cmd.extend(['-classpath']) - cmd.extend(to_list(env['CLASSPATH'])) - cmd.extend(['-d']) - cmd.extend(to_list(env['OUTDIR'])) - cmd.extend(to_list(env['JAVACFLAGS'])) - - files = [a.path_from(bld.bldnode) for a in self.inputs] - - # workaround for command line length limit: - # http://support.microsoft.com/kb/830473 - tmp = None - try: - if len(str(files)) + len(str(cmd)) > 8192: - (fd, tmp) = tempfile.mkstemp(dir=bld.bldnode.abspath()) - try: - os.write(fd, '\n'.join(files).encode()) - finally: - if tmp: - os.close(fd) - if Logs.verbose: - Logs.debug('runner: %r' % (cmd + files)) - cmd.append('@' + tmp) - else: - cmd += files - - ret = self.exec_command(cmd, cwd=wd, env=env.env or None) - finally: - if tmp: - os.remove(tmp) - return ret - def post_run(self): """ + List class files created """ - for n in self.generator.outdir.ant_glob('**/*.class'): - n.sig = Utils.h_file(n.abspath()) # careful with this + for node in self.generator.outdir.ant_glob('**/*.class', quiet=True): + self.generator.bld.node_sigs[node] = self.uid() self.generator.bld.task_sigs[self.uid()] = self.cache_sig @feature('javadoc') @after_method('process_rule') def create_javadoc(self): + """ + Creates a javadoc task (feature 'javadoc') + """ tsk = self.create_task('javadoc') tsk.classpath = getattr(self, 'classpath', []) self.javadoc_package = Utils.to_list(self.javadoc_package) @@ -317,6 +432,9 @@ def create_javadoc(self): self.javadoc_output = self.bld.path.find_or_declare(self.javadoc_output) class javadoc(Task.Task): + """ + Builds java documentation + """ color = 'BLUE' def __str__(self): @@ -325,7 +443,7 @@ def __str__(self): def run(self): env = self.env bld = self.generator.bld - wd = bld.bldnode.abspath() + wd = bld.bldnode #add src node + bld node (for generated java code) srcpath = self.generator.path.abspath() + os.sep + self.generator.srcdir @@ -338,7 +456,7 @@ def run(self): classpath = "".join(classpath) self.last_cmd = lst = [] - lst.extend(Utils.to_list(env['JAVADOC'])) + lst.extend(Utils.to_list(env.JAVADOC)) lst.extend(['-d', self.generator.javadoc_output.abspath()]) lst.extend(['-sourcepath', srcpath]) lst.extend(['-classpath', classpath]) @@ -349,14 +467,14 @@ def run(self): self.generator.bld.cmd_and_log(lst, cwd=wd, env=env.env or None, quiet=0) def post_run(self): - nodes = self.generator.javadoc_output.ant_glob('**') - for x in nodes: - x.sig = Utils.h_file(x.abspath()) + nodes = self.generator.javadoc_output.ant_glob('**', quiet=True) + for node in nodes: + self.generator.bld.node_sigs[node] = self.uid() self.generator.bld.task_sigs[self.uid()] = self.cache_sig def configure(self): """ - Detect the javac, java and jar programs + Detects the javac, java and jar programs """ # If JAVA_PATH is set, we prepend it to the path list java_path = self.environ['PATH'].split(os.pathsep) @@ -364,36 +482,37 @@ def configure(self): if 'JAVA_HOME' in self.environ: java_path = [os.path.join(self.environ['JAVA_HOME'], 'bin')] + 
java_path - self.env['JAVA_HOME'] = [self.environ['JAVA_HOME']] + self.env.JAVA_HOME = [self.environ['JAVA_HOME']] for x in 'javac java jar javadoc'.split(): - self.find_program(x, var=x.upper(), path_list=java_path) + self.find_program(x, var=x.upper(), path_list=java_path, mandatory=(x not in ('javadoc'))) if 'CLASSPATH' in self.environ: - v['CLASSPATH'] = self.environ['CLASSPATH'] + v.CLASSPATH = self.environ['CLASSPATH'] - if not v['JAR']: self.fatal('jar is required for making java packages') - if not v['JAVAC']: self.fatal('javac is required for compiling java classes') + if not v.JAR: + self.fatal('jar is required for making java packages') + if not v.JAVAC: + self.fatal('javac is required for compiling java classes') - v['JARCREATE'] = 'cf' # can use cvf - v['JAVACFLAGS'] = [] + v.JARCREATE = 'cf' # can use cvf + v.JAVACFLAGS = [] @conf def check_java_class(self, classname, with_classpath=None): """ - Check if the specified java class exists + Checks if the specified java class exists :param classname: class to check, like java.util.HashMap :type classname: string :param with_classpath: additional classpath to give :type with_classpath: string """ - javatestdir = '.waf-javatest' classpath = javatestdir - if self.env['CLASSPATH']: - classpath += os.pathsep + self.env['CLASSPATH'] + if self.env.CLASSPATH: + classpath += os.pathsep + self.env.CLASSPATH if isinstance(with_classpath, str): classpath += os.pathsep + with_classpath @@ -403,10 +522,10 @@ def check_java_class(self, classname, with_classpath=None): Utils.writef(os.path.join(javatestdir, 'Test.java'), class_check_source) # Compile the source - self.exec_command(self.env['JAVAC'] + [os.path.join(javatestdir, 'Test.java')], shell=False) + self.exec_command(self.env.JAVAC + [os.path.join(javatestdir, 'Test.java')], shell=False) # Try to run the app - cmd = self.env['JAVA'] + ['-cp', classpath, 'Test', classname] + cmd = self.env.JAVA + ['-cp', classpath, 'Test', classname] self.to_log("%s\n" % str(cmd)) found = self.exec_command(cmd, shell=False) @@ -419,7 +538,7 @@ def check_java_class(self, classname, with_classpath=None): @conf def check_jni_headers(conf): """ - Check for jni headers and libraries. On success the conf.env variables xxx_JAVA are added for use in C/C++ targets:: + Checks for jni headers and libraries. 
On success the conf.env variables xxx_JAVA are added for use in C/C++ targets:: def options(opt): opt.load('compiler_c') @@ -431,7 +550,6 @@ def configure(conf): def build(bld): bld.shlib(source='a.c', target='app', use='JAVA') """ - if not conf.env.CC_NAME and not conf.env.CXX_NAME: conf.fatal('load a compiler first (gcc, g++, ..)') @@ -439,7 +557,7 @@ def build(bld): conf.fatal('set JAVA_HOME in the system environment') # jni requires the jvm - javaHome = conf.env['JAVA_HOME'][0] + javaHome = conf.env.JAVA_HOME[0] dir = conf.root.find_dir(conf.env.JAVA_HOME[0] + '/include') if dir is None: @@ -460,6 +578,8 @@ def build(bld): if f: libDirs = [[x, y.parent.abspath()] for x in libDirs for y in f] + if conf.env.DEST_OS == 'freebsd': + conf.env.append_unique('LINKFLAGS_JAVA', '-pthread') for d in libDirs: try: conf.check(header_name='jni.h', define_name='HAVE_JNI_H', lib='jvm', @@ -471,4 +591,3 @@ def build(bld): else: conf.fatal('could not find lib jvm in %r (see config.log)' % libDirs) - diff --git a/waflib/Tools/ldc2.py b/waflib/Tools/ldc2.py index a9ed04113d..a51c344b95 100644 --- a/waflib/Tools/ldc2.py +++ b/waflib/Tools/ldc2.py @@ -2,16 +2,14 @@ # encoding: utf-8 # Alex Rønne Petersen, 2012 (alexrp/Zor) -import sys from waflib.Tools import ar, d from waflib.Configure import conf @conf def find_ldc2(conf): """ - Find the program *ldc2* and set the variable *D* + Finds the program *ldc2* and set the variable *D* """ - conf.find_program(['ldc2'], var='D') out = conf.cmd_and_log(conf.env.D + ['-version']) @@ -21,39 +19,38 @@ def find_ldc2(conf): @conf def common_flags_ldc2(conf): """ - Set the D flags required by *ldc2* + Sets the D flags required by *ldc2* """ - v = conf.env - v['D_SRC_F'] = ['-c'] - v['D_TGT_F'] = '-of%s' + v.D_SRC_F = ['-c'] + v.D_TGT_F = '-of%s' - v['D_LINKER'] = v['D'] - v['DLNK_SRC_F'] = '' - v['DLNK_TGT_F'] = '-of%s' - v['DINC_ST'] = '-I%s' + v.D_LINKER = v.D + v.DLNK_SRC_F = '' + v.DLNK_TGT_F = '-of%s' + v.DINC_ST = '-I%s' - v['DSHLIB_MARKER'] = v['DSTLIB_MARKER'] = '' - v['DSTLIB_ST'] = v['DSHLIB_ST'] = '-L-l%s' - v['DSTLIBPATH_ST'] = v['DLIBPATH_ST'] = '-L-L%s' + v.DSHLIB_MARKER = v.DSTLIB_MARKER = '' + v.DSTLIB_ST = v.DSHLIB_ST = '-L-l%s' + v.DSTLIBPATH_ST = v.DLIBPATH_ST = '-L-L%s' - v['LINKFLAGS_dshlib'] = ['-L-shared'] + v.LINKFLAGS_dshlib = ['-L-shared'] - v['DHEADER_ext'] = '.di' - v['DFLAGS_d_with_header'] = ['-H', '-Hf'] - v['D_HDR_F'] = '%s' + v.DHEADER_ext = '.di' + v.DFLAGS_d_with_header = ['-H', '-Hf'] + v.D_HDR_F = '%s' - v['LINKFLAGS'] = [] - v['DFLAGS_dshlib'] = ['-relocation-model=pic'] + v.LINKFLAGS = [] + v.DFLAGS_dshlib = ['-relocation-model=pic'] def configure(conf): """ Configuration for *ldc2* """ - conf.find_ldc2() conf.load('ar') conf.load('d') conf.common_flags_ldc2() conf.d_platform_flags() + diff --git a/waflib/Tools/lua.py b/waflib/Tools/lua.py index 814f77d761..15a333a995 100644 --- a/waflib/Tools/lua.py +++ b/waflib/Tools/lua.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # encoding: utf-8 # Sebastian Schlingmann, 2008 -# Thomas Nagy, 2008-2010 (ita) +# Thomas Nagy, 2008-2018 (ita) """ Lua support. 
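A usage sketch for the ``.lua`` handling changed in the next hunk (file name and LUADIR value are illustrative, not part of this patch)::

	def configure(conf):
		conf.load('lua')
		conf.env.LUADIR = '${DATAROOTDIR}/myapp/scripts'   # assumed value; drives the default install_path

	def build(bld):
		bld(source='scripts/main.lua')   # compiled by a 'luac' task into main.luac, installed to ${LUADIR}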
@@ -16,14 +16,14 @@ def build(bld): """ from waflib.TaskGen import extension -from waflib import Task, Utils +from waflib import Task @extension('.lua') def add_lua(self, node): tsk = self.create_task('luac', node, node.change_ext('.luac')) inst_to = getattr(self, 'install_path', self.env.LUADIR and '${LUADIR}' or None) if inst_to: - self.bld.install_files(inst_to, tsk.outputs) + self.add_install_files(install_to=inst_to, install_from=tsk.outputs) return tsk class luac(Task.Task): diff --git a/waflib/Tools/md5_tstamp.py b/waflib/Tools/md5_tstamp.py new file mode 100644 index 0000000000..d1569fa9ec --- /dev/null +++ b/waflib/Tools/md5_tstamp.py @@ -0,0 +1,41 @@ +#! /usr/bin/env python +# encoding: utf-8 + +""" +Re-calculate md5 hashes of files only when the file time have changed:: + + def options(opt): + opt.load('md5_tstamp') + +The hashes can also reflect either the file contents (STRONGEST=True) or the +file time and file size. + +The performance benefits of this module are usually insignificant. +""" + +import os, stat +from waflib import Utils, Build, Node + +STRONGEST = True + +Build.SAVED_ATTRS.append('hashes_md5_tstamp') +def h_file(self): + filename = self.abspath() + st = os.stat(filename) + + cache = self.ctx.hashes_md5_tstamp + if filename in cache and cache[filename][0] == st.st_mtime: + return cache[filename][1] + + if STRONGEST: + ret = Utils.h_file(filename) + else: + if stat.S_ISDIR(st[stat.ST_MODE]): + raise IOError('Not a file') + ret = Utils.md5(str((st.st_mtime, st.st_size)).encode()).digest() + + cache[filename] = (st.st_mtime, ret) + return ret +h_file.__doc__ = Node.Node.h_file.__doc__ +Node.Node.h_file = h_file + diff --git a/waflib/Tools/msvc.py b/waflib/Tools/msvc.py index 62862e1aab..d60f670268 100644 --- a/waflib/Tools/msvc.py +++ b/waflib/Tools/msvc.py @@ -8,6 +8,12 @@ """ Microsoft Visual C++/Intel C++ compiler support +If you get detection problems, first try any of the following:: + + chcp 65001 + set PYTHONIOENCODING=... + set PYTHONLEGACYWINDOWSSTDIO=1 + Usage:: $ waf configure --msvc_version="msvc 10.0,msvc 9.0" --msvc_target="x64" @@ -15,8 +21,8 @@ or:: def configure(conf): - conf.env['MSVC_VERSIONS'] = ['msvc 10.0', 'msvc 9.0', 'msvc 8.0', 'msvc 7.1', 'msvc 7.0', 'msvc 6.0', 'wsdk 7.0', 'intel 11', 'PocketPC 9.0', 'Smartphone 8.0'] - conf.env['MSVC_TARGETS'] = ['x64'] + conf.env.MSVC_VERSIONS = ['msvc 10.0', 'msvc 9.0', 'msvc 8.0', 'msvc 7.1', 'msvc 7.0', 'msvc 6.0', 'wsdk 7.0', 'intel 11', 'PocketPC 9.0', 'Smartphone 8.0'] + conf.env.MSVC_TARGETS = ['x64'] conf.load('msvc') or:: @@ -30,11 +36,15 @@ def build(bld): Platforms and targets will be tested in the order they appear; the first good configuration will be used. + +To force testing all the configurations that are not used, use the ``--no-msvc-lazy`` option +or set ``conf.env.MSVC_LAZY_AUTODETECT=False``. + Supported platforms: ia64, x64, x86, x86_amd64, x86_ia64, x86_arm, amd64_x86, amd64_arm Compilers supported: -* msvc => Visual Studio, versions 6.0 (VC 98, VC .NET 2002) to 12.0 (Visual Studio 2013) +* msvc => Visual Studio, versions 6.0 (VC 98, VC .NET 2002) to 15 (Visual Studio 2017) * wsdk => Windows SDK, versions 6.0, 6.1, 7.0, 7.1, 8.0 * icl => Intel compiler, versions 9, 10, 11, 13 * winphone => Visual Studio to target Windows Phone 8 native (version 8.0 for now) @@ -48,13 +58,12 @@ def build(bld): Setting PYTHONUNBUFFERED gives the unbuffered output. 
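Putting the options above together, a configuration sketch restricting detection and disabling the lazy checks (version and target strings are illustrative)::

	def configure(conf):
		conf.env.MSVC_VERSIONS = ['msvc 14.0', 'wsdk 7.1']
		conf.env.MSVC_TARGETS = ['x86_amd64']
		conf.env.MSVC_LAZY_AUTODETECT = False   # evaluate every candidate environment up front
		conf.load('msvc')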
""" -import os, sys, re, tempfile -from waflib import Utils, Task, Logs, Options, Errors -from waflib.Logs import debug, warn +import os, sys, re, traceback +from waflib import Utils, Logs, Options, Errors from waflib.TaskGen import after_method, feature from waflib.Configure import conf -from waflib.Tools import ccroot, c, cxx, ar, winres +from waflib.Tools import ccroot, c, cxx, ar g_msvc_systemlibs = ''' aclui activeds ad1 adptif adsiid advapi32 asycfilt authz bhsupp bits bufferoverflowu cabinet @@ -78,7 +87,9 @@ def build(bld): '''.split() """importlibs provided by MSVC/Platform SDK. Do NOT search them""" -all_msvc_platforms = [ ('x64', 'amd64'), ('x86', 'x86'), ('ia64', 'ia64'), ('x86_amd64', 'amd64'), ('x86_ia64', 'ia64'), ('x86_arm', 'arm'), ('amd64_x86', 'x86'), ('amd64_arm', 'arm') ] +all_msvc_platforms = [ ('x64', 'amd64'), ('x86', 'x86'), ('ia64', 'ia64'), + ('x86_amd64', 'amd64'), ('x86_ia64', 'ia64'), ('x86_arm', 'arm'), ('x86_arm64', 'arm64'), + ('amd64_x86', 'x86'), ('amd64_arm', 'arm'), ('amd64_arm64', 'arm64') ] """List of msvc platforms""" all_wince_platforms = [ ('armv4', 'arm'), ('armv4i', 'arm'), ('mipsii', 'mips'), ('mipsii_fp', 'mips'), ('mipsiv', 'mips'), ('mipsiv_fp', 'mips'), ('sh4', 'sh'), ('x86', 'cex86') ] @@ -88,46 +99,101 @@ def build(bld): """List of icl platforms""" def options(opt): - opt.add_option('--msvc_version', type='string', help = 'msvc version, eg: "msvc 10.0,msvc 9.0"', default='') + default_ver = '' + vsver = os.getenv('VSCMD_VER') + if vsver: + m = re.match(r'(^\d+\.\d+).*', vsver) + if m: + default_ver = 'msvc %s' % m.group(1) + opt.add_option('--msvc_version', type='string', help = 'msvc version, eg: "msvc 10.0,msvc 9.0"', default=default_ver) opt.add_option('--msvc_targets', type='string', help = 'msvc targets, eg: "x64,arm"', default='') + opt.add_option('--no-msvc-lazy', action='store_false', help = 'lazily check msvc target environments', default=True, dest='msvc_lazy') + +class MSVCVersion(object): + def __init__(self, ver): + m = re.search(r'^(.*)\s+(\d+[.]\d+)', ver) + if m: + self.name = m.group(1) + self.number = float(m.group(2)) + else: + self.name = ver + self.number = 0. + + def __lt__(self, other): + if self.number == other.number: + return self.name < other.name + return self.number < other.number -def setup_msvc(conf, versions, arch = False): +@conf +def setup_msvc(conf, versiondict): + """ + Checks installed compilers and targets and returns the first combination from the user's + options, env, or the global supported lists that checks. + + :param versiondict: dict(platform -> dict(architecture -> configuration)) + :type versiondict: dict(string -> dict(string -> target_compiler) + :return: the compiler, revision, path, include dirs, library paths and target architecture + :rtype: tuple of strings + """ platforms = getattr(Options.options, 'msvc_targets', '').split(',') if platforms == ['']: - platforms=Utils.to_list(conf.env['MSVC_TARGETS']) or [i for i,j in all_msvc_platforms+all_icl_platforms+all_wince_platforms] + platforms=Utils.to_list(conf.env.MSVC_TARGETS) or [i for i,j in all_msvc_platforms+all_icl_platforms+all_wince_platforms] desired_versions = getattr(Options.options, 'msvc_version', '').split(',') if desired_versions == ['']: - desired_versions = conf.env['MSVC_VERSIONS'] or [v for v,_ in versions][::-1] - versiondict = dict(versions) + desired_versions = conf.env.MSVC_VERSIONS or list(sorted(versiondict.keys(), key=MSVCVersion, reverse=True)) + + # Override lazy detection by evaluating after the fact. 
+ lazy_detect = getattr(Options.options, 'msvc_lazy', True) + if conf.env.MSVC_LAZY_AUTODETECT is False: + lazy_detect = False + + if not lazy_detect: + for val in versiondict.values(): + for arch in list(val.keys()): + cfg = val[arch] + cfg.evaluate() + if not cfg.is_valid: + del val[arch] + conf.env.MSVC_INSTALLED_VERSIONS = versiondict for version in desired_versions: + Logs.debug('msvc: detecting %r - %r', version, desired_versions) try: - targets = dict(versiondict [version]) - for target in platforms: - try: - arch,(p1,p2,p3) = targets[target] - compiler,revision = version.rsplit(' ', 1) - if arch: - return compiler,revision,p1,p2,p3,arch - else: - return compiler,revision,p1,p2,p3 - except KeyError: continue - except KeyError: continue - conf.fatal('msvc: Impossible to find a valid architecture for building (in setup_msvc)') + targets = versiondict[version] + except KeyError: + continue + + seen = set() + for arch in platforms: + if arch in seen: + continue + else: + seen.add(arch) + try: + cfg = targets[arch] + except KeyError: + continue + + cfg.evaluate() + if cfg.is_valid: + compiler,revision = version.rsplit(' ', 1) + return compiler,revision,cfg.bindirs,cfg.incdirs,cfg.libdirs,cfg.cpu + conf.fatal('msvc: Impossible to find a valid architecture for building %r - %r' % (desired_versions, list(versiondict.keys()))) @conf def get_msvc_version(conf, compiler, version, target, vcvars): """ - Create a bat file to obtain the location of the libraries - - :param compiler: ? - :param version: ? - :target: ? - :vcvars: ? - :return: the location of msvc, the location of include dirs, and the library paths + Checks that an installed compiler actually runs and uses vcvars to obtain the + environment needed by the compiler. + + :param compiler: compiler type, for looking up the executable name + :param version: compiler version, for debugging only + :param target: target architecture + :param vcvars: batch file to run to check the environment + :return: the location of the compiler executable, the location of include dirs, and the library paths :rtype: tuple of strings """ - debug('msvc: get_msvc_version: %r %r %r', compiler, version, target) + Logs.debug('msvc: get_msvc_version: %r %r %r', compiler, version, target) try: conf.msvc_cnt += 1 @@ -142,7 +208,7 @@ def get_msvc_version(conf, compiler, version, target, vcvars): echo INCLUDE=%%INCLUDE%% echo LIB=%%LIB%%;%%LIBPATH%% """ % (vcvars,target)) - sout = conf.cmd_and_log(['cmd.exe', '/E:on', '/V:on', '/C', batfile.abspath()]) + sout = conf.cmd_and_log(['cmd.exe', '/E:on', '/V:on', '/C', batfile.abspath()], stdin=getattr(Utils.subprocess, 'DEVNULL', None)) lines = sout.splitlines() if not lines[0]: @@ -167,63 +233,27 @@ def get_msvc_version(conf, compiler, version, target, vcvars): compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler) cxx = conf.find_program(compiler_name, path_list=MSVC_PATH) - # delete CL if exists. because it could contain parameters wich can change cl's behaviour rather catastrophically. + # delete CL if exists. because it could contain parameters which can change cl's behaviour rather catastrophically. 
if 'CL' in env: del(env['CL']) try: - try: - conf.cmd_and_log(cxx + ['/help'], env=env) - except Exception as e: - debug('msvc: get_msvc_version: %r %r %r -> failure' % (compiler, version, target)) - debug(str(e)) - conf.fatal('msvc: cannot run the compiler (in get_msvc_version)') - else: - debug('msvc: get_msvc_version: %r %r %r -> OK', compiler, version, target) + conf.cmd_and_log(cxx + ['/help'], env=env) + except UnicodeError: + st = traceback.format_exc() + if conf.logger: + conf.logger.error(st) + conf.fatal('msvc: Unicode error - check the code page?') + except Exception as e: + Logs.debug('msvc: get_msvc_version: %r %r %r -> failure %s', compiler, version, target, str(e)) + conf.fatal('msvc: cannot run the compiler in get_msvc_version (run with -v to display errors)') + else: + Logs.debug('msvc: get_msvc_version: %r %r %r -> OK', compiler, version, target) finally: conf.env[compiler_name] = '' return (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR) -@conf -def gather_wsdk_versions(conf, versions): - """ - Use winreg to add the msvc versions to the input list - - :param versions: list to modify - :type versions: list - """ - version_pattern = re.compile('^v..?.?\...?.?') - try: - all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\Microsoft SDKs\\Windows') - except WindowsError: - try: - all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\Microsoft SDKs\\Windows') - except WindowsError: - return - index = 0 - while 1: - try: - version = Utils.winreg.EnumKey(all_versions, index) - except WindowsError: - break - index = index + 1 - if not version_pattern.match(version): - continue - try: - msvc_version = Utils.winreg.OpenKey(all_versions, version) - path,type = Utils.winreg.QueryValueEx(msvc_version,'InstallationFolder') - except WindowsError: - continue - if os.path.isfile(os.path.join(path, 'bin', 'SetEnv.cmd')): - targets = [] - for target,arch in all_msvc_platforms: - try: - targets.append((target, (arch, conf.get_msvc_version('wsdk', version, '/'+target, os.path.join(path, 'bin', 'SetEnv.cmd'))))) - except conf.errors.ConfigurationError: - pass - versions.append(('wsdk ' + version[1:], targets)) - def gather_wince_supported_platforms(): """ Checks SmartPhones SDKs @@ -234,131 +264,236 @@ def gather_wince_supported_platforms(): supported_wince_platforms = [] try: ce_sdk = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\Windows CE Tools\\SDKs') - except WindowsError: + except OSError: try: ce_sdk = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\Windows CE Tools\\SDKs') - except WindowsError: + except OSError: ce_sdk = '' if not ce_sdk: return supported_wince_platforms - ce_index = 0 + index = 0 while 1: try: - sdk_device = Utils.winreg.EnumKey(ce_sdk, ce_index) - except WindowsError: + sdk_device = Utils.winreg.EnumKey(ce_sdk, index) + sdk = Utils.winreg.OpenKey(ce_sdk, sdk_device) + except OSError: break - ce_index = ce_index + 1 - sdk = Utils.winreg.OpenKey(ce_sdk, sdk_device) + index += 1 try: path,type = Utils.winreg.QueryValueEx(sdk, 'SDKRootDir') - except WindowsError: + except OSError: try: path,type = Utils.winreg.QueryValueEx(sdk,'SDKInformation') - path,xml = os.path.split(path) - except WindowsError: + except OSError: continue - path=str(path) + path,xml = os.path.split(path) + path = str(path) path,device = os.path.split(path) if not device: path,device = os.path.split(path) + platforms = [] for arch,compiler in 
all_wince_platforms: - platforms = [] if os.path.isdir(os.path.join(path, device, 'Lib', arch)): platforms.append((arch, compiler, os.path.join(path, device, 'Include', arch), os.path.join(path, device, 'Lib', arch))) - if platforms: - supported_wince_platforms.append((device, platforms)) + if platforms: + supported_wince_platforms.append((device, platforms)) return supported_wince_platforms def gather_msvc_detected_versions(): #Detected MSVC versions! - version_pattern = re.compile('^(\d\d?\.\d\d?)(Exp)?$') + version_pattern = re.compile(r'^(\d\d?\.\d\d?)(Exp)?$') detected_versions = [] for vcver,vcvar in (('VCExpress','Exp'), ('VisualStudio','')): + prefix = 'SOFTWARE\\Wow6432node\\Microsoft\\' + vcver try: - prefix = 'SOFTWARE\\Wow6432node\\Microsoft\\'+vcver all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, prefix) - except WindowsError: + except OSError: + prefix = 'SOFTWARE\\Microsoft\\' + vcver try: - prefix = 'SOFTWARE\\Microsoft\\'+vcver all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, prefix) - except WindowsError: + except OSError: continue index = 0 while 1: try: version = Utils.winreg.EnumKey(all_versions, index) - except WindowsError: + except OSError: break - index = index + 1 + index += 1 match = version_pattern.match(version) - if not match: - continue - else: + if match: versionnumber = float(match.group(1)) - detected_versions.append((versionnumber, version+vcvar, prefix+"\\"+version)) + else: + continue + detected_versions.append((versionnumber, version+vcvar, prefix+'\\'+version)) def fun(tup): return tup[0] detected_versions.sort(key = fun) return detected_versions +class target_compiler(object): + """ + Wrap a compiler configuration; call evaluate() to determine + whether the configuration is usable. + """ + def __init__(self, ctx, compiler, cpu, version, bat_target, bat, callback=None): + """ + :param ctx: configuration context to use to eventually get the version environment + :param compiler: compiler name + :param cpu: target cpu + :param version: compiler version number + :param bat_target: ? 
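+		                   (ie. the architecture argument passed to the batch file, eg. 'x64' or 'x86_amd64')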
+ :param bat: path to the batch file to run + """ + self.conf = ctx + self.name = None + self.is_valid = False + self.is_done = False + + self.compiler = compiler + self.cpu = cpu + self.version = version + self.bat_target = bat_target + self.bat = bat + self.callback = callback + + def evaluate(self): + if self.is_done: + return + self.is_done = True + try: + vs = self.conf.get_msvc_version(self.compiler, self.version, self.bat_target, self.bat) + except Errors.ConfigurationError: + self.is_valid = False + return + if self.callback: + vs = self.callback(self, vs) + self.is_valid = True + (self.bindirs, self.incdirs, self.libdirs) = vs + + def __str__(self): + return str((self.compiler, self.cpu, self.version, self.bat_target, self.bat)) + + def __repr__(self): + return repr((self.compiler, self.cpu, self.version, self.bat_target, self.bat)) + +@conf +def gather_wsdk_versions(conf, versions): + """ + Use winreg to add the msvc versions to the input list + + :param versions: list to modify + :type versions: list + """ + version_pattern = re.compile(r'^v..?.?\...?.?') + try: + all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\Microsoft SDKs\\Windows') + except OSError: + try: + all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\Microsoft SDKs\\Windows') + except OSError: + return + index = 0 + while 1: + try: + version = Utils.winreg.EnumKey(all_versions, index) + except OSError: + break + index += 1 + if not version_pattern.match(version): + continue + try: + msvc_version = Utils.winreg.OpenKey(all_versions, version) + path,type = Utils.winreg.QueryValueEx(msvc_version,'InstallationFolder') + except OSError: + continue + if path and os.path.isfile(os.path.join(path, 'bin', 'SetEnv.cmd')): + targets = {} + for target,arch in all_msvc_platforms: + targets[target] = target_compiler(conf, 'wsdk', arch, version, '/'+target, os.path.join(path, 'bin', 'SetEnv.cmd')) + versions['wsdk ' + version[1:]] = targets + @conf def gather_msvc_targets(conf, versions, version, vc_path): #Looking for normal MSVC compilers! 
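+	# VS 2017 and later place vcvarsall.bat under VC/Auxiliary/Build inside the
+	# installation root; older releases keep it at the top of the VC directory,
+	# hence the cascade of isfile() checks below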
- targets = [] - if os.path.isfile(os.path.join(vc_path, 'vcvarsall.bat')): + targets = {} + + if os.path.isfile(os.path.join(vc_path, 'VC', 'Auxiliary', 'Build', 'vcvarsall.bat')): for target,realtarget in all_msvc_platforms[::-1]: - try: - targets.append((target, (realtarget, conf.get_msvc_version('msvc', version, target, os.path.join(vc_path, 'vcvarsall.bat'))))) - except conf.errors.ConfigurationError: - pass + targets[target] = target_compiler(conf, 'msvc', realtarget, version, target, os.path.join(vc_path, 'VC', 'Auxiliary', 'Build', 'vcvarsall.bat')) + elif os.path.isfile(os.path.join(vc_path, 'vcvarsall.bat')): + for target,realtarget in all_msvc_platforms[::-1]: + targets[target] = target_compiler(conf, 'msvc', realtarget, version, target, os.path.join(vc_path, 'vcvarsall.bat')) elif os.path.isfile(os.path.join(vc_path, 'Common7', 'Tools', 'vsvars32.bat')): - try: - targets.append(('x86', ('x86', conf.get_msvc_version('msvc', version, 'x86', os.path.join(vc_path, 'Common7', 'Tools', 'vsvars32.bat'))))) - except conf.errors.ConfigurationError: - pass + targets['x86'] = target_compiler(conf, 'msvc', 'x86', version, 'x86', os.path.join(vc_path, 'Common7', 'Tools', 'vsvars32.bat')) elif os.path.isfile(os.path.join(vc_path, 'Bin', 'vcvars32.bat')): - try: - targets.append(('x86', ('x86', conf.get_msvc_version('msvc', version, '', os.path.join(vc_path, 'Bin', 'vcvars32.bat'))))) - except conf.errors.ConfigurationError: - pass + targets['x86'] = target_compiler(conf, 'msvc', 'x86', version, '', os.path.join(vc_path, 'Bin', 'vcvars32.bat')) if targets: - versions.append(('msvc '+ version, targets)) + versions['msvc %s' % version] = targets @conf def gather_wince_targets(conf, versions, version, vc_path, vsvars, supported_platforms): #Looking for Win CE compilers! 
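+	# build one target_compiler per (device, platform); the common x86 environment
+	# from vsvars is merged in lazily through the combine_common callback below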
for device,platforms in supported_platforms: - cetargets = [] + targets = {} for platform,compiler,include,lib in platforms: winCEpath = os.path.join(vc_path, 'ce') if not os.path.isdir(winCEpath): continue - try: - common_bindirs,_1,_2 = conf.get_msvc_version('msvc', version, 'x86', vsvars) - except conf.errors.ConfigurationError: - continue + if os.path.isdir(os.path.join(winCEpath, 'lib', platform)): - bindirs = [os.path.join(winCEpath, 'bin', compiler), os.path.join(winCEpath, 'bin', 'x86_'+compiler)] + common_bindirs + bindirs = [os.path.join(winCEpath, 'bin', compiler), os.path.join(winCEpath, 'bin', 'x86_'+compiler)] incdirs = [os.path.join(winCEpath, 'include'), os.path.join(winCEpath, 'atlmfc', 'include'), include] libdirs = [os.path.join(winCEpath, 'lib', platform), os.path.join(winCEpath, 'atlmfc', 'lib', platform), lib] - cetargets.append((platform, (platform, (bindirs,incdirs,libdirs)))) - if cetargets: - versions.append((device + ' ' + version, cetargets)) + def combine_common(obj, compiler_env): + # TODO this is likely broken, remove in waf 2.1 + (common_bindirs,_1,_2) = compiler_env + return (bindirs + common_bindirs, incdirs, libdirs) + targets[platform] = target_compiler(conf, 'msvc', platform, version, 'x86', vsvars, combine_common) + if targets: + versions[device + ' ' + version] = targets @conf def gather_winphone_targets(conf, versions, version, vc_path, vsvars): #Looking for WinPhone compilers - targets = [] + targets = {} for target,realtarget in all_msvc_platforms[::-1]: - try: - targets.append((target, (realtarget, conf.get_msvc_version('winphone', version, target, vsvars)))) - except conf.errors.ConfigurationError as e: - pass + targets[target] = target_compiler(conf, 'winphone', realtarget, version, target, vsvars) if targets: - versions.append(('winphone '+ version, targets)) + versions['winphone ' + version] = targets + +@conf +def gather_vswhere_versions(conf, versions): + try: + import json + except ImportError: + Logs.error('Visual Studio 2017 detection requires Python 2.6') + return + + prg_path = os.environ.get('ProgramFiles(x86)', os.environ.get('ProgramFiles', 'C:\\Program Files (x86)')) + + vswhere = os.path.join(prg_path, 'Microsoft Visual Studio', 'Installer', 'vswhere.exe') + args = [vswhere, '-products', '*', '-legacy', '-format', 'json'] + try: + txt = conf.cmd_and_log(args) + except Errors.WafError as e: + Logs.debug('msvc: vswhere.exe failed %s', e) + return + + if sys.version_info[0] < 3: + txt = txt.decode(Utils.console_encoding()) + + arr = json.loads(txt) + arr.sort(key=lambda x: x['installationVersion']) + for entry in arr: + ver = entry['installationVersion'] + ver = str('.'.join(ver.split('.')[:2])) + path = str(os.path.abspath(entry['installationPath'])) + if os.path.exists(path) and ('msvc %s' % ver) not in versions: + conf.gather_msvc_targets(versions, ver, path) @conf def gather_msvc_versions(conf, versions): @@ -367,12 +502,20 @@ def gather_msvc_versions(conf, versions): try: try: msvc_version = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, reg + "\\Setup\\VC") - except WindowsError: + except OSError: msvc_version = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, reg + "\\Setup\\Microsoft Visual C++") path,type = Utils.winreg.QueryValueEx(msvc_version, 'ProductDir') - vc_paths.append((version, os.path.abspath(str(path)))) - except WindowsError: + except OSError: + try: + msvc_version = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, "SOFTWARE\\Wow6432node\\Microsoft\\VisualStudio\\SxS\\VS7") + path,type = 
Utils.winreg.QueryValueEx(msvc_version, version) + except OSError: + continue + else: + vc_paths.append((version, os.path.abspath(str(path)))) continue + else: + vc_paths.append((version, os.path.abspath(str(path)))) wince_supported_platforms = gather_wince_supported_platforms() @@ -382,9 +525,14 @@ def gather_msvc_versions(conf, versions): if wince_supported_platforms and os.path.isfile(vsvars): conf.gather_wince_targets(versions, version, vc_path, vsvars, wince_supported_platforms) + # WP80 works with 11.0Exp and 11.0, both of which resolve to the same vc_path. + # Stop after one is found. + for version,vc_path in vc_paths: + vs_path = os.path.dirname(vc_path) vsvars = os.path.join(vs_path, 'VC', 'WPSDK', 'WP80', 'vcvarsphoneall.bat') if os.path.isfile(vsvars): conf.gather_winphone_targets(versions, '8.0', vc_path, vsvars) + break for version,vc_path in vc_paths: vs_path = os.path.dirname(vc_path) @@ -398,53 +546,51 @@ def gather_icl_versions(conf, versions): :param versions: list to modify :type versions: list """ - version_pattern = re.compile('^...?.?\....?.?') + version_pattern = re.compile(r'^...?.?\....?.?') try: all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Intel\\Compilers\\C++') - except WindowsError: + except OSError: try: all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Intel\\Compilers\\C++') - except WindowsError: + except OSError: return index = 0 while 1: try: version = Utils.winreg.EnumKey(all_versions, index) - except WindowsError: + except OSError: break - index = index + 1 + index += 1 if not version_pattern.match(version): continue - targets = [] + targets = {} for target,arch in all_icl_platforms: + if target=='intel64': + targetDir='EM64T_NATIVE' + else: + targetDir=target try: - if target=='intel64': targetDir='EM64T_NATIVE' - else: targetDir=target Utils.winreg.OpenKey(all_versions,version+'\\'+targetDir) icl_version=Utils.winreg.OpenKey(all_versions,version) path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir') + except OSError: + pass + else: batch_file=os.path.join(path,'bin','iclvars.bat') if os.path.isfile(batch_file): - try: - targets.append((target,(arch,conf.get_msvc_version('intel',version,target,batch_file)))) - except conf.errors.ConfigurationError: - pass - except WindowsError: - pass + targets[target] = target_compiler(conf, 'intel', arch, version, target, batch_file) for target,arch in all_icl_platforms: try: icl_version = Utils.winreg.OpenKey(all_versions, version+'\\'+target) path,type = Utils.winreg.QueryValueEx(icl_version,'ProductDir') + except OSError: + continue + else: batch_file=os.path.join(path,'bin','iclvars.bat') if os.path.isfile(batch_file): - try: - targets.append((target, (arch, conf.get_msvc_version('intel', version, target, batch_file)))) - except conf.errors.ConfigurationError: - pass - except WindowsError: - continue + targets[target] = target_compiler(conf, 'intel', arch, version, target, batch_file) major = version[0:2] - versions.append(('intel ' + major, targets)) + versions['intel ' + major] = targets @conf def gather_intel_composer_versions(conf, versions): @@ -454,45 +600,47 @@ def gather_intel_composer_versions(conf, versions): :param versions: list to modify :type versions: list """ - version_pattern = re.compile('^...?.?\...?.?.?') + version_pattern = re.compile(r'^...?.?\...?.?.?') try: all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Intel\\Suites') - except WindowsError: + except OSError: 
try: all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Intel\\Suites') - except WindowsError: + except OSError: return index = 0 while 1: try: version = Utils.winreg.EnumKey(all_versions, index) - except WindowsError: + except OSError: break - index = index + 1 + index += 1 if not version_pattern.match(version): continue - targets = [] + targets = {} for target,arch in all_icl_platforms: + if target=='intel64': + targetDir='EM64T_NATIVE' + else: + targetDir=target try: - if target=='intel64': targetDir='EM64T_NATIVE' - else: targetDir=target try: defaults = Utils.winreg.OpenKey(all_versions,version+'\\Defaults\\C++\\'+targetDir) - except WindowsError: - if targetDir=='EM64T_NATIVE': + except OSError: + if targetDir == 'EM64T_NATIVE': defaults = Utils.winreg.OpenKey(all_versions,version+'\\Defaults\\C++\\EM64T') else: - raise WindowsError + raise uid,type = Utils.winreg.QueryValueEx(defaults, 'SubKey') Utils.winreg.OpenKey(all_versions,version+'\\'+uid+'\\C++\\'+targetDir) icl_version=Utils.winreg.OpenKey(all_versions,version+'\\'+uid+'\\C++') path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir') + except OSError: + pass + else: batch_file=os.path.join(path,'bin','iclvars.bat') if os.path.isfile(batch_file): - try: - targets.append((target,(arch,conf.get_msvc_version('intel',version,target,batch_file)))) - except conf.errors.ConfigurationError as e: - pass + targets[target] = target_compiler(conf, 'intel', arch, version, target, batch_file) # The intel compilervar_arch.bat is broken when used with Visual Studio Express 2012 # http://software.intel.com/en-us/forums/topic/328487 compilervars_warning_attr = '_compilervars_warning_key' @@ -507,56 +655,44 @@ def gather_intel_composer_versions(conf, versions): if (r'if exist "%VS110COMNTOOLS%..\IDE\VSWinExpress.exe"' in Utils.readf(compilervars_arch) and not os.path.exists(vs_express_path) and not os.path.exists(dev_env_path)): Logs.warn(('The Intel compilervar_arch.bat only checks for one Visual Studio SKU ' - '(VSWinExpress.exe) but it does not seem to be installed at %r. ' - 'The intel command line set up will fail to configure unless the file %r' - 'is patched. See: %s') % (vs_express_path, compilervars_arch, patch_url)) - except WindowsError: - pass + '(VSWinExpress.exe) but it does not seem to be installed at %r. ' + 'The intel command line set up will fail to configure unless the file %r' + 'is patched. 
See: %s') % (vs_express_path, compilervars_arch, patch_url)) major = version[0:2] - versions.append(('intel ' + major, targets)) + versions['intel ' + major] = targets @conf -def get_msvc_versions(conf): - """ - :return: list of compilers installed - :rtype: list of string - """ - if not conf.env['MSVC_INSTALLED_VERSIONS']: - lst = [] - conf.gather_icl_versions(lst) - conf.gather_intel_composer_versions(lst) - conf.gather_wsdk_versions(lst) - conf.gather_msvc_versions(lst) - conf.env['MSVC_INSTALLED_VERSIONS'] = lst - return conf.env['MSVC_INSTALLED_VERSIONS'] +def detect_msvc(self): + return self.setup_msvc(self.get_msvc_versions()) @conf -def print_all_msvc_detected(conf): +def get_msvc_versions(self): """ - Print the contents of *conf.env.MSVC_INSTALLED_VERSIONS* + :return: platform to compiler configurations + :rtype: dict """ - for version,targets in conf.env['MSVC_INSTALLED_VERSIONS']: - Logs.info(version) - for target,l in targets: - Logs.info("\t"+target) - -@conf -def detect_msvc(conf, arch = False): - versions = get_msvc_versions(conf) - return setup_msvc(conf, versions, arch) + dct = Utils.ordered_iter_dict() + self.gather_icl_versions(dct) + self.gather_intel_composer_versions(dct) + self.gather_wsdk_versions(dct) + self.gather_msvc_versions(dct) + self.gather_vswhere_versions(dct) + Logs.debug('msvc: detected versions %r', list(dct.keys())) + return dct @conf def find_lt_names_msvc(self, libname, is_static=False): """ Win32/MSVC specific code to glean out information from libtool la files. - this function is not attached to the task_gen class + this function is not attached to the task_gen class. Returns a triplet: + (library absolute path, library name without extension, whether the library is static) """ lt_names=[ 'lib%s.la' % libname, '%s.la' % libname, ] - for path in self.env['LIBPATH']: + for path in self.env.LIBPATH: for la in lt_names: laf=os.path.join(path,la) dll=None @@ -568,7 +704,7 @@ def find_lt_names_msvc(self, libname, is_static=False): if not is_static and ltdict.get('library_names', ''): dllnames=ltdict['library_names'].split() dll=dllnames[0].lower() - dll=re.sub('\.dll$', '', dll) + dll=re.sub(r'\.dll$', '', dll) return (lt_libdir, dll, False) elif ltdict.get('old_library', ''): olib=ltdict['old_library'] @@ -585,7 +721,7 @@ def find_lt_names_msvc(self, libname, is_static=False): @conf def libname_msvc(self, libname, is_static=False): lib = libname.lower() - lib = re.sub('\.lib$','',lib) + lib = re.sub(r'\.lib$','',lib) if lib in g_msvc_systemlibs: return lib @@ -598,14 +734,14 @@ def libname_msvc(self, libname, is_static=False): (lt_path, lt_libname, lt_static) = self.find_lt_names_msvc(lib, is_static) if lt_path != None and lt_libname != None: - if lt_static == True: - # file existance check has been made by find_lt_names + if lt_static: + # file existence check has been made by find_lt_names return os.path.join(lt_path,lt_libname) if lt_path != None: - _libpaths=[lt_path] + self.env['LIBPATH'] + _libpaths = [lt_path] + self.env.LIBPATH else: - _libpaths=self.env['LIBPATH'] + _libpaths = self.env.LIBPATH static_libs=[ 'lib%ss.lib' % lib, @@ -631,19 +767,19 @@ def libname_msvc(self, libname, is_static=False): for path in _libpaths: for libn in libnames: if os.path.exists(os.path.join(path, libn)): - debug('msvc: lib found: %s' % os.path.join(path,libn)) - return re.sub('\.lib$', '',libn) + Logs.debug('msvc: lib found: %s', os.path.join(path,libn)) + return re.sub(r'\.lib$', '',libn) #if no lib can be found, just return the libname as msvc expects it - 
self.fatal("The library %r could not be found" % libname) - return re.sub('\.lib$', '', libname) + self.fatal('The library %r could not be found' % libname) + return re.sub(r'\.lib$', '', libname) @conf def check_lib_msvc(self, libname, is_static=False, uselib_store=None): """ Ideally we should be able to place the lib in the right env var, either STLIB or LIB, but we don't distinguish static libs from shared libs. - This is ok since msvc doesn't have any special linker flag to select static libs (no env['STLIB_MARKER']) + This is ok since msvc doesn't have any special linker flag to select static libs (no env.STLIB_MARKER) """ libn = self.libname_msvc(libname, is_static) @@ -680,27 +816,26 @@ def no_autodetect(conf): configure(conf) @conf -def autodetect(conf, arch = False): +def autodetect(conf, arch=False): v = conf.env if v.NO_MSVC_DETECT: return + + compiler, version, path, includes, libdirs, cpu = conf.detect_msvc() if arch: - compiler, version, path, includes, libdirs, arch = conf.detect_msvc(True) - v['DEST_CPU'] = arch - else: - compiler, version, path, includes, libdirs = conf.detect_msvc() + v.DEST_CPU = cpu - v['PATH'] = path - v['INCLUDES'] = includes - v['LIBPATH'] = libdirs - v['MSVC_COMPILER'] = compiler + v.PATH = path + v.INCLUDES = includes + v.LIBPATH = libdirs + v.MSVC_COMPILER = compiler try: - v['MSVC_VERSION'] = float(version) - except Exception: - v['MSVC_VERSION'] = float(version[:-3]) + v.MSVC_VERSION = float(version) + except ValueError: + v.MSVC_VERSION = float(version[:-3]) def _get_prog_names(conf, compiler): - if compiler=='intel': + if compiler == 'intel': compiler_name = 'ICL' linker_name = 'XILINK' lib_name = 'XILIB' @@ -719,63 +854,59 @@ def find_msvc(conf): # the autodetection is supposed to be performed before entering in this method v = conf.env - path = v['PATH'] - compiler = v['MSVC_COMPILER'] - version = v['MSVC_VERSION'] + path = v.PATH + compiler = v.MSVC_COMPILER + version = v.MSVC_VERSION compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler) v.MSVC_MANIFEST = (compiler == 'msvc' and version >= 8) or (compiler == 'wsdk' and version >= 6) or (compiler == 'intel' and version >= 11) # compiler - cxx = None - if v['CXX']: cxx = v['CXX'] - elif 'CXX' in conf.environ: cxx = conf.environ['CXX'] cxx = conf.find_program(compiler_name, var='CXX', path_list=path) # before setting anything, check if the compiler is really msvc env = dict(conf.environ) - if path: env.update(PATH = ';'.join(path)) + if path: + env.update(PATH = ';'.join(path)) if not conf.cmd_and_log(cxx + ['/nologo', '/help'], env=env): conf.fatal('the msvc compiler could not be identified') # c/c++ compiler - v['CC'] = v['CXX'] = cxx - v['CC_NAME'] = v['CXX_NAME'] = 'msvc' + v.CC = v.CXX = cxx + v.CC_NAME = v.CXX_NAME = 'msvc' # linker - if not v['LINK_CXX']: - link = conf.find_program(linker_name, path_list=path) - if link: v['LINK_CXX'] = link - else: conf.fatal('%s was not found (linker)' % linker_name) - v['LINK'] = link + if not v.LINK_CXX: + conf.find_program(linker_name, path_list=path, errmsg='%s was not found (linker)' % linker_name, var='LINK_CXX') - if not v['LINK_CC']: - v['LINK_CC'] = v['LINK_CXX'] + if not v.LINK_CC: + v.LINK_CC = v.LINK_CXX # staticlib linker - if not v['AR']: + if not v.AR: stliblink = conf.find_program(lib_name, path_list=path, var='AR') - if not stliblink: return - v['ARFLAGS'] = ['/NOLOGO'] + if not stliblink: + return + v.ARFLAGS = ['/nologo'] # manifest tool. Not required for VS 2003 and below. 
Must have for VS 2005 and later if v.MSVC_MANIFEST: conf.find_program('MT', path_list=path, var='MT') - v['MTFLAGS'] = ['/NOLOGO'] + v.MTFLAGS = ['/nologo'] try: conf.load('winres') - except Errors.WafError: - warn('Resource compiler not found. Compiling resource file is disabled') + except Errors.ConfigurationError: + Logs.warn('Resource compiler not found. Compiling resource file is disabled') @conf def visual_studio_add_flags(self): """visual studio flags found in the system environment""" v = self.env - try: v.prepend_value('INCLUDES', [x for x in self.environ['INCLUDE'].split(';') if x]) # notice the 'S' - except Exception: pass - try: v.prepend_value('LIBPATH', [x for x in self.environ['LIB'].split(';') if x]) - except Exception: pass + if self.environ.get('INCLUDE'): + v.prepend_value('INCLUDES', [x for x in self.environ['INCLUDE'].split(';') if x]) # notice the 'S' + if self.environ.get('LIB'): + v.prepend_value('LIBPATH', [x for x in self.environ['LIB'].split(';') if x]) @conf def msvc_common_flags(conf): @@ -784,62 +915,53 @@ def msvc_common_flags(conf): """ v = conf.env - v['DEST_BINFMT'] = 'pe' + v.DEST_BINFMT = 'pe' v.append_value('CFLAGS', ['/nologo']) v.append_value('CXXFLAGS', ['/nologo']) - v['DEFINES_ST'] = '/D%s' + v.append_value('LINKFLAGS', ['/nologo']) + v.DEFINES_ST = '/D%s' - v['CC_SRC_F'] = '' - v['CC_TGT_F'] = ['/c', '/Fo'] - v['CXX_SRC_F'] = '' - v['CXX_TGT_F'] = ['/c', '/Fo'] + v.CC_SRC_F = '' + v.CC_TGT_F = ['/c', '/Fo'] + v.CXX_SRC_F = '' + v.CXX_TGT_F = ['/c', '/Fo'] if (v.MSVC_COMPILER == 'msvc' and v.MSVC_VERSION >= 8) or (v.MSVC_COMPILER == 'wsdk' and v.MSVC_VERSION >= 6): - v['CC_TGT_F']= ['/FC'] + v['CC_TGT_F'] - v['CXX_TGT_F']= ['/FC'] + v['CXX_TGT_F'] - - v['CPPPATH_ST'] = '/I%s' # template for adding include paths + v.CC_TGT_F = ['/FC'] + v.CC_TGT_F + v.CXX_TGT_F = ['/FC'] + v.CXX_TGT_F - v['AR_TGT_F'] = v['CCLNK_TGT_F'] = v['CXXLNK_TGT_F'] = '/OUT:' + v.CPPPATH_ST = '/I%s' # template for adding include paths - # Subsystem specific flags - v['CFLAGS_CONSOLE'] = v['CXXFLAGS_CONSOLE'] = ['/SUBSYSTEM:CONSOLE'] - v['CFLAGS_NATIVE'] = v['CXXFLAGS_NATIVE'] = ['/SUBSYSTEM:NATIVE'] - v['CFLAGS_POSIX'] = v['CXXFLAGS_POSIX'] = ['/SUBSYSTEM:POSIX'] - v['CFLAGS_WINDOWS'] = v['CXXFLAGS_WINDOWS'] = ['/SUBSYSTEM:WINDOWS'] - v['CFLAGS_WINDOWSCE'] = v['CXXFLAGS_WINDOWSCE'] = ['/SUBSYSTEM:WINDOWSCE'] + v.AR_TGT_F = v.CCLNK_TGT_F = v.CXXLNK_TGT_F = '/OUT:' # CRT specific flags - v['CFLAGS_CRT_MULTITHREADED'] = v['CXXFLAGS_CRT_MULTITHREADED'] = ['/MT'] - v['CFLAGS_CRT_MULTITHREADED_DLL'] = v['CXXFLAGS_CRT_MULTITHREADED_DLL'] = ['/MD'] + v.CFLAGS_CRT_MULTITHREADED = v.CXXFLAGS_CRT_MULTITHREADED = ['/MT'] + v.CFLAGS_CRT_MULTITHREADED_DLL = v.CXXFLAGS_CRT_MULTITHREADED_DLL = ['/MD'] - v['CFLAGS_CRT_MULTITHREADED_DBG'] = v['CXXFLAGS_CRT_MULTITHREADED_DBG'] = ['/MTd'] - v['CFLAGS_CRT_MULTITHREADED_DLL_DBG'] = v['CXXFLAGS_CRT_MULTITHREADED_DLL_DBG'] = ['/MDd'] + v.CFLAGS_CRT_MULTITHREADED_DBG = v.CXXFLAGS_CRT_MULTITHREADED_DBG = ['/MTd'] + v.CFLAGS_CRT_MULTITHREADED_DLL_DBG = v.CXXFLAGS_CRT_MULTITHREADED_DLL_DBG = ['/MDd'] - # linker - v['LIB_ST'] = '%s.lib' # template for adding shared libs - v['LIBPATH_ST'] = '/LIBPATH:%s' # template for adding libpaths - v['STLIB_ST'] = '%s.lib' - v['STLIBPATH_ST'] = '/LIBPATH:%s' + v.LIB_ST = '%s.lib' + v.LIBPATH_ST = '/LIBPATH:%s' + v.STLIB_ST = '%s.lib' + v.STLIBPATH_ST = '/LIBPATH:%s' - v.append_value('LINKFLAGS', ['/NOLOGO']) - if v['MSVC_MANIFEST']: + if v.MSVC_MANIFEST: v.append_value('LINKFLAGS', ['/MANIFEST']) - # shared library - 
v['CFLAGS_cshlib'] = [] - v['CXXFLAGS_cxxshlib'] = [] - v['LINKFLAGS_cshlib'] = v['LINKFLAGS_cxxshlib'] = ['/DLL'] - v['cshlib_PATTERN'] = v['cxxshlib_PATTERN'] = '%s.dll' - v['implib_PATTERN'] = '%s.lib' - v['IMPLIB_ST'] = '/IMPLIB:%s' + v.CFLAGS_cshlib = [] + v.CXXFLAGS_cxxshlib = [] + v.LINKFLAGS_cshlib = v.LINKFLAGS_cxxshlib = ['/DLL'] + v.cshlib_PATTERN = v.cxxshlib_PATTERN = '%s.dll' + v.implib_PATTERN = '%s.lib' + v.IMPLIB_ST = '/IMPLIB:%s' + + v.LINKFLAGS_cstlib = [] + v.cstlib_PATTERN = v.cxxstlib_PATTERN = '%s.lib' - # static library - v['LINKFLAGS_cstlib'] = [] - v['cstlib_PATTERN'] = v['cxxstlib_PATTERN'] = '%s.lib' + v.cprogram_PATTERN = v.cxxprogram_PATTERN = '%s.exe' - # program - v['cprogram_PATTERN'] = v['cxxprogram_PATTERN'] = '%s.exe' + v.def_PATTERN = '/def:%s' ####################################################################################################### @@ -868,17 +990,15 @@ def build(bld): if not is_static: for f in self.env.LINKFLAGS: d = f.lower() - if d[1:] == 'debug': + if d[1:] in ('debug', 'debug:full', 'debug:fastlink'): pdbnode = self.link_task.outputs[0].change_ext('.pdb') self.link_task.outputs.append(pdbnode) if getattr(self, 'install_task', None): - self.pdb_install_task = self.bld.install_files(self.install_task.dest, pdbnode, env=self.env) - + self.pdb_install_task = self.add_install_files( + install_to=self.install_task.install_to, install_from=pdbnode) break -# split the manifest file processing from the link task, like for the rc processing - @feature('cprogram', 'cshlib', 'cxxprogram', 'cxxshlib') @after_method('apply_link') def apply_manifest(self): @@ -888,161 +1008,16 @@ def apply_manifest(self): the manifest file, the binaries are unusable. See: http://msdn2.microsoft.com/en-us/library/ms235542(VS.80).aspx """ - if self.env.CC_NAME == 'msvc' and self.env.MSVC_MANIFEST and getattr(self, 'link_task', None): out_node = self.link_task.outputs[0] man_node = out_node.parent.find_or_declare(out_node.name + '.manifest') self.link_task.outputs.append(man_node) - self.link_task.do_manifest = True - -def exec_mf(self): - """ - Create the manifest file - """ - env = self.env - mtool = env['MT'] - if not mtool: - return 0 - - self.do_manifest = False - - outfile = self.outputs[0].abspath() - - manifest = None - for out_node in self.outputs: - if out_node.name.endswith('.manifest'): - manifest = out_node.abspath() - break - if manifest is None: - # Should never get here. If we do, it means the manifest file was - # never added to the outputs list, thus we don't have a manifest file - # to embed, so we just return. - return 0 - - # embedding mode. Different for EXE's and DLL's. 
- # see: http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx - mode = '' - if 'cprogram' in self.generator.features or 'cxxprogram' in self.generator.features: - mode = '1' - elif 'cshlib' in self.generator.features or 'cxxshlib' in self.generator.features: - mode = '2' - - debug('msvc: embedding manifest in mode %r' % mode) - - lst = [] + mtool - lst.extend(Utils.to_list(env['MTFLAGS'])) - lst.extend(['-manifest', manifest]) - lst.append('-outputresource:%s;%s' % (outfile, mode)) - - return self.exec_command(lst) - -def quote_response_command(self, flag): - if flag.find(' ') > -1: - for x in ('/LIBPATH:', '/IMPLIB:', '/OUT:', '/I'): - if flag.startswith(x): - flag = '%s"%s"' % (x, flag[len(x):]) - break - else: - flag = '"%s"' % flag - return flag - -def exec_response_command(self, cmd, **kw): - # not public yet - try: - tmp = None - if sys.platform.startswith('win') and isinstance(cmd, list) and len(' '.join(cmd)) >= 8192: - program = cmd[0] #unquoted program name, otherwise exec_command will fail - cmd = [self.quote_response_command(x) for x in cmd] - (fd, tmp) = tempfile.mkstemp() - os.write(fd, '\r\n'.join(i.replace('\\', '\\\\') for i in cmd[1:]).encode()) - os.close(fd) - cmd = [program, '@' + tmp] - # no return here, that's on purpose - ret = self.generator.bld.exec_command(cmd, **kw) - finally: - if tmp: - try: - os.remove(tmp) - except OSError: - pass # anti-virus and indexers can keep the files open -_- - return ret - -########## stupid evil command modification: concatenate the tokens /Fx, /doc, and /x: with the next token - -def exec_command_msvc(self, *k, **kw): - """ - Change the command-line execution for msvc programs. - Instead of quoting all the paths and keep using the shell, we can just join the options msvc is interested in - """ - if isinstance(k[0], list): - lst = [] - carry = '' - for a in k[0]: - if a == '/Fo' or a == '/doc' or a[-1] == ':': - carry = a - else: - lst.append(carry + a) - carry = '' - k = [lst] - - if self.env['PATH']: - env = dict(self.env.env or os.environ) - env.update(PATH = ';'.join(self.env['PATH'])) - kw['env'] = env - - bld = self.generator.bld - try: - if not kw.get('cwd', None): - kw['cwd'] = bld.cwd - except AttributeError: - bld.cwd = kw['cwd'] = bld.variant_dir - - ret = self.exec_response_command(k[0], **kw) - if not ret and getattr(self, 'do_manifest', None): - ret = self.exec_mf() - return ret - -def wrap_class(class_name): - """ - Manifest file processing and @response file workaround for command-line length limits on Windows systems - The indicated task class is replaced by a subclass to prevent conflicts in case the class is wrapped more than once - """ - cls = Task.classes.get(class_name, None) - - if not cls: - return None - - derived_class = type(class_name, (cls,), {}) - - def exec_command(self, *k, **kw): - if self.env['CC_NAME'] == 'msvc': - return self.exec_command_msvc(*k, **kw) - else: - return super(derived_class, self).exec_command(*k, **kw) - - # Chain-up monkeypatch needed since exec_command() is in base class API - derived_class.exec_command = exec_command - - # No chain-up behavior needed since the following methods aren't in - # base class API - derived_class.exec_response_command = exec_response_command - derived_class.quote_response_command = quote_response_command - derived_class.exec_command_msvc = exec_command_msvc - derived_class.exec_mf = exec_mf - - if hasattr(cls, 'hcode'): - derived_class.hcode = cls.hcode - - return derived_class - -for k in 'c cxx cprogram cxxprogram cshlib cxxshlib cstlib 
cxxstlib'.split(): - wrap_class(k) + self.env.DO_MANIFEST = True def make_winapp(self, family): append = self.env.append_unique append('DEFINES', 'WINAPI_FAMILY=%s' % family) - append('CXXFLAGS', '/ZW') - append('CXXFLAGS', '/TP') + append('CXXFLAGS', ['/ZW', '/TP']) for lib_path in self.env.LIBPATH: append('CXXFLAGS','/AI%s'%lib_path) @@ -1050,13 +1025,17 @@ def make_winapp(self, family): @after_method('process_use') @after_method('propagate_uselib_vars') def make_winphone_app(self): + """ + Insert configuration flags for windows phone applications (adds /ZW, /TP...) + """ make_winapp(self, 'WINAPI_FAMILY_PHONE_APP') - conf.env.append_unique('LINKFLAGS', '/NODEFAULTLIB:ole32.lib') - conf.env.append_unique('LINKFLAGS', 'PhoneAppModelHost.lib') - + self.env.append_unique('LINKFLAGS', ['/NODEFAULTLIB:ole32.lib', 'PhoneAppModelHost.lib']) @feature('winapp') @after_method('process_use') @after_method('propagate_uselib_vars') def make_windows_app(self): + """ + Insert configuration flags for windows applications (adds /ZW, /TP...) + """ make_winapp(self, 'WINAPI_FAMILY_DESKTOP_APP') diff --git a/waflib/Tools/nasm.py b/waflib/Tools/nasm.py index d2a6a84638..9c51c18de1 100644 --- a/waflib/Tools/nasm.py +++ b/waflib/Tools/nasm.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # encoding: utf-8 -# Thomas Nagy, 2008-2010 (ita) +# Thomas Nagy, 2008-2018 (ita) """ Nasm tool (asm processing) @@ -19,8 +19,13 @@ def configure(conf): """ Detect nasm/yasm and set the variable *AS* """ - nasm = conf.find_program(['nasm', 'yasm'], var='AS') + conf.find_program(['nasm', 'yasm'], var='AS') conf.env.AS_TGT_F = ['-o'] conf.env.ASLNK_TGT_F = ['-o'] conf.load('asm') conf.env.ASMPATH_ST = '-I%s' + os.sep + txt = conf.cmd_and_log(conf.env.AS + ['--version']) + if 'yasm' in txt.lower(): + conf.env.ASM_NAME = 'yasm' + else: + conf.env.ASM_NAME = 'nasm' diff --git a/waflib/extras/nobuild.py b/waflib/Tools/nobuild.py old mode 100755 new mode 100644 similarity index 100% rename from waflib/extras/nobuild.py rename to waflib/Tools/nobuild.py diff --git a/waflib/Tools/perl.py b/waflib/Tools/perl.py index cc645261dc..32b03fbaa7 100644 --- a/waflib/Tools/perl.py +++ b/waflib/Tools/perl.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # encoding: utf-8 # andersg at 0x63.nu 2007 -# Thomas Nagy 2010 (ita) +# Thomas Nagy 2016-2018 (ita) """ Support for Perl extensions. A C/C++ compiler is required:: @@ -24,7 +24,7 @@ def build(bld): """ import os -from waflib import Task, Options, Utils +from waflib import Task, Options, Utils, Errors from waflib.Configure import conf from waflib.TaskGen import extension, feature, before_method @@ -36,8 +36,9 @@ def init_perlext(self): *lib* prefix from library names. 
""" self.uselib = self.to_list(getattr(self, 'uselib', [])) - if not 'PERLEXT' in self.uselib: self.uselib.append('PERLEXT') - self.env['cshlib_PATTERN'] = self.env['cxxshlib_PATTERN'] = self.env['perlext_PATTERN'] + if not 'PERLEXT' in self.uselib: + self.uselib.append('PERLEXT') + self.env.cshlib_PATTERN = self.env.cxxshlib_PATTERN = self.env.perlext_PATTERN @extension('.xs') def xsubpp_file(self, node): @@ -63,7 +64,6 @@ def check_perl_version(self, minver=None): minver is supposed to be a tuple """ res = True - if minver: cver = '.'.join(map(str,minver)) else: @@ -71,18 +71,8 @@ def check_perl_version(self, minver=None): self.start_msg('Checking for minimum perl version %s' % cver) - perl = getattr(Options.options, 'perlbinary', None) - - if not perl: - perl = self.find_program('perl', var='PERL') - - if not perl: - self.end_msg("Perl not found", color="YELLOW") - return False - - self.env['PERL'] = perl - - version = self.cmd_and_log(self.env.PERL + ["-e", 'printf \"%vd\", $^V']) + perl = self.find_program('perl', var='PERL', value=getattr(Options.options, 'perlbinary', None)) + version = self.cmd_and_log(perl + ["-e", 'printf \"%vd\", $^V']) if not version: res = False version = "Unknown" @@ -91,7 +81,7 @@ def check_perl_version(self, minver=None): if ver < minver: res = False - self.end_msg(version, color=res and "GREEN" or "YELLOW") + self.end_msg(version, color=res and 'GREEN' or 'YELLOW') return res @conf @@ -109,7 +99,7 @@ def configure(conf): self.start_msg('perl module %s' % module) try: r = self.cmd_and_log(cmd) - except Exception: + except Errors.WafError: self.end_msg(False) return None self.end_msg(r or True) @@ -137,19 +127,25 @@ def cfg_str(cfg): return self.cmd_and_log(cmd_perl_config(cfg)) def cfg_lst(cfg): return Utils.to_list(cfg_str(cfg)) - - env['LINKFLAGS_PERLEXT'] = cfg_lst('$Config{lddlflags}') - env['INCLUDES_PERLEXT'] = cfg_lst('$Config{archlib}/CORE') - env['CFLAGS_PERLEXT'] = cfg_lst('$Config{ccflags} $Config{cccdlflags}') - env['XSUBPP'] = cfg_lst('$Config{privlib}/ExtUtils/xsubpp$Config{exe_ext}') - env['EXTUTILS_TYPEMAP'] = cfg_lst('$Config{privlib}/ExtUtils/typemap') + def find_xsubpp(): + for var in ('privlib', 'vendorlib'): + xsubpp = cfg_lst('$Config{%s}/ExtUtils/xsubpp$Config{exe_ext}' % var) + if xsubpp and os.path.isfile(xsubpp[0]): + return xsubpp + return self.find_program('xsubpp') + + env.LINKFLAGS_PERLEXT = cfg_lst('$Config{lddlflags}') + env.INCLUDES_PERLEXT = cfg_lst('$Config{archlib}/CORE') + env.CFLAGS_PERLEXT = cfg_lst('$Config{ccflags} $Config{cccdlflags}') + env.EXTUTILS_TYPEMAP = cfg_lst('$Config{privlib}/ExtUtils/typemap') + env.XSUBPP = find_xsubpp() if not getattr(Options.options, 'perlarchdir', None): - env['ARCHDIR_PERL'] = cfg_str('$Config{sitearch}') + env.ARCHDIR_PERL = cfg_str('$Config{sitearch}') else: - env['ARCHDIR_PERL'] = getattr(Options.options, 'perlarchdir') + env.ARCHDIR_PERL = getattr(Options.options, 'perlarchdir') - env['perlext_PATTERN'] = '%s.' + cfg_str('$Config{dlext}') + env.perlext_PATTERN = '%s.' 
+ cfg_str('$Config{dlext}') def options(opt): """ diff --git a/waflib/Tools/python.py b/waflib/Tools/python.py index 33530dd311..b2dd1a9bcc 100644 --- a/waflib/Tools/python.py +++ b/waflib/Tools/python.py @@ -19,7 +19,7 @@ def build(bld): """ import os, sys -from waflib import Utils, Options, Errors, Logs, Task, Node +from waflib import Errors, Logs, Node, Options, Task, Utils from waflib.TaskGen import extension, before_method, after_method, feature from waflib.Configure import conf @@ -50,10 +50,20 @@ def build(bld): py_compile.compile(sys.argv[1], sys.argv[2], sys.argv[3], True) ''' """ -Piece of Python code used in :py:func:`waflib.Tools.python.pytask` for byte-compiling python files +Piece of Python code used in :py:class:`waflib.Tools.python.pyo` and :py:class:`waflib.Tools.python.pyc` for byte-compiling python files """ -DISTUTILS_IMP = ['from distutils.sysconfig import get_config_var, get_python_lib'] +DISTUTILS_IMP = """ +try: + from distutils.sysconfig import get_config_var, get_python_lib +except ImportError: + from sysconfig import get_config_var, get_path + def get_python_lib(*k, **kw): + keyword='platlib' if kw.get('plat_specific') else 'purelib' + if 'prefix' in kw: + return get_path(keyword, vars={'installed_base': kw['prefix'], 'platbase': kw['prefix']}) + return get_path(keyword) +""".splitlines() @before_method('process_source') @feature('py') @@ -79,15 +89,19 @@ def process_py(self, node): """ Add signature of .py file, so it will be byte-compiled when necessary """ - assert(node.get_bld_sig()) - assert(getattr(self, 'install_path')), 'add features="py"' + assert(hasattr(self, 'install_path')), 'add features="py" for target "%s" in "%s/wscript".' % (self.target, self.path.nice_path()) + self.install_from = getattr(self, 'install_from', None) + relative_trick = getattr(self, 'relative_trick', True) + if self.install_from: + assert isinstance(self.install_from, Node.Node), \ + 'add features="py" for target "%s" in "%s/wscript" (%s).' 
% (self.target, self.path.nice_path(), type(self.install_from)) # where to install the python file if self.install_path: if self.install_from: - self.bld.install_files(self.install_path, [node], cwd=self.install_from, relative_trick=True) + self.add_install_files(install_to=self.install_path, install_from=node, cwd=self.install_from, relative_trick=relative_trick) else: - self.bld.install_files(self.install_path, [node], relative_trick=True) + self.add_install_files(install_to=self.install_path, install_from=node, relative_trick=relative_trick) lst = [] if self.env.PYC: @@ -97,14 +111,16 @@ def process_py(self, node): if self.install_path: if self.install_from: - pyd = Utils.subst_vars("%s/%s" % (self.install_path, node.path_from(self.install_from)), self.env) + target_dir = node.path_from(self.install_from) if relative_trick else node.name + pyd = Utils.subst_vars("%s/%s" % (self.install_path, target_dir), self.env) else: - pyd = Utils.subst_vars("%s/%s" % (self.install_path, node.path_from(self.path)), self.env) + target_dir = node.path_from(self.path) if relative_trick else node.name + pyd = Utils.subst_vars("%s/%s" % (self.install_path, target_dir), self.env) else: pyd = node.abspath() for ext in lst: - if self.env.PYTAG: + if self.env.PYTAG and not self.env.NOPYCACHE: # __pycache__ installation for python 3.2 - PEP 3147 name = node.name[:-3] pyobj = node.parent.get_bld().make_node('__pycache__').make_node("%s.%s.%s" % (name, self.env.PYTAG, ext)) @@ -116,13 +132,16 @@ def process_py(self, node): tsk.pyd = pyd if self.install_path: - self.bld.install_files(os.path.dirname(pyd), pyobj, cwd=node.parent.get_bld(), relative_trick=True) + self.add_install_files(install_to=os.path.dirname(pyd), install_from=pyobj, cwd=node.parent.get_bld(), relative_trick=relative_trick) class pyc(Task.Task): """ Byte-compiling python files """ color = 'PINK' + def __str__(self): + node = self.outputs[0] + return node.path_from(node.ctx.launch_node()) def run(self): cmd = [Utils.subst_vars('${PYTHON}', self.env), '-c', INST, self.inputs[0].abspath(), self.outputs[0].abspath(), self.pyd] ret = self.generator.bld.exec_command(cmd) @@ -133,6 +152,9 @@ class pyo(Task.Task): Byte-compiling python files """ color = 'PINK' + def __str__(self): + node = self.outputs[0] + return node.path_from(node.ctx.launch_node()) def run(self): cmd = [Utils.subst_vars('${PYTHON}', self.env), Utils.subst_vars('${PYFLAGS_OPT}', self.env), '-c', INST, self.inputs[0].abspath(), self.outputs[0].abspath(), self.pyd] ret = self.generator.bld.exec_command(cmd) @@ -162,6 +184,7 @@ def init_pyext(self): @feature('pyext') @before_method('apply_link', 'apply_bundle') def set_bundle(self): + """Mac-specific pyext extension that enables bundles from c_osx.py""" if Utils.unversioned_sys_platform() == 'darwin': self.mac_bundle = True @@ -206,7 +229,7 @@ def get_python_variables(self, variables, imports=None): try: out = self.cmd_and_log(self.env.PYTHON + ['-c', '\n'.join(program)], env=os_env) except Errors.WafError: - self.fatal('The distutils module is unusable: install "python-devel"?') + self.fatal('Could not run %r' % self.env.PYTHON) self.to_log(out) return_values = [] for s in out.splitlines(): @@ -222,6 +245,18 @@ def get_python_variables(self, variables, imports=None): else: break return return_values +@conf +def test_pyembed(self, mode, msg='Testing pyembed configuration'): + self.check(header_name='Python.h', define_name='HAVE_PYEMBED', msg=msg, + fragment=FRAG, errmsg='Could not build a python embedded interpreter', + features='%s 
%sprogram pyembed' % (mode, mode)) + +@conf +def test_pyext(self, mode, msg='Testing pyext configuration'): + self.check(header_name='Python.h', define_name='HAVE_PYEXT', msg=msg, + fragment=FRAG, errmsg='Could not build python extensions', + features='%s %sshlib pyext' % (mode, mode)) + @conf def python_cross_compile(self, features='pyembed pyext'): """ @@ -248,28 +283,26 @@ def python_cross_compile(self, features='pyembed pyext'): xx = self.env.CXX_NAME and 'cxx' or 'c' if 'pyext' in features: - flags = self.environ.get('PYTHON_PYEXT_LDFLAGS', self.environ.get('PYTHON_LDFLAGS', None)) + flags = self.environ.get('PYTHON_PYEXT_LDFLAGS', self.environ.get('PYTHON_LDFLAGS')) if flags is None: self.fatal('No flags provided through PYTHON_PYEXT_LDFLAGS as required') else: self.parse_flags(flags, 'PYEXT') - - self.check(header_name='Python.h', define_name='HAVE_PYEXT', msg='Testing pyext configuration', - features='%s %sshlib pyext' % (xx, xx), fragment=FRAG, errmsg='Could not build python extensions') + self.test_pyext(xx) if 'pyembed' in features: - flags = self.environ.get('PYTHON_PYEMBED_LDFLAGS', self.environ.get('PYTHON_LDFLAGS', None)) + flags = self.environ.get('PYTHON_PYEMBED_LDFLAGS', self.environ.get('PYTHON_LDFLAGS')) if flags is None: self.fatal('No flags provided through PYTHON_PYEMBED_LDFLAGS as required') else: self.parse_flags(flags, 'PYEMBED') - self.check(header_name='Python.h', define_name='HAVE_PYEMBED', msg='Testing pyembed configuration', - fragment=FRAG, errmsg='Could not build a python embedded interpreter', features='%s %sprogram pyembed' % (xx, xx)) + self.test_pyembed(xx) return True @conf def check_python_headers(conf, features='pyembed pyext'): """ - Check for headers and libraries necessary to extend or embed python by using the module *distutils*. + Check for headers and libraries necessary to extend or embed python. + It may use the module *distutils* or sysconfig in newer Python versions. 
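+	(the pythonX.Y-config tool is preferred for obtaining flags when available; the
+	distutils/sysconfig variables are used as the fallback)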
On success the environment variables xxx_PYEXT and xxx_PYEMBED are added: * PYEXT: for compiling python extensions @@ -278,14 +311,14 @@ def check_python_headers(conf, features='pyembed pyext'): features = Utils.to_list(features) assert ('pyembed' in features) or ('pyext' in features), "check_python_headers features must include 'pyembed' and/or 'pyext'" env = conf.env - if not env['CC_NAME'] and not env['CXX_NAME']: + if not env.CC_NAME and not env.CXX_NAME: conf.fatal('load a compiler first (gcc, g++, ..)') # bypass all the code below for cross-compilation if conf.python_cross_compile(features): return - if not env['PYTHON_VERSION']: + if not env.PYTHON_VERSION: conf.check_python_version() pybin = env.PYTHON @@ -293,7 +326,7 @@ def check_python_headers(conf, features='pyembed pyext'): conf.fatal('Could not find the python executable') # so we actually do all this for compatibility reasons and for obtaining pyext_PATTERN below - v = 'prefix SO LDFLAGS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED MACOSX_DEPLOYMENT_TARGET LDSHARED CFLAGS LDVERSION'.split() + v = 'prefix SO EXT_SUFFIX LDFLAGS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED MACOSX_DEPLOYMENT_TARGET LDSHARED CFLAGS LDVERSION'.split() try: lst = conf.get_python_variables(["get_config_var('%s') or ''" % x for x in v]) except RuntimeError: @@ -305,15 +338,19 @@ def check_python_headers(conf, features='pyembed pyext'): dct = dict(zip(v, lst)) x = 'MACOSX_DEPLOYMENT_TARGET' if dct[x]: - env[x] = conf.environ[x] = dct[x] - env['pyext_PATTERN'] = '%s' + dct['SO'] # not a mistake + env[x] = conf.environ[x] = str(dct[x]) + env.pyext_PATTERN = '%s' + (dct['EXT_SUFFIX'] or dct['SO']) # SO is deprecated in 3.5 and removed in 3.11 # Try to get pythonX.Y-config - num = '.'.join(env['PYTHON_VERSION'].split('.')[:2]) + num = '.'.join(env.PYTHON_VERSION.split('.')[:2]) conf.find_program([''.join(pybin) + '-config', 'python%s-config' % num, 'python-config-%s' % num, 'python%sm-config' % num], var='PYTHON_CONFIG', msg="python-config", mandatory=False) if env.PYTHON_CONFIG: + # check python-config output only once + if conf.env.HAVE_PYTHON_H: + return + # python2.6-config requires 3 runs all_flags = [['--cflags', '--libs', '--ldflags']] if sys.hexversion < 0x2070000: @@ -323,18 +360,37 @@ def check_python_headers(conf, features='pyembed pyext'): if 'pyembed' in features: for flags in all_flags: - conf.check_cfg(msg='Asking python-config for pyembed %r flags' % ' '.join(flags), path=env.PYTHON_CONFIG, package='', uselib_store='PYEMBED', args=flags) - - conf.check(header_name='Python.h', define_name='HAVE_PYEMBED', msg='Getting pyembed flags from python-config', - fragment=FRAG, errmsg='Could not build a python embedded interpreter', - features='%s %sprogram pyembed' % (xx, xx)) + # Python 3.8 has different flags for pyembed, needs --embed + embedflags = flags + ['--embed'] + try: + conf.check_cfg(msg='Asking python-config for pyembed %r flags' % ' '.join(embedflags), path=env.PYTHON_CONFIG, package='', uselib_store='PYEMBED', args=embedflags) + except conf.errors.ConfigurationError: + # However Python < 3.8 doesn't accept --embed, so we need a fallback + conf.check_cfg(msg='Asking python-config for pyembed %r flags' % ' '.join(flags), path=env.PYTHON_CONFIG, package='', uselib_store='PYEMBED', args=flags) + + try: + conf.test_pyembed(xx) + except conf.errors.ConfigurationError: + # python bug 7352 + if dct['Py_ENABLE_SHARED'] and dct['LIBDIR']: + env.append_unique('LIBPATH_PYEMBED', [dct['LIBDIR']]) + conf.test_pyembed(xx) + else: + raise if 'pyext' in 
features: for flags in all_flags: conf.check_cfg(msg='Asking python-config for pyext %r flags' % ' '.join(flags), path=env.PYTHON_CONFIG, package='', uselib_store='PYEXT', args=flags) - conf.check(header_name='Python.h', define_name='HAVE_PYEXT', msg='Getting pyext flags from python-config', - features='%s %sshlib pyext' % (xx, xx), fragment=FRAG, errmsg='Could not build python extensions') + try: + conf.test_pyext(xx) + except conf.errors.ConfigurationError: + # python bug 7352 + if dct['Py_ENABLE_SHARED'] and dct['LIBDIR']: + env.append_unique('LIBPATH_PYEXT', [dct['LIBDIR']]) + conf.test_pyext(xx) + else: + raise conf.define('HAVE_PYTHON_H', 1) return @@ -348,14 +404,14 @@ def check_python_headers(conf, features='pyembed pyext'): result = None if not dct["LDVERSION"]: - dct["LDVERSION"] = env['PYTHON_VERSION'] + dct["LDVERSION"] = env.PYTHON_VERSION # further simplification will be complicated - for name in ('python' + dct['LDVERSION'], 'python' + env['PYTHON_VERSION'] + 'm', 'python' + env['PYTHON_VERSION'].replace('.', '')): + for name in ('python' + dct['LDVERSION'], 'python' + env.PYTHON_VERSION + 'm', 'python' + env.PYTHON_VERSION.replace('.', '')): # LIBPATH_PYEMBED is already set; see if it works. - if not result and env['LIBPATH_PYEMBED']: - path = env['LIBPATH_PYEMBED'] + if not result and env.LIBPATH_PYEMBED: + path = env.LIBPATH_PYEMBED conf.to_log("\n\n# Trying default LIBPATH_PYEMBED: %r\n" % path) result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in LIBPATH_PYEMBED' % name) @@ -371,14 +427,19 @@ def check_python_headers(conf, features='pyembed pyext'): if not result: path = [os.path.join(dct['prefix'], "libs")] - conf.to_log("\n\n# try again with -L$prefix/libs, and pythonXY name rather than pythonX.Y (win32)\n") + conf.to_log("\n\n# try again with -L$prefix/libs, and pythonXY rather than pythonX.Y (win32)\n") result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in $prefix/libs' % name) + if not result: + path = [os.path.normpath(os.path.join(dct['INCLUDEPY'], '..', 'libs'))] + conf.to_log("\n\n# try again with -L$INCLUDEPY/../libs, and pythonXY rather than pythonX.Y (win32)\n") + result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in $INCLUDEPY/../libs' % name) + if result: break # do not forget to set LIBPATH_PYEMBED if result: - env['LIBPATH_PYEMBED'] = path + env.LIBPATH_PYEMBED = path env.append_value('LIB_PYEMBED', [name]) else: conf.to_log("\n\n### LIB NOT FOUND\n") @@ -386,31 +447,37 @@ def check_python_headers(conf, features='pyembed pyext'): # under certain conditions, python extensions must link to # python libraries, not just python embedding programs. 
if Utils.is_win32 or dct['Py_ENABLE_SHARED']: - env['LIBPATH_PYEXT'] = env['LIBPATH_PYEMBED'] - env['LIB_PYEXT'] = env['LIB_PYEMBED'] + env.LIBPATH_PYEXT = env.LIBPATH_PYEMBED + env.LIB_PYEXT = env.LIB_PYEMBED - conf.to_log("Include path for Python extensions (found via distutils module): %r\n" % (dct['INCLUDEPY'],)) - env['INCLUDES_PYEXT'] = [dct['INCLUDEPY']] - env['INCLUDES_PYEMBED'] = [dct['INCLUDEPY']] + conf.to_log("Found an include path for Python extensions: %r\n" % (dct['INCLUDEPY'],)) + env.INCLUDES_PYEXT = [dct['INCLUDEPY']] + env.INCLUDES_PYEMBED = [dct['INCLUDEPY']] # Code using the Python API needs to be compiled with -fno-strict-aliasing - if env['CC_NAME'] == 'gcc': - env.append_value('CFLAGS_PYEMBED', ['-fno-strict-aliasing']) - env.append_value('CFLAGS_PYEXT', ['-fno-strict-aliasing']) - if env['CXX_NAME'] == 'gcc': - env.append_value('CXXFLAGS_PYEMBED', ['-fno-strict-aliasing']) - env.append_value('CXXFLAGS_PYEXT', ['-fno-strict-aliasing']) + if env.CC_NAME == 'gcc': + env.append_unique('CFLAGS_PYEMBED', ['-fno-strict-aliasing']) + env.append_unique('CFLAGS_PYEXT', ['-fno-strict-aliasing']) + if env.CXX_NAME == 'gcc': + env.append_unique('CXXFLAGS_PYEMBED', ['-fno-strict-aliasing']) + env.append_unique('CXXFLAGS_PYEXT', ['-fno-strict-aliasing']) if env.CC_NAME == "msvc": - from distutils.msvccompiler import MSVCCompiler - dist_compiler = MSVCCompiler() - dist_compiler.initialize() - env.append_value('CFLAGS_PYEXT', dist_compiler.compile_options) - env.append_value('CXXFLAGS_PYEXT', dist_compiler.compile_options) - env.append_value('LINKFLAGS_PYEXT', dist_compiler.ldflags_shared) + try: + from distutils.msvccompiler import MSVCCompiler + except ImportError: + # From https://github.com/python/cpython/blob/main/Lib/distutils/msvccompiler.py + env.append_value('CFLAGS_PYEXT', [ '/nologo', '/Ox', '/MD', '/W3', '/GX', '/DNDEBUG']) + env.append_value('CXXFLAGS_PYEXT', [ '/nologo', '/Ox', '/MD', '/W3', '/GX', '/DNDEBUG']) + env.append_value('LINKFLAGS_PYEXT', ['/DLL', '/nologo', '/INCREMENTAL:NO']) + else: + dist_compiler = MSVCCompiler() + dist_compiler.initialize() + env.append_value('CFLAGS_PYEXT', dist_compiler.compile_options) + env.append_value('CXXFLAGS_PYEXT', dist_compiler.compile_options) + env.append_value('LINKFLAGS_PYEXT', dist_compiler.ldflags_shared) - # See if it compiles - conf.check(header_name='Python.h', define_name='HAVE_PYTHON_H', uselib='PYEMBED', fragment=FRAG, errmsg='Distutils not installed? Broken python installation? Get python-config now!') + conf.check(header_name='Python.h', define_name='HAVE_PYTHON_H', uselib='PYEMBED', fragment=FRAG, errmsg='Could not build a Python embedded interpreter') @conf def check_python_version(conf, minver=None): @@ -418,9 +485,9 @@ def check_python_version(conf, minver=None): Check if the python interpreter is found matching a given minimum version. minver should be a tuple, eg. to check for python >= 2.4.2 pass (2,4,2) as minver. - If successful, PYTHON_VERSION is defined as 'MAJOR.MINOR' - (eg. '2.4') of the actual python version found, and PYTHONDIR is - defined, pointing to the site-packages directory appropriate for + If successful, PYTHON_VERSION is defined as 'MAJOR.MINOR' (eg. '2.4') + of the actual python version found, and PYTHONDIR and PYTHONARCHDIR + are defined, pointing to the site-packages directories appropriate for this python version, where modules/packages/extensions should be installed. 
@@ -428,55 +495,47 @@ def check_python_version(conf, minver=None): :type minver: tuple of int """ assert minver is None or isinstance(minver, tuple) - pybin = conf.env['PYTHON'] + pybin = conf.env.PYTHON if not pybin: conf.fatal('could not find the python executable') # Get python version string cmd = pybin + ['-c', 'import sys\nfor x in sys.version_info: print(str(x))'] - Logs.debug('python: Running python command %r' % cmd) + Logs.debug('python: Running python command %r', cmd) lines = conf.cmd_and_log(cmd).split() - assert len(lines) == 5, "found %i lines, expected 5: %r" % (len(lines), lines) + assert len(lines) == 5, "found %r lines, expected 5: %r" % (len(lines), lines) pyver_tuple = (int(lines[0]), int(lines[1]), int(lines[2]), lines[3], int(lines[4])) - # compare python version with the minimum required + # Compare python version with the minimum required result = (minver is None) or (pyver_tuple >= minver) if result: # define useful environment variables pyver = '.'.join([str(x) for x in pyver_tuple[:2]]) - conf.env['PYTHON_VERSION'] = pyver + conf.env.PYTHON_VERSION = pyver if 'PYTHONDIR' in conf.env: # Check if --pythondir was specified - pydir = conf.env['PYTHONDIR'] + pydir = conf.env.PYTHONDIR elif 'PYTHONDIR' in conf.environ: # Check environment for PYTHONDIR pydir = conf.environ['PYTHONDIR'] else: # Finally, try to guess if Utils.is_win32: - (python_LIBDEST, pydir) = conf.get_python_variables( - ["get_config_var('LIBDEST') or ''", - "get_python_lib(standard_lib=0) or ''"]) + (pydir,) = conf.get_python_variables(["get_python_lib(standard_lib=0) or ''"]) else: - python_LIBDEST = None - (pydir,) = conf.get_python_variables( ["get_python_lib(standard_lib=0) or ''"]) - if python_LIBDEST is None: - if conf.env['LIBDIR']: - python_LIBDEST = os.path.join(conf.env['LIBDIR'], "python" + pyver) - else: - python_LIBDEST = os.path.join(conf.env['PREFIX'], "lib", "python" + pyver) + (pydir,) = conf.get_python_variables(["get_python_lib(standard_lib=0, prefix=%r) or ''" % conf.env.PREFIX]) if 'PYTHONARCHDIR' in conf.env: # Check if --pythonarchdir was specified - pyarchdir = conf.env['PYTHONARCHDIR'] + pyarchdir = conf.env.PYTHONARCHDIR elif 'PYTHONARCHDIR' in conf.environ: # Check environment for PYTHONDIR pyarchdir = conf.environ['PYTHONARCHDIR'] else: # Finally, try to guess - (pyarchdir, ) = conf.get_python_variables( ["get_python_lib(plat_specific=1, standard_lib=0) or ''"]) + (pyarchdir, ) = conf.get_python_variables(["get_python_lib(plat_specific=1, standard_lib=0, prefix=%r) or ''" % conf.env.PREFIX]) if not pyarchdir: pyarchdir = pydir @@ -484,8 +543,8 @@ def check_python_version(conf, minver=None): conf.define('PYTHONDIR', pydir) conf.define('PYTHONARCHDIR', pyarchdir) - conf.env['PYTHONDIR'] = pydir - conf.env['PYTHONARCHDIR'] = pyarchdir + conf.env.PYTHONDIR = pydir + conf.env.PYTHONARCHDIR = pyarchdir # Feedback pyver_full = '.'.join(map(str, pyver_tuple[:3])) @@ -493,7 +552,7 @@ def check_python_version(conf, minver=None): conf.msg('Checking for python version', pyver_full) else: minver_str = '.'.join(map(str, minver)) - conf.msg('Checking for python version', pyver_tuple, ">= %s" % (minver_str,) and 'GREEN' or 'YELLOW') + conf.msg('Checking for python version >= %s' % (minver_str,), pyver_full, color=result and 'GREEN' or 'YELLOW') if not result: conf.fatal('The python version is too old, expecting %r' % (minver,)) @@ -519,13 +578,13 @@ def configure(conf): :param module_name: module :type module_name: string """ - msg = "Checking for python module '%s'" % module_name + msg 
= "Checking for python module %r" % module_name if condition: msg = '%s (%s)' % (msg, condition) conf.start_msg(msg) try: - ret = conf.cmd_and_log(conf.env['PYTHON'] + ['-c', PYTHON_MODULE_TEMPLATE % module_name]) - except Exception: + ret = conf.cmd_and_log(conf.env.PYTHON + ['-c', PYTHON_MODULE_TEMPLATE % module_name]) + except Errors.WafError: conf.end_msg(False) conf.fatal('Could not find the python module %r' % module_name) @@ -535,13 +594,12 @@ def configure(conf): if ret == 'unknown version': conf.fatal('Could not check the %s version' % module_name) - from distutils.version import LooseVersion def num(*k): if isinstance(k[0], int): - return LooseVersion('.'.join([str(x) for x in k])) + return Utils.loose_version('.'.join([str(x) for x in k])) else: - return LooseVersion(k[0]) - d = {'num': num, 'ver': LooseVersion(ret)} + return Utils.loose_version(k[0]) + d = {'num': num, 'ver': Utils.loose_version(ret)} ev = eval(condition, {}, d) if not ev: conf.fatal('The %s version does not satisfy the requirements' % module_name) @@ -556,22 +614,26 @@ def configure(conf): Detect the python interpreter """ v = conf.env - v['PYTHON'] = Options.options.python or os.environ.get('PYTHON', sys.executable) - if Options.options.pythondir: - v['PYTHONDIR'] = Options.options.pythondir - if Options.options.pythonarchdir: - v['PYTHONARCHDIR'] = Options.options.pythonarchdir - + if getattr(Options.options, 'pythondir', None): + v.PYTHONDIR = Options.options.pythondir + if getattr(Options.options, 'pythonarchdir', None): + v.PYTHONARCHDIR = Options.options.pythonarchdir + if getattr(Options.options, 'nopycache', None): + v.NOPYCACHE=Options.options.nopycache + + if not v.PYTHON: + v.PYTHON = [getattr(Options.options, 'python', None) or sys.executable] + v.PYTHON = Utils.to_list(v.PYTHON) conf.find_program('python', var='PYTHON') - v['PYFLAGS'] = '' - v['PYFLAGS_OPT'] = '-O' + v.PYFLAGS = '' + v.PYFLAGS_OPT = '-O' - v['PYC'] = getattr(Options.options, 'pyc', 1) - v['PYO'] = getattr(Options.options, 'pyo', 1) + v.PYC = getattr(Options.options, 'pyc', 1) + v.PYO = getattr(Options.options, 'pyo', 1) try: - v.PYTAG = conf.cmd_and_log(conf.env.PYTHON + ['-c', "import imp;print(imp.get_tag())"]).strip() + v.PYTAG = conf.cmd_and_log(conf.env.PYTHON + ['-c', "import sys\ntry:\n print(sys.implementation.cache_tag)\nexcept AttributeError:\n import imp\n print(imp.get_tag())\n"]).strip() except Errors.WafError: pass @@ -584,6 +646,8 @@ def options(opt): help = 'Do not install bytecode compiled .pyc files (configuration) [Default:install]') pyopt.add_option('--nopyo', dest='pyo', action='store_false', default=1, help='Do not install optimised compiled .pyo files (configuration) [Default:install]') + pyopt.add_option('--nopycache',dest='nopycache', action='store_true', + help='Do not use __pycache__ directory to install objects [Default:auto]') pyopt.add_option('--python', dest="python", help='python binary to be used [Default: %s]' % sys.executable) pyopt.add_option('--pythondir', dest='pythondir', diff --git a/waflib/Tools/qt5.py b/waflib/Tools/qt5.py index e4a21148b8..7f26ac5f76 100644 --- a/waflib/Tools/qt5.py +++ b/waflib/Tools/qt5.py @@ -1,14 +1,11 @@ #!/usr/bin/env python # encoding: utf-8 -# Thomas Nagy, 2006-2015 (ita) +# Thomas Nagy, 2006-2018 (ita) +# Rafaël Kooi, 2023 (RA-Kooi) """ - -Tool Description -================ - -This tool helps with finding Qt5 tools and libraries, -and also provides syntactic sugar for using Qt5 tools. 
+This tool helps with finding Qt5 and Qt6 tools and libraries, +and also provides syntactic sugar for using Qt5 and Qt6 tools. The following snippet illustrates the tool usage:: @@ -21,11 +18,28 @@ def configure(conf): def build(bld): bld( features = 'qt5 cxx cxxprogram', - uselib = 'QTCORE QTGUI QTOPENGL QTSVG', + uselib = 'QT5CORE QT5GUI QT5OPENGL QT5SVG', source = 'main.cpp textures.qrc aboutDialog.ui', target = 'window', ) +Alternatively the following snippet illustrates Qt6 tool usage:: + + def options(opt): + opt.load('compiler_cxx qt5') + + def configure(conf): + conf.want_qt6 = True + conf.load('compiler_cxx qt5') + + def build(bld): + bld( + features = 'qt6 cxx cxxprogram', + uselib = 'QT6CORE QT6GUI QT6OPENGL QT6SVG', + source = 'main.cpp textures.qrc aboutDialog.ui', + target = 'window', + ) + Here, the UI description and resource files will be processed to generate code. @@ -52,7 +66,7 @@ def add_includes_paths(self): incs = set(self.to_list(getattr(self, 'includes', ''))) for x in self.compiled_tasks: incs.add(x.inputs[0].parent.path_from(self.path)) - self.includes = list(incs) + self.includes = sorted(incs) Note: another tool provides Qt processing that does not require .moc includes, see 'playground/slow_qt/'. @@ -60,9 +74,36 @@ def add_includes_paths(self): A few options (--qt{dir,bin,...}) and environment variables (QT5_{ROOT,DIR,MOC,UIC,XCOMPILE}) allow finer tuning of the tool, tool path selection, etc; please read the source for more info. +For Qt6 replace the QT5_ prefix with QT6_. + +The detection uses pkg-config on Linux by default. The list of +libraries to be requested to pkg-config is formulated by scanning +in the QTLIBS directory (that can be passed via --qtlibs or by +setting the environment variable QT5_LIBDIR or QT6_LIBDIR otherwise is +derived by querying qmake for QT_INSTALL_LIBS directory) for +shared/static libraries present. +Alternatively the list of libraries to be requested via pkg-config +can be set using the qt5_vars attribute, ie: + + conf.qt5_vars = ['Qt5Core', 'Qt5Gui', 'Qt5Widgets', 'Qt5Test']; +For Qt6 use the qt6_vars attribute. + +This can speed up configuration phase if needed libraries are +known beforehand, can improve detection on systems with a +sparse QT5/Qt6 libraries installation (ie. NIX) and can improve +detection of some header-only Qt modules (ie. Qt5UiPlugin). 
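A configure sketch restricting pkg-config probing to a known module set via the qt5_vars attribute described above (the module list is only an example)::

	def options(opt):
		opt.load('compiler_cxx qt5')

	def configure(conf):
		# must be set before the qt5 tool is loaded
		conf.qt5_vars = ['Qt5Core', 'Qt5Gui', 'Qt5Widgets']
		conf.load('compiler_cxx qt5')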
+ +To force static library detection use: +QT5_XCOMPILE=1 QT5_FORCE_STATIC=1 waf configure + +To use Qt6 set the want_qt6 attribute, ie: + + conf.want_qt6 = True; """ +from __future__ import with_statement + try: from xml.sax import make_parser from xml.sax.handler import ContentHandler @@ -72,16 +113,16 @@ def add_includes_paths(self): else: has_xml = True -import os, sys +import os, sys, re from waflib.Tools import cxx -from waflib import Task, Utils, Options, Errors, Context -from waflib.TaskGen import feature, after_method, extension +from waflib import Build, Task, Utils, Options, Errors, Context +from waflib.TaskGen import feature, after_method, extension, before_method from waflib.Configure import conf from waflib import Logs MOC_H = ['.h', '.hpp', '.hxx', '.hh'] """ -File extensions associated to the .moc files +File extensions associated to .moc files """ EXT_RCC = ['.qrc'] @@ -99,45 +140,6 @@ def add_includes_paths(self): File extensions of C++ files that may require a .moc processing """ -QT5_LIBS = ''' -qtmain -Qt5Bluetooth -Qt5CLucene -Qt5Concurrent -Qt5Core -Qt5DBus -Qt5Declarative -Qt5DesignerComponents -Qt5Designer -Qt5Gui -Qt5Help -Qt5MultimediaQuick_p -Qt5Multimedia -Qt5MultimediaWidgets -Qt5Network -Qt5Nfc -Qt5OpenGL -Qt5Positioning -Qt5PrintSupport -Qt5Qml -Qt5QuickParticles -Qt5Quick -Qt5QuickTest -Qt5Script -Qt5ScriptTools -Qt5Sensors -Qt5SerialPort -Qt5Sql -Qt5Svg -Qt5Test -Qt5WebKit -Qt5WebKitWidgets -Qt5Widgets -Qt5WinExtras -Qt5X11Extras -Qt5XmlPatterns -Qt5Xml''' - class qxx(Task.classes['cxx']): """ Each C++ file can have zero or several .moc files to create. @@ -171,9 +173,6 @@ def create_moc_task(self, h_node, m_node): If several libraries use the same classes, it is possible that moc will run several times (Issue 1318) It is not possible to change the file names, but we can assume that the moc transformation will be identical, and the moc tasks can be shared in a global cache. - - The defines passed to moc will then depend on task generator order. If this is not acceptable, then - use the tool slow_qt5 instead (and enjoy the slow builds... 
:-( ) """ try: moc_cache = self.generator.bld.moc_cache @@ -186,13 +185,14 @@ def create_moc_task(self, h_node, m_node): tsk = moc_cache[h_node] = Task.classes['moc'](env=self.env, generator=self.generator) tsk.set_inputs(h_node) tsk.set_outputs(m_node) + tsk.env.append_unique('MOC_FLAGS', '-i') if self.generator: self.generator.tasks.append(tsk) # direct injection in the build phase (safe because called from the main thread) gen = self.generator.bld.producer - gen.outstanding.insert(0, tsk) + gen.outstanding.append(tsk) gen.total += 1 return tsk @@ -201,22 +201,17 @@ def create_moc_task(self, h_node, m_node): # remove the signature, it must be recomputed with the moc task delattr(self, 'cache_sig') - def moc_h_ext(self): - try: - ext = Options.options.qt_header_ext.split() - except AttributeError: - pass - if not ext: - ext = MOC_H - return ext - def add_moc_tasks(self): """ - Create the moc tasks by looking in ``bld.raw_deps[self.uid()]`` + Creates moc tasks by looking in the list of file dependencies ``bld.raw_deps[self.uid()]`` """ node = self.inputs[0] bld = self.generator.bld + # skip on uninstall due to generated files + if bld.is_install == Build.UNINSTALL: + return + try: # compute the signature once to know if there is a moc file to create self.signature() @@ -230,7 +225,7 @@ def add_moc_tasks(self): include_nodes = [node.parent] + self.generator.includes_nodes moctasks = [] - mocfiles = set([]) + mocfiles = set() for d in bld.raw_deps.get(self.uid(), []): if not d.endswith('.moc'): continue @@ -242,29 +237,26 @@ def add_moc_tasks(self): # find the source associated with the moc file h_node = None - base2 = d[:-4] - for x in include_nodes: - for e in self.moc_h_ext(): - h_node = x.find_node(base2 + e) - if h_node: - break - if h_node: - m_node = h_node.change_ext('.moc') - break + + # foo.moc from foo.cpp + prefix = node.name[:node.name.rfind('.')] + if base2 == prefix: + h_node = node else: - # foo.cpp -> foo.cpp.moc - for k in EXT_QT5: - if base2.endswith(k): - for x in include_nodes: - h_node = x.find_node(base2) - if h_node: - break + # this deviates from the standard + # if bar.cpp includes foo.moc, then assume it is from foo.h + for x in include_nodes: + for e in MOC_H: + h_node = x.find_node(base2 + e) if h_node: - m_node = h_node.change_ext(k + '.moc') break - - if not h_node: + else: + continue + break + if h_node: + m_node = h_node.change_ext('.moc') + else: raise Errors.WafError('No source found for %r which is a moc file' % d) # create the moc task @@ -276,16 +268,16 @@ def add_moc_tasks(self): self.moc_done = 1 class trans_update(Task.Task): - """Update a .ts files from a list of C++ files""" + """Updates a .ts files from a list of C++ files""" run_str = '${QT_LUPDATE} ${SRC} -ts ${TGT}' color = 'BLUE' -Task.update_outputs(trans_update) class XMLHandler(ContentHandler): """ - Parser for *.qrc* files + Parses ``.qrc`` files """ def __init__(self): + ContentHandler.__init__(self) self.buf = [] self.files = [] def startElement(self, name, attrs): @@ -299,8 +291,8 @@ def characters(self, cars): @extension(*EXT_RCC) def create_rcc_task(self, node): - "Create rcc and cxx tasks for *.qrc* files" - rcnode = node.change_ext('_rc.cpp') + "Creates rcc and cxx tasks for ``.qrc`` files" + rcnode = node.change_ext('_rc.%d.cpp' % self.idx) self.create_task('rcc', node, rcnode) cpptask = self.create_task('cxx', rcnode, rcnode.change_ext('.o')) try: @@ -311,31 +303,66 @@ def create_rcc_task(self, node): @extension(*EXT_UI) def create_uic_task(self, node): - "hook for uic tasks" - 
uictask = self.create_task('ui5', node) - uictask.outputs = [self.path.find_or_declare(self.env['ui_PATTERN'] % node.name[:-3])] + "Create uic tasks for user interface ``.ui`` definition files" + + """ + If UIC file is used in more than one bld, we would have a conflict in parallel execution + It is not possible to change the file names (like .self.idx. as for objects) as they have + to be referenced by the source file, but we can assume that the transformation will be identical + and the tasks can be shared in a global cache. + """ + try: + uic_cache = self.bld.uic_cache + except AttributeError: + uic_cache = self.bld.uic_cache = {} + + if node not in uic_cache: + uictask = uic_cache[node] = self.create_task('ui5', node) + uictask.outputs = [node.parent.find_or_declare(self.env.ui_PATTERN % node.name[:-3])] @extension('.ts') def add_lang(self, node): - """add all the .ts file into self.lang""" + """Adds all the .ts file into ``self.lang``""" self.lang = self.to_list(getattr(self, 'lang', [])) + [node] -@feature('qt5') +@feature('qt5', 'qt6') +@before_method('process_source') +def process_mocs(self): + """ + Processes MOC files included in headers:: + + def build(bld): + bld.program(features='qt5', source='main.cpp', target='app', use='QT5CORE', moc='foo.h') + + The build will run moc on foo.h to create moc_foo.n.cpp. The number in the file name + is provided to avoid name clashes when the same headers are used by several targets. + """ + lst = self.to_nodes(getattr(self, 'moc', [])) + self.source = self.to_list(getattr(self, 'source', [])) + for x in lst: + prefix = x.name[:x.name.rfind('.')] # foo.h -> foo + moc_target = 'moc_%s.%d.cpp' % (prefix, self.idx) + moc_node = x.parent.find_or_declare(moc_target) + self.source.append(moc_node) + + self.create_task('moc', x, moc_node) + +@feature('qt5', 'qt6') @after_method('apply_link') def apply_qt5(self): """ - Add MOC_FLAGS which may be necessary for moc:: + Adds MOC_FLAGS which may be necessary for moc:: def build(bld): - bld.program(features='qt5', source='main.cpp', target='app', use='QTCORE') + bld.program(features='qt5', source='main.cpp', target='app', use='QT5CORE') The additional parameters are: - :param lang: list of translation files (\*.ts) to process + :param lang: list of translation files (\\*.ts) to process :type lang: list of :py:class:`waflib.Node.Node` or string without the .ts extension - :param update: whether to process the C++ files to update the \*.ts files (use **waf --translate**) + :param update: whether to process the C++ files to update the \\*.ts files (use **waf --translate**) :type update: bool - :param langname: if given, transform the \*.ts files into a .qrc files to include in the binary file + :param langname: if given, transform the \\*.ts files into a .qrc files to include in the binary file :type langname: :py:class:`waflib.Node.Node` or string without the .qrc extension """ if getattr(self, 'lang', None): @@ -343,11 +370,11 @@ def build(bld): for x in self.to_list(self.lang): if isinstance(x, str): x = self.path.find_resource(x + '.ts') - qmtasks.append(self.create_task('ts2qm', x, x.change_ext('.qm'))) + qmtasks.append(self.create_task('ts2qm', x, x.change_ext('.%d.qm' % self.idx))) if getattr(self, 'update', None) and Options.options.trans_qt5: cxxnodes = [a.inputs[0] for a in self.compiled_tasks] + [ - a.inputs[0] for a in self.tasks if getattr(a, 'inputs', None) and a.inputs[0].name.endswith('.ui')] + a.inputs[0] for a in self.tasks if a.inputs and a.inputs[0].name.endswith('.ui')] for x in qmtasks: 
self.create_task('trans_update', cxxnodes, x.inputs) @@ -355,14 +382,15 @@ def build(bld): qmnodes = [x.outputs[0] for x in qmtasks] rcnode = self.langname if isinstance(rcnode, str): - rcnode = self.path.find_or_declare(rcnode + '.qrc') + rcnode = self.path.find_or_declare(rcnode + ('.%d.qrc' % self.idx)) t = self.create_task('qm2rcc', qmnodes, rcnode) k = create_rcc_task(self, t.outputs[0]) self.link_task.inputs.append(k.outputs[0]) lst = [] - for flag in self.to_list(self.env['CXXFLAGS']): - if len(flag) < 2: continue + for flag in self.to_list(self.env.CXXFLAGS): + if len(flag) < 2: + continue f = flag[0:2] if f in ('-D', '-I', '/D', '/I'): if (f[0] == '/'): @@ -374,13 +402,13 @@ def build(bld): @extension(*EXT_QT5) def cxx_hook(self, node): """ - Re-map C++ file extensions to the :py:class:`waflib.Tools.qt5.qxx` task. + Re-maps C++ file extensions to the :py:class:`waflib.Tools.qt5.qxx` task. """ return self.create_compiled_task('qxx', node) class rcc(Task.Task): """ - Process *.qrc* files + Processes ``.qrc`` files """ color = 'BLUE' run_str = '${QT_RCC} -name ${tsk.rcname()} ${SRC[0].abspath()} ${RCC_ST} -o ${TGT}' @@ -392,37 +420,62 @@ def rcname(self): def scan(self): """Parse the *.qrc* files""" if not has_xml: - Logs.error('no xml support was found, the rcc dependencies will be incomplete!') + Logs.error('No xml.sax support was found, rcc dependencies will be incomplete!') return ([], []) parser = make_parser() curHandler = XMLHandler() parser.setContentHandler(curHandler) - fi = open(self.inputs[0].abspath(), 'r') - try: - parser.parse(fi) - finally: - fi.close() + with open(self.inputs[0].abspath(), 'r') as f: + parser.parse(f) nodes = [] names = [] root = self.inputs[0].parent for x in curHandler.files: nd = root.find_resource(x) - if nd: nodes.append(nd) - else: names.append(x) + if nd: + nodes.append(nd) + else: + names.append(x) return (nodes, names) + def quote_flag(self, x): + """ + Override Task.quote_flag. QT parses the argument files + differently than cl.exe and link.exe + + :param x: flag + :type x: string + :return: quoted flag + :rtype: string + """ + return x + + class moc(Task.Task): """ - Create *.moc* files + Creates ``.moc`` files """ color = 'BLUE' run_str = '${QT_MOC} ${MOC_FLAGS} ${MOCCPPPATH_ST:INCPATHS} ${MOCDEFINES_ST:DEFINES} ${SRC} ${MOC_ST} ${TGT}' + def quote_flag(self, x): + """ + Override Task.quote_flag. QT parses the argument files + differently than cl.exe and link.exe + + :param x: flag + :type x: string + :return: quoted flag + :rtype: string + """ + return x + + class ui5(Task.Task): """ - Process *.ui* files + Processes ``.ui`` files """ color = 'BLUE' run_str = '${QT_UIC} ${SRC} -o ${TGT}' @@ -430,18 +483,17 @@ class ui5(Task.Task): class ts2qm(Task.Task): """ - Create *.qm* files from *.ts* files + Generates ``.qm`` files from ``.ts`` files """ color = 'BLUE' run_str = '${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}' class qm2rcc(Task.Task): """ - Transform *.qm* files into *.rc* files + Generates ``.qrc`` files from ``.qm`` files """ color = 'BLUE' after = 'ts2qm' - def run(self): """Create a qrc file including the inputs""" txt = '\n'.join(['%s' % k.path_from(self.outputs[0].parent) for k in self.inputs]) @@ -453,22 +505,76 @@ def configure(self): Besides the configuration options, the environment variable QT5_ROOT may be used to give the location of the qt5 libraries (absolute path). 
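Referring back to the lang, update and langname parameters documented for apply_qt5 above, a build sketch using them might look as follows (target and file names are illustrative only)::

	def build(bld):
		bld(
			features = 'qt5 cxx cxxprogram',
			use      = 'QT5CORE QT5GUI QT5WIDGETS',
			source   = 'main.cpp dialog.ui res.qrc',
			target   = 'app',
			lang     = ['de', 'fr'],       # de.ts and fr.ts next to the wscript
			langname = 'translations',     # .qm files bundled into a generated .qrc
			update   = True,               # refresh the .ts files with 'waf --translate'
		)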
- The detection will use the program *pkg-config* through :py:func:`waflib.Tools.config_c.check_cfg` + The detection uses the program ``pkg-config`` through :py:func:`waflib.Tools.config_c.check_cfg` """ + if 'COMPILER_CXX' not in self.env: + self.fatal('No CXX compiler defined: did you forget to configure compiler_cxx first?') + + self.want_qt6 = getattr(self, 'want_qt6', False) + + if self.want_qt6: + self.qt_vars = Utils.to_list(getattr(self, 'qt6_vars', [])) + else: + self.qt_vars = Utils.to_list(getattr(self, 'qt5_vars', [])) + self.find_qt5_binaries() + self.set_qt5_libs_dir() self.set_qt5_libs_to_check() self.set_qt5_defines() self.find_qt5_libraries() self.add_qt5_rpath() self.simplify_qt5_libs() + # warn about this during the configuration too + if not has_xml: + Logs.error('No xml.sax support was found, rcc dependencies will be incomplete!') + + feature = 'qt6' if self.want_qt6 else 'qt5' + + # Qt5 may be compiled with '-reduce-relocations' which requires dependent programs to have -fPIE or -fPIC? + frag = '#include \nint main(int argc, char **argv) {QMap m;return m.keys().size();}\n' + uses = 'QT6CORE' if self.want_qt6 else 'QT5CORE' + + # Qt6 requires C++17 (https://www.qt.io/blog/qt-6.0-released) + flag_list = [] + if self.env.CXX_NAME == 'msvc': + stdflag = '/std:c++17' if self.want_qt6 else '/std:c++11' + flag_list = [[], ['/Zc:__cplusplus', '/permissive-', stdflag]] + else: + stdflag = '-std=c++17' if self.want_qt6 else '-std=c++11' + flag_list = [[], '-fPIE', '-fPIC', stdflag, [stdflag, '-fPIE'], [stdflag, '-fPIC']] + for flag in flag_list: + msg = 'See if Qt files compile ' + if flag: + msg += 'with %s' % flag + try: + self.check(features=feature + ' cxx', use=uses, uselib_store=feature, cxxflags=flag, fragment=frag, msg=msg) + except self.errors.ConfigurationError: + pass + else: + break + else: + self.fatal('Could not build a simple Qt application') + + # FreeBSD does not add /usr/local/lib and the pkg-config files do not provide it either :-/ + if Utils.unversioned_sys_platform() == 'freebsd': + frag = '#include \nint main(int argc, char **argv) {QMap m;return m.keys().size();}\n' + try: + self.check(features=feature + ' cxx cxxprogram', use=uses, fragment=frag, msg='Can we link Qt programs on FreeBSD directly?') + except self.errors.ConfigurationError: + self.check(features=feature + ' cxx cxxprogram', use=uses, uselib_store=feature, libpath='/usr/local/lib', fragment=frag, msg='Is /usr/local/lib required?') + @conf def find_qt5_binaries(self): + """ + Detects Qt programs such as qmake, moc, uic, lrelease + """ env = self.env opt = Options.options qtdir = getattr(opt, 'qtdir', '') qtbin = getattr(opt, 'qtbin', '') + qt_ver = '6' if self.want_qt6 else '5' paths = [] @@ -477,16 +583,19 @@ def find_qt5_binaries(self): # the qt directory has been given from QT5_ROOT - deduce the qt binary path if not qtdir: - qtdir = os.environ.get('QT5_ROOT', '') - qtbin = os.environ.get('QT5_BIN', None) or os.path.join(qtdir, 'bin') + qtdir = self.environ.get('QT' + qt_ver + '_ROOT', '') + qtbin = self.environ.get('QT' + qt_ver + '_BIN') or os.path.join(qtdir, 'bin') if qtbin: paths = [qtbin] # no qtdir, look in the path and in /usr/local/Trolltech if not qtdir: - paths = os.environ.get('PATH', '').split(os.pathsep) - paths.append('/usr/share/qt5/bin/') + paths = self.environ.get('PATH', '').split(os.pathsep) + paths.extend([ + '/usr/share/qt' + qt_ver + '/bin', + '/usr/local/lib/qt' + qt_ver + '/bin']) + try: lst = Utils.listdir('/usr/local/Trolltech/') except OSError: @@ -504,8 
+613,10 @@ def find_qt5_binaries(self): # at the end, try to find qmake in the paths given # keep the one with the highest version cand = None - prev_ver = ['5', '0', '0'] - for qmk in ('qmake-qt5', 'qmake5', 'qmake'): + prev_ver = ['0', '0', '0'] + qmake_vars = ['qmake-qt' + qt_ver, 'qmake' + qt_ver, 'qmake'] + + for qmk in qmake_vars: try: qmake = self.find_program(qmk, path_list=paths) except self.errors.ConfigurationError: @@ -518,7 +629,7 @@ def find_qt5_binaries(self): else: if version: new_ver = version.split('.') - if new_ver > prev_ver: + if new_ver[0] == qt_ver and new_ver > prev_ver: cand = qmake prev_ver = new_ver @@ -529,7 +640,7 @@ def find_qt5_binaries(self): except self.errors.ConfigurationError: pass else: - cmd = self.env.QTCHOOSER + ['-qt=5', '-run-tool=qmake'] + cmd = self.env.QTCHOOSER + ['-qt=' + qt_ver, '-run-tool=qmake'] try: version = self.cmd_and_log(cmd + ['-query', 'QT_VERSION']) except self.errors.WafError: @@ -540,10 +651,17 @@ def find_qt5_binaries(self): if cand: self.env.QMAKE = cand else: - self.fatal('Could not find qmake for qt5') + self.fatal('Could not find qmake for qt' + qt_ver) - self.env.QT_INSTALL_BINS = qtbin = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_BINS']).strip() + os.sep - paths.insert(0, qtbin) + # Once we have qmake, we want to query qmake for the paths where we want to look for tools instead + paths = [] + + self.env.QT_HOST_BINS = qtbin = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_HOST_BINS']).strip() + paths.append(qtbin) + + if self.want_qt6: + self.env.QT_HOST_LIBEXECS = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_HOST_LIBEXECS']).strip() + paths.append(self.env.QT_HOST_LIBEXECS) def find_bin(lst, var): if var in env: @@ -557,115 +675,129 @@ def find_bin(lst, var): env[var]=ret break - find_bin(['uic-qt5', 'uic'], 'QT_UIC') + find_bin(['uic-qt' + qt_ver, 'uic'], 'QT_UIC') if not env.QT_UIC: - self.fatal('cannot find the uic compiler for qt5') + self.fatal('cannot find the uic compiler for qt' + qt_ver) self.start_msg('Checking for uic version') uicver = self.cmd_and_log(env.QT_UIC + ['-version'], output=Context.BOTH) uicver = ''.join(uicver).strip() uicver = uicver.replace('Qt User Interface Compiler ','').replace('User Interface Compiler for Qt', '') self.end_msg(uicver) - if uicver.find(' 3.') != -1 or uicver.find(' 4.') != -1: - self.fatal('this uic compiler is for qt3 or qt5, add uic for qt5 to your path') - - find_bin(['moc-qt5', 'moc'], 'QT_MOC') - find_bin(['rcc-qt5', 'rcc'], 'QT_RCC') - find_bin(['lrelease-qt5', 'lrelease'], 'QT_LRELEASE') - find_bin(['lupdate-qt5', 'lupdate'], 'QT_LUPDATE') - - env['UIC_ST'] = '%s -o %s' - env['MOC_ST'] = '-o' - env['ui_PATTERN'] = 'ui_%s.h' - env['QT_LRELEASE_FLAGS'] = ['-silent'] + if uicver.find(' 3.') != -1 or uicver.find(' 4.') != -1 or (self.want_qt6 and uicver.find(' 5.') != -1): + if self.want_qt6: + self.fatal('this uic compiler is for qt3 or qt4 or qt5, add uic for qt6 to your path') + else: + self.fatal('this uic compiler is for qt3 or qt4, add uic for qt5 to your path') + + find_bin(['moc-qt' + qt_ver, 'moc'], 'QT_MOC') + find_bin(['rcc-qt' + qt_ver, 'rcc'], 'QT_RCC') + find_bin(['lrelease-qt' + qt_ver, 'lrelease'], 'QT_LRELEASE') + find_bin(['lupdate-qt' + qt_ver, 'lupdate'], 'QT_LUPDATE') + + env.UIC_ST = '%s -o %s' + env.MOC_ST = '-o' + env.ui_PATTERN = 'ui_%s.h' + env.QT_LRELEASE_FLAGS = ['-silent'] env.MOCCPPPATH_ST = '-I%s' env.MOCDEFINES_ST = '-D%s' @conf -def find_qt5_libraries(self): - qtlibs = getattr(Options.options, 'qtlibs', None) or 
os.environ.get("QT5_LIBDIR", None) +def set_qt5_libs_dir(self): + env = self.env + qt_ver = '6' if self.want_qt6 else '5' + + qtlibs = getattr(Options.options, 'qtlibs', None) or self.environ.get('QT' + qt_ver + '_LIBDIR') + if not qtlibs: try: - qtlibs = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_LIBS']).strip() + qtlibs = self.cmd_and_log(env.QMAKE + ['-query', 'QT_INSTALL_LIBS']).strip() except Errors.WafError: - qtdir = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_PREFIX']).strip() + os.sep + qtdir = self.cmd_and_log(env.QMAKE + ['-query', 'QT_INSTALL_PREFIX']).strip() qtlibs = os.path.join(qtdir, 'lib') - self.msg('Found the Qt5 libraries in', qtlibs) - qtincludes = os.environ.get("QT5_INCLUDES", None) or self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_HEADERS']).strip() + self.msg('Found the Qt' + qt_ver + ' library path', qtlibs) + + env.QTLIBS = qtlibs + +@conf +def find_single_qt5_lib(self, name, uselib, qtlibs, qtincludes, force_static): + env = self.env + qt_ver = '6' if self.want_qt6 else '5' + + if force_static: + exts = ('.a', '.lib') + prefix = 'STLIB' + else: + exts = ('.so', '.lib') + prefix = 'LIB' + + def lib_names(): + for x in exts: + for k in ('', qt_ver) if Utils.is_win32 else ['']: + for p in ('lib', ''): + yield (p, name, k, x) + + for tup in lib_names(): + k = ''.join(tup) + path = os.path.join(qtlibs, k) + if os.path.exists(path): + if env.DEST_OS == 'win32': + libval = ''.join(tup[:-1]) + else: + libval = name + env.append_unique(prefix + '_' + uselib, libval) + env.append_unique('%sPATH_%s' % (prefix, uselib), qtlibs) + env.append_unique('INCLUDES_' + uselib, qtincludes) + env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, name.replace('Qt' + qt_ver, 'Qt'))) + return k + return False + +@conf +def find_qt5_libraries(self): env = self.env - if not 'PKG_CONFIG_PATH' in os.environ: - os.environ['PKG_CONFIG_PATH'] = '%s:%s/pkgconfig:/usr/lib/qt5/lib/pkgconfig:/opt/qt5/lib/pkgconfig:/usr/lib/qt5/lib:/opt/qt5/lib' % (qtlibs, qtlibs) + qt_ver = '6' if self.want_qt6 else '5' + + qtincludes = self.environ.get('QT' + qt_ver + '_INCLUDES') or self.cmd_and_log(env.QMAKE + ['-query', 'QT_INSTALL_HEADERS']).strip() + force_static = self.environ.get('QT' + qt_ver + '_FORCE_STATIC') try: - if os.environ.get("QT5_XCOMPILE", None): - raise self.errors.ConfigurationError() + if self.environ.get('QT' + qt_ver + '_XCOMPILE'): + self.fatal('QT' + qt_ver + '_XCOMPILE Disables pkg-config detection') self.check_cfg(atleast_pkgconfig_version='0.1') except self.errors.ConfigurationError: - for i in self.qt5_vars: + for i in self.qt_vars: uselib = i.upper() - if Utils.unversioned_sys_platform() == "darwin": + if Utils.unversioned_sys_platform() == 'darwin': # Since at least qt 4.7.3 each library locates in separate directory - frameworkName = i + ".framework" - qtDynamicLib = os.path.join(qtlibs, frameworkName, i) - if os.path.exists(qtDynamicLib): - env.append_unique('FRAMEWORK_' + uselib, i) - self.msg('Checking for %s' % i, qtDynamicLib, 'GREEN') - else: - self.msg('Checking for %s' % i, False, 'YELLOW') - env.append_unique('INCLUDES_' + uselib, os.path.join(qtlibs, frameworkName, 'Headers')) - elif env.DEST_OS != "win32": - qtDynamicLib = os.path.join(qtlibs, "lib" + i + ".so") - qtStaticLib = os.path.join(qtlibs, "lib" + i + ".a") + fwk = i.replace('Qt' + qt_ver, 'Qt') + frameworkName = fwk + '.framework' + + qtDynamicLib = os.path.join(env.QTLIBS, frameworkName, fwk) if os.path.exists(qtDynamicLib): - env.append_unique('LIB_' + 
uselib, i) + env.append_unique('FRAMEWORK_' + uselib, fwk) + env.append_unique('FRAMEWORKPATH_' + uselib, env.QTLIBS) self.msg('Checking for %s' % i, qtDynamicLib, 'GREEN') - elif os.path.exists(qtStaticLib): - env.append_unique('LIB_' + uselib, i) - self.msg('Checking for %s' % i, qtStaticLib, 'GREEN') else: self.msg('Checking for %s' % i, False, 'YELLOW') - - env.append_unique('LIBPATH_' + uselib, qtlibs) - env.append_unique('INCLUDES_' + uselib, qtincludes) - env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, i)) + env.append_unique('INCLUDES_' + uselib, os.path.join(env.QTLIBS, frameworkName, 'Headers')) else: - # Release library names are like QtCore5 - for k in ("lib%s.a", "lib%s5.a", "%s.lib", "%s5.lib"): - lib = os.path.join(qtlibs, k % i) - if os.path.exists(lib): - env.append_unique('LIB_' + uselib, i + k[k.find("%s") + 2 : k.find('.')]) - self.msg('Checking for %s' % i, lib, 'GREEN') - break - else: - self.msg('Checking for %s' % i, False, 'YELLOW') - - env.append_unique('LIBPATH_' + uselib, qtlibs) - env.append_unique('INCLUDES_' + uselib, qtincludes) - env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, i)) - - # Debug library names are like QtCore5d - uselib = i.upper() + "_debug" - for k in ("lib%sd.a", "lib%sd5.a", "%sd.lib", "%sd5.lib"): - lib = os.path.join(qtlibs, k % i) - if os.path.exists(lib): - env.append_unique('LIB_' + uselib, i + k[k.find("%s") + 2 : k.find('.')]) - self.msg('Checking for %s' % i, lib, 'GREEN') - break - else: - self.msg('Checking for %s' % i, False, 'YELLOW') - - env.append_unique('LIBPATH_' + uselib, qtlibs) - env.append_unique('INCLUDES_' + uselib, qtincludes) - env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, i)) + ret = self.find_single_qt5_lib(i, uselib, env.QTLIBS, qtincludes, force_static) + if not force_static and not ret: + ret = self.find_single_qt5_lib(i, uselib, env.QTLIBS, qtincludes, True) + self.msg('Checking for %s' % i, ret, 'GREEN' if ret else 'YELLOW') else: - for i in self.qt5_vars_debug + self.qt5_vars: - self.check_cfg(package=i, args='--cflags --libs', mandatory=False) + path = '%s:%s:%s/pkgconfig:/usr/lib/qt%s/lib/pkgconfig:/opt/qt%s/lib/pkgconfig:/usr/lib/qt%s/lib:/opt/qt%s/lib' % ( + self.environ.get('PKG_CONFIG_PATH', ''), env.QTLIBS, env.QTLIBS, qt_ver, qt_ver, qt_ver, qt_ver) + for i in self.qt_vars: + self.check_cfg(package=i, args='--cflags --libs', mandatory=False, force_static=force_static, pkg_config_path=path) @conf def simplify_qt5_libs(self): - # the libpaths make really long command-lines - # remove the qtcore ones from qtgui, etc + """ + Since library paths make really long command-lines, + and since everything depends on qtcore, remove the qtcore ones from qtgui, etc + """ env = self.env def process_lib(vars_, coreval): for d in vars_: @@ -682,19 +814,19 @@ def process_lib(vars_, coreval): continue accu.append(lib) env['LIBPATH_'+var] = accu - - process_lib(self.qt5_vars, 'LIBPATH_QTCORE') - process_lib(self.qt5_vars_debug, 'LIBPATH_QTCORE_DEBUG') + process_lib(self.qt_vars, 'LIBPATH_QTCORE') @conf def add_qt5_rpath(self): - # rpath if wanted + """ + Defines rpath entries for Qt libraries + """ env = self.env if getattr(Options.options, 'want_rpath', False): def process_rpath(vars_, coreval): for d in vars_: var = d.upper() - value = env['LIBPATH_'+var] + value = env['LIBPATH_' + var] if value: core = env[coreval] accu = [] @@ -703,42 +835,62 @@ def process_rpath(vars_, coreval): if lib in core: continue accu.append('-Wl,--rpath='+lib) - env['RPATH_'+var] = accu - 
process_rpath(self.qt5_vars, 'LIBPATH_QTCORE') - process_rpath(self.qt5_vars_debug, 'LIBPATH_QTCORE_DEBUG') + env['RPATH_' + var] = accu + process_rpath(self.qt_vars, 'LIBPATH_QTCORE') @conf def set_qt5_libs_to_check(self): - if not hasattr(self, 'qt5_vars'): - self.qt5_vars = QT5_LIBS - self.qt5_vars = Utils.to_list(self.qt5_vars) - if not hasattr(self, 'qt5_vars_debug'): - self.qt5_vars_debug = [a + '_debug' for a in self.qt5_vars] - self.qt5_vars_debug = Utils.to_list(self.qt5_vars_debug) + qt_ver = '6' if self.want_qt6 else '5' + + if not self.qt_vars: + dirlst = Utils.listdir(self.env.QTLIBS) + + pat = self.env.cxxshlib_PATTERN + if Utils.is_win32: + pat = pat.replace('.dll', '.lib') + if self.environ.get('QT' + qt_ver + '_FORCE_STATIC'): + pat = self.env.cxxstlib_PATTERN + if Utils.unversioned_sys_platform() == 'darwin': + pat = r"%s\.framework" + + if self.want_qt6: + # match Qt6Name or QtName but not Qt5Name + mid_pattern = pat % 'Qt6?(?P[^5]\\w+)' + else: + # match Qt5Name or QtName but not Qt6Name + mid_pattern = pat % 'Qt5?(?P[^6]\\w+)' + re_qt = re.compile('^%s$' % mid_pattern) + + for x in sorted(dirlst): + m = re_qt.match(x) + if m: + self.qt_vars.append("Qt%s%s" % (qt_ver, m.group('name'))) + if not self.qt_vars: + self.fatal('cannot find any Qt%s library (%r)' % (qt_ver, self.env.QTLIBS)) + + qtextralibs = getattr(Options.options, 'qtextralibs', None) + if qtextralibs: + self.qt_vars.extend(qtextralibs.split(',')) @conf def set_qt5_defines(self): + qt_ver = '6' if self.want_qt6 else '5' + if sys.platform != 'win32': return - for x in self.qt5_vars: - y = x[2:].upper() + + for x in self.qt_vars: + y=x.replace('Qt' + qt_ver, 'Qt')[2:].upper() self.env.append_unique('DEFINES_%s' % x.upper(), 'QT_%s_LIB' % y) - self.env.append_unique('DEFINES_%s_DEBUG' % x.upper(), 'QT_%s_LIB' % y) def options(opt): """ Command-line options """ opt.add_option('--want-rpath', action='store_true', default=False, dest='want_rpath', help='enable the rpath for qt libraries') - - opt.add_option('--header-ext', - type='string', - default='', - help='header extension for moc files', - dest='qt_header_ext') - for i in 'qtdir qtbin qtlibs'.split(): opt.add_option('--'+i, type='string', default='', dest=i) - opt.add_option('--translate', action="store_true", help="collect translation strings", dest="trans_qt5", default=False) + opt.add_option('--translate', action='store_true', help='collect translation strings', dest='trans_qt5', default=False) + opt.add_option('--qtextralibs', type='string', default='', dest='qtextralibs', help='additional qt libraries on the system to add to default ones, comma separated') diff --git a/waflib/Tools/ruby.py b/waflib/Tools/ruby.py index 996bb80669..8d92a79a16 100644 --- a/waflib/Tools/ruby.py +++ b/waflib/Tools/ruby.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # encoding: utf-8 # daniel.svensson at purplescout.se 2008 -# Thomas Nagy 2010 (ita) +# Thomas Nagy 2016-2018 (ita) """ Support for Ruby extensions. 
A C/C++ compiler is required:: @@ -23,12 +23,12 @@ def build(bld): """ import os -from waflib import Task, Options, Utils -from waflib.TaskGen import before_method, feature, after_method, Task, extension +from waflib import Errors, Options, Task, Utils +from waflib.TaskGen import before_method, feature, extension from waflib.Configure import conf @feature('rubyext') -@before_method('apply_incpaths', 'apply_lib_vars', 'apply_bundle', 'apply_link') +@before_method('apply_incpaths', 'process_source', 'apply_bundle', 'apply_link') def init_rubyext(self): """ Add required variables for ruby extensions @@ -41,12 +41,12 @@ def init_rubyext(self): self.uselib.append('RUBYEXT') @feature('rubyext') -@before_method('apply_link', 'propagate_uselib') +@before_method('apply_link', 'propagate_uselib_vars') def apply_ruby_so_name(self): """ Strip the *lib* prefix from ruby extensions """ - self.env['cshlib_PATTERN'] = self.env['cxxshlib_PATTERN'] = self.env['rubyext_PATTERN'] + self.env.cshlib_PATTERN = self.env.cxxshlib_PATTERN = self.env.rubyext_PATTERN @conf def check_ruby_version(self, minver=()): @@ -56,33 +56,26 @@ def check_ruby_version(self, minver=()): The ruby binary can be overridden by ``--with-ruby-binary`` command-line option. """ - if Options.options.rubybinary: - self.env.RUBY = Options.options.rubybinary - else: - self.find_program('ruby', var='RUBY') - - ruby = self.env.RUBY + ruby = self.find_program('ruby', var='RUBY', value=Options.options.rubybinary) try: version = self.cmd_and_log(ruby + ['-e', 'puts defined?(VERSION) ? VERSION : RUBY_VERSION']).strip() - except Exception: + except Errors.WafError: self.fatal('could not determine ruby version') self.env.RUBY_VERSION = version try: - ver = tuple(map(int, version.split("."))) - except Exception: + ver = tuple(map(int, version.split('.'))) + except Errors.WafError: self.fatal('unsupported ruby version %r' % version) cver = '' if minver: + cver = '> ' + '.'.join(str(x) for x in minver) if ver < minver: self.fatal('ruby is too old %r' % ver) - cver = '.'.join([str(x) for x in minver]) - else: - cver = ver - self.msg('Checking for ruby version %s' % str(minver or ''), cver) + self.msg('Checking for ruby version %s' % cver, version) @conf def check_ruby_ext_devel(self): @@ -103,16 +96,16 @@ def read_out(cmd): def read_config(key): return read_out('puts RbConfig::CONFIG[%r]' % key) - ruby = self.env['RUBY'] - archdir = read_config('archdir') - cpppath = archdir + cpppath = archdir = read_config('archdir') if version >= (1, 9, 0): ruby_hdrdir = read_config('rubyhdrdir') cpppath += ruby_hdrdir + if version >= (2, 0, 0): + cpppath += read_config('rubyarchhdrdir') cpppath += [os.path.join(ruby_hdrdir[0], read_config('arch')[0])] - self.check(header_name='ruby.h', includes=cpppath, errmsg='could not find ruby header file') + self.check(header_name='ruby.h', includes=cpppath, errmsg='could not find ruby header file', link_header_test=False) self.env.LIBPATH_RUBYEXT = read_config('libdir') self.env.LIBPATH_RUBYEXT += archdir @@ -158,27 +151,27 @@ def configure(conf): self.start_msg('Ruby module %s' % module_name) try: self.cmd_and_log(self.env.RUBY + ['-e', 'require \'%s\';puts 1' % module_name]) - except Exception: + except Errors.WafError: self.end_msg(False) self.fatal('Could not find the ruby module %r' % module_name) self.end_msg(True) @extension('.rb') def process(self, node): - tsk = self.create_task('run_ruby', node) + return self.create_task('run_ruby', node) class run_ruby(Task.Task): """ Task to run ruby files detected by file extension 
.rb:: - + def options(opt): opt.load('ruby') - + def configure(ctx): ctx.check_ruby_version() - + def build(bld): - bld.env['RBFLAGS'] = '-e puts "hello world"' + bld.env.RBFLAGS = '-e puts "hello world"' bld(source='a_ruby_file.rb') """ run_str = '${RUBY} ${RBFLAGS} -I ${SRC[0].parent.abspath()} ${SRC}' diff --git a/waflib/Tools/suncc.py b/waflib/Tools/suncc.py index e705c328ed..33d34fc9ea 100644 --- a/waflib/Tools/suncc.py +++ b/waflib/Tools/suncc.py @@ -1,28 +1,26 @@ #!/usr/bin/env python # encoding: utf-8 -# Thomas Nagy, 2006-2010 (ita) +# Thomas Nagy, 2006-2018 (ita) # Ralf Habacker, 2006 (rh) -from waflib import Utils +from waflib import Errors from waflib.Tools import ccroot, ar from waflib.Configure import conf @conf def find_scc(conf): """ - Detect the Sun C compiler + Detects the Sun C compiler """ v = conf.env cc = conf.find_program('cc', var='CC') - try: conf.cmd_and_log(cc + ['-flags']) - except Exception: + except Errors.WafError: conf.fatal('%r is not a Sun compiler' % cc) v.CC_NAME = 'sun' conf.get_suncc_version(cc) - @conf def scc_common_flags(conf): """ @@ -30,36 +28,34 @@ def scc_common_flags(conf): """ v = conf.env - v['CC_SRC_F'] = [] - v['CC_TGT_F'] = ['-c', '-o'] + v.CC_SRC_F = [] + v.CC_TGT_F = ['-c', '-o', ''] + + if not v.LINK_CC: + v.LINK_CC = v.CC - # linker - if not v['LINK_CC']: v['LINK_CC'] = v['CC'] - v['CCLNK_SRC_F'] = '' - v['CCLNK_TGT_F'] = ['-o'] - v['CPPPATH_ST'] = '-I%s' - v['DEFINES_ST'] = '-D%s' + v.CCLNK_SRC_F = '' + v.CCLNK_TGT_F = ['-o', ''] + v.CPPPATH_ST = '-I%s' + v.DEFINES_ST = '-D%s' - v['LIB_ST'] = '-l%s' # template for adding libs - v['LIBPATH_ST'] = '-L%s' # template for adding libpaths - v['STLIB_ST'] = '-l%s' - v['STLIBPATH_ST'] = '-L%s' + v.LIB_ST = '-l%s' # template for adding libs + v.LIBPATH_ST = '-L%s' # template for adding libpaths + v.STLIB_ST = '-l%s' + v.STLIBPATH_ST = '-L%s' - v['SONAME_ST'] = '-Wl,-h,%s' - v['SHLIB_MARKER'] = '-Bdynamic' - v['STLIB_MARKER'] = '-Bstatic' + v.SONAME_ST = '-Wl,-h,%s' + v.SHLIB_MARKER = '-Bdynamic' + v.STLIB_MARKER = '-Bstatic' - # program - v['cprogram_PATTERN'] = '%s' + v.cprogram_PATTERN = '%s' - # shared library - v['CFLAGS_cshlib'] = ['-Kpic', '-DPIC'] - v['LINKFLAGS_cshlib'] = ['-G'] - v['cshlib_PATTERN'] = 'lib%s.so' + v.CFLAGS_cshlib = ['-xcode=pic32', '-DPIC'] + v.LINKFLAGS_cshlib = ['-G'] + v.cshlib_PATTERN = 'lib%s.so' - # static lib - v['LINKFLAGS_cstlib'] = ['-Bstatic'] - v['cstlib_PATTERN'] = 'lib%s.a' + v.LINKFLAGS_cstlib = ['-Bstatic'] + v.cstlib_PATTERN = 'lib%s.a' def configure(conf): conf.find_scc() diff --git a/waflib/Tools/suncxx.py b/waflib/Tools/suncxx.py index 01b3f8d251..3b384f6f99 100644 --- a/waflib/Tools/suncxx.py +++ b/waflib/Tools/suncxx.py @@ -1,22 +1,22 @@ #!/usr/bin/env python # encoding: utf-8 -# Thomas Nagy, 2006-2010 (ita) +# Thomas Nagy, 2006-2018 (ita) # Ralf Habacker, 2006 (rh) -from waflib import Utils +from waflib import Errors from waflib.Tools import ccroot, ar from waflib.Configure import conf @conf def find_sxx(conf): """ - Detect the sun C++ compiler + Detects the sun C++ compiler """ v = conf.env cc = conf.find_program(['CC', 'c++'], var='CXX') try: conf.cmd_and_log(cc + ['-flags']) - except Exception: + except Errors.WafError: conf.fatal('%r is not a Sun compiler' % cc) v.CXX_NAME = 'sun' conf.get_suncc_version(cc) @@ -28,36 +28,34 @@ def sxx_common_flags(conf): """ v = conf.env - v['CXX_SRC_F'] = [] - v['CXX_TGT_F'] = ['-c', '-o'] + v.CXX_SRC_F = [] + v.CXX_TGT_F = ['-c', '-o', ''] - # linker - if not v['LINK_CXX']: v['LINK_CXX'] = v['CXX'] - 
v['CXXLNK_SRC_F'] = [] - v['CXXLNK_TGT_F'] = ['-o'] - v['CPPPATH_ST'] = '-I%s' - v['DEFINES_ST'] = '-D%s' + if not v.LINK_CXX: + v.LINK_CXX = v.CXX - v['LIB_ST'] = '-l%s' # template for adding libs - v['LIBPATH_ST'] = '-L%s' # template for adding libpaths - v['STLIB_ST'] = '-l%s' - v['STLIBPATH_ST'] = '-L%s' + v.CXXLNK_SRC_F = [] + v.CXXLNK_TGT_F = ['-o', ''] + v.CPPPATH_ST = '-I%s' + v.DEFINES_ST = '-D%s' - v['SONAME_ST'] = '-Wl,-h,%s' - v['SHLIB_MARKER'] = '-Bdynamic' - v['STLIB_MARKER'] = '-Bstatic' + v.LIB_ST = '-l%s' # template for adding libs + v.LIBPATH_ST = '-L%s' # template for adding libpaths + v.STLIB_ST = '-l%s' + v.STLIBPATH_ST = '-L%s' - # program - v['cxxprogram_PATTERN'] = '%s' + v.SONAME_ST = '-Wl,-h,%s' + v.SHLIB_MARKER = '-Bdynamic' + v.STLIB_MARKER = '-Bstatic' - # shared library - v['CXXFLAGS_cxxshlib'] = ['-Kpic', '-DPIC'] - v['LINKFLAGS_cxxshlib'] = ['-G'] - v['cxxshlib_PATTERN'] = 'lib%s.so' + v.cxxprogram_PATTERN = '%s' - # static lib - v['LINKFLAGS_cxxstlib'] = ['-Bstatic'] - v['cxxstlib_PATTERN'] = 'lib%s.a' + v.CXXFLAGS_cxxshlib = ['-xcode=pic32', '-DPIC'] + v.LINKFLAGS_cxxshlib = ['-G'] + v.cxxshlib_PATTERN = 'lib%s.so' + + v.LINKFLAGS_cxxstlib = ['-Bstatic'] + v.cxxstlib_PATTERN = 'lib%s.a' def configure(conf): conf.find_sxx() diff --git a/waflib/Tools/tex.py b/waflib/Tools/tex.py index e224f79d12..22f78d0ef8 100644 --- a/waflib/Tools/tex.py +++ b/waflib/Tools/tex.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # encoding: utf-8 -# Thomas Nagy, 2006-2010 (ita) +# Thomas Nagy, 2006-2018 (ita) """ TeX/LaTeX/PDFLaTeX/XeLaTeX support @@ -20,7 +20,7 @@ def build(bld): outs = 'ps', # 'pdf' or 'ps pdf' deps = 'crossreferencing.lst', # to give dependencies directly prompt = 1, # 0 for the batch mode - ) + ) Notes: @@ -28,10 +28,9 @@ def build(bld): $ PDFLATEX=luatex waf configure -- This tool doesn't use the target attribute of the task generator +- This tool does not use the target attribute of the task generator (``bld(target=...)``); the target file name is built from the source - base name and the out type(s) - + base name and the output type(s) """ import os, re @@ -41,7 +40,7 @@ def build(bld): re_bibunit = re.compile(r'\\(?Pputbib)\[(?P[^\[\]]*)\]',re.M) def bibunitscan(self): """ - Parse the inputs and try to find the *bibunit* dependencies + Parses TeX inputs and try to find the *bibunit* file dependencies :return: list of bibunit files :rtype: list of :py:class:`waflib.Node.Node` @@ -49,24 +48,26 @@ def bibunitscan(self): node = self.inputs[0] nodes = [] - if not node: return nodes + if not node: + return nodes code = node.read() - for match in re_bibunit.finditer(code): path = match.group('file') if path: + found = None for k in ('', '.bib'): # add another loop for the tex include paths? - Logs.debug('tex: trying %s%s' % (path, k)) + Logs.debug('tex: trying %s%s', path, k) fi = node.parent.find_resource(path + k) if fi: + found = True nodes.append(fi) - # no break, people are crazy - else: - Logs.debug('tex: could not find %s' % path) + # no break + if not found: + Logs.debug('tex: could not find %s', path) - Logs.debug("tex: found the following bibunit files: %s" % nodes) + Logs.debug('tex: found the following bibunit files: %s', nodes) return nodes exts_deps_tex = ['', '.ltx', '.tex', '.bib', '.pdf', '.png', '.eps', '.ps', '.sty'] @@ -86,9 +87,10 @@ def bibunitscan(self): class tex(Task.Task): """ - Compile a tex/latex file. + Compiles a tex/latex file. .. 
inheritance-diagram:: waflib.Tools.tex.latex waflib.Tools.tex.xelatex waflib.Tools.tex.pdflatex + :top-classes: waflib.Tools.tex.tex """ bibtex_fun, _ = Task.compile_fun('${BIBTEX} ${BIBTEXFLAGS} ${SRCFILE}', shell=False) @@ -108,23 +110,19 @@ class tex(Task.Task): def exec_command(self, cmd, **kw): """ - Override :py:meth:`waflib.Task.Task.exec_command` to execute the command without buffering (latex may prompt for inputs) + Executes TeX commands without buffering (latex may prompt for inputs) :return: the return code :rtype: int """ - bld = self.generator.bld - Logs.info('runner: %r' % cmd) - try: - if not kw.get('cwd', None): - kw['cwd'] = bld.cwd - except AttributeError: - bld.cwd = kw['cwd'] = bld.variant_dir - return Utils.subprocess.Popen(cmd, **kw).wait() + if self.env.PROMPT_LATEX: + # capture the outputs in configuration tests + kw['stdout'] = kw['stderr'] = None + return super(tex, self).exec_command(cmd, **kw) def scan_aux(self, node): """ - A recursive regex-based scanner that finds included auxiliary files. + Recursive regex-based scanner that finds included auxiliary files. """ nodes = [node] re_aux = re.compile(r'\\@input{(?P[^{}]*)}', re.M) @@ -135,16 +133,15 @@ def parse_node(node): path = match.group('file') found = node.parent.find_or_declare(path) if found and found not in nodes: - Logs.debug('tex: found aux node ' + found.abspath()) + Logs.debug('tex: found aux node %r', found) nodes.append(found) parse_node(found) - parse_node(node) return nodes def scan(self): """ - A recursive regex-based scanner that finds latex dependencies. It uses :py:attr:`waflib.Tools.tex.re_tex` + Recursive regex-based scanner that finds latex dependencies. It uses :py:attr:`waflib.Tools.tex.re_tex` Depending on your needs you might want: @@ -163,14 +160,14 @@ def scan(self): nodes = [] names = [] seen = [] - if not node: return (nodes, names) + if not node: + return (nodes, names) def parse_node(node): if node in seen: return seen.append(node) code = node.read() - global re_tex for match in re_tex.finditer(code): multibib = match.group('type') @@ -189,7 +186,7 @@ def parse_node(node): # issue 1067, scan in all texinputs folders for up in self.texinputs_nodes: - Logs.debug('tex: trying %s%s' % (path, k)) + Logs.debug('tex: trying %s%s', path, k) found = up.find_resource(path + k) if found: break @@ -221,12 +218,12 @@ def parse_node(node): for x in nodes: x.parent.get_bld().mkdir() - Logs.debug("tex: found the following : %s and names %s" % (nodes, names)) + Logs.debug("tex: found the following : %s and names %s", nodes, names) return (nodes, names) def check_status(self, msg, retcode): """ - Check an exit status and raise an error with a particular message + Checks an exit status and raise an error with a particular message :param msg: message to display if the code is non-zero :type msg: string @@ -234,22 +231,29 @@ def check_status(self, msg, retcode): :type retcode: boolean """ if retcode != 0: - raise Errors.WafError("%r command exit status %r" % (msg, retcode)) + raise Errors.WafError('%r command exit status %r' % (msg, retcode)) + + def info(self, *k, **kw): + try: + info = self.generator.bld.conf.logger.info + except AttributeError: + info = Logs.info + info(*k, **kw) def bibfile(self): """ - Parse the *.aux* files to find bibfiles to process. - If yes, execute :py:meth:`waflib.Tools.tex.tex.bibtex_fun` + Parses *.aux* files to find bibfiles to process. 
+ If present, execute :py:meth:`waflib.Tools.tex.tex.bibtex_fun` """ for aux_node in self.aux_nodes: try: ct = aux_node.read() except EnvironmentError: - Logs.error('Error reading %s: %r' % aux_node.abspath()) + Logs.error('Error reading %s: %r', aux_node.abspath()) continue if g_bibtex_re.findall(ct): - Logs.info('calling bibtex') + self.info('calling bibtex') self.env.env = {} self.env.env.update(os.environ) @@ -266,8 +270,8 @@ def bibfile(self): def bibunits(self): """ - Parse the *.aux* file to find bibunit files. If there are bibunit files, - execute :py:meth:`waflib.Tools.tex.tex.bibtex_fun`. + Parses *.aux* file to find bibunit files. If there are bibunit files, + runs :py:meth:`waflib.Tools.tex.tex.bibtex_fun`. """ try: bibunits = bibunitscan(self) @@ -277,7 +281,7 @@ def bibunits(self): if bibunits: fn = ['bu' + str(i) for i in range(1, len(bibunits) + 1)] if fn: - Logs.info('calling bibtex on bibunits') + self.info('calling bibtex on bibunits') for f in fn: self.env.env = {'BIBINPUTS': self.texinputs(), 'BSTINPUTS': self.texinputs()} @@ -286,17 +290,17 @@ def bibunits(self): def makeindex(self): """ - Look on the filesystem if there is a *.idx* file to process. If yes, execute - :py:meth:`waflib.Tools.tex.tex.makeindex_fun` + Searches the filesystem for *.idx* files to process. If present, + runs :py:meth:`waflib.Tools.tex.tex.makeindex_fun` """ self.idx_node = self.inputs[0].change_ext('.idx') try: idx_path = self.idx_node.abspath() os.stat(idx_path) except OSError: - Logs.info('index file %s absent, not calling makeindex' % idx_path) + self.info('index file %s absent, not calling makeindex', idx_path) else: - Logs.info('calling makeindex') + self.info('calling makeindex') self.env.SRCFILE = self.idx_node.name self.env.env = {} @@ -304,13 +308,16 @@ def makeindex(self): def bibtopic(self): """ - Additional .aux files from the bibtopic package + Lists additional .aux files from the bibtopic package """ p = self.inputs[0].parent.get_bld() if os.path.exists(os.path.join(p.abspath(), 'btaux.aux')): self.aux_nodes += p.ant_glob('*[0-9].aux') def makeglossaries(self): + """ + Lists additional glossaries from .aux files. If present, runs the makeglossaries program. + """ src_file = self.inputs[0].abspath() base_file = os.path.basename(src_file) base, _ = os.path.splitext(base_file) @@ -318,7 +325,7 @@ def makeglossaries(self): try: ct = aux_node.read() except EnvironmentError: - Logs.error('Error reading %s: %r' % aux_node.abspath()) + Logs.error('Error reading %s: %r', aux_node.abspath()) continue if g_glossaries_re.findall(ct): @@ -330,29 +337,32 @@ def makeglossaries(self): return def texinputs(self): + """ + Returns the list of texinput nodes as a string suitable for the TEXINPUTS environment variables + + :rtype: string + """ return os.pathsep.join([k.abspath() for k in self.texinputs_nodes]) + os.pathsep def run(self): """ - Runs the TeX build process. + Runs the whole TeX build process - It may require multiple passes, depending on the usage of cross-references, - bibliographies, content susceptible of needing such passes. + Multiple passes are required depending on the usage of cross-references, + bibliographies, glossaries, indexes and additional contents The appropriate TeX compiler is called until the *.aux* files stop changing. - - Makeindex and bibtex are called if necessary. 
""" env = self.env - if not env['PROMPT_LATEX']: - env.append_value('LATEXFLAGS', '-interaction=batchmode') - env.append_value('PDFLATEXFLAGS', '-interaction=batchmode') - env.append_value('XELATEXFLAGS', '-interaction=batchmode') + if not env.PROMPT_LATEX: + env.append_value('LATEXFLAGS', '-interaction=nonstopmode') + env.append_value('PDFLATEXFLAGS', '-interaction=nonstopmode') + env.append_value('XELATEXFLAGS', '-interaction=nonstopmode') # important, set the cwd for everybody - self.cwd = self.inputs[0].parent.get_bld().abspath() + self.cwd = self.inputs[0].parent.get_bld() - Logs.info('first pass on %s' % self.__class__.__name__) + self.info('first pass on %s', self.__class__.__name__) # Hash .aux files before even calling the LaTeX compiler cur_hash = self.hash_aux_nodes() @@ -378,12 +388,17 @@ def run(self): break # run the command - Logs.info('calling %s' % self.__class__.__name__) + self.info('calling %s', self.__class__.__name__) self.call_latex() def hash_aux_nodes(self): + """ + Returns a hash of the .aux file contents + + :rtype: string or bytes + """ try: - nodes = self.aux_nodes + self.aux_nodes except AttributeError: try: self.aux_nodes = self.scan_aux(self.inputs[0].change_ext('.aux')) @@ -392,31 +407,41 @@ def hash_aux_nodes(self): return Utils.h_list([Utils.h_file(x.abspath()) for x in self.aux_nodes]) def call_latex(self): + """ + Runs the TeX compiler once + """ self.env.env = {} self.env.env.update(os.environ) self.env.env.update({'TEXINPUTS': self.texinputs()}) self.env.SRCFILE = self.inputs[0].abspath() self.check_status('error when calling latex', self.texfun()) - class latex(tex): + "Compiles LaTeX files" texfun, vars = Task.compile_fun('${LATEX} ${LATEXFLAGS} ${SRCFILE}', shell=False) + class pdflatex(tex): + "Compiles PdfLaTeX files" texfun, vars = Task.compile_fun('${PDFLATEX} ${PDFLATEXFLAGS} ${SRCFILE}', shell=False) + class xelatex(tex): + "XeLaTeX files" texfun, vars = Task.compile_fun('${XELATEX} ${XELATEXFLAGS} ${SRCFILE}', shell=False) class dvips(Task.Task): + "Converts dvi files to postscript" run_str = '${DVIPS} ${DVIPSFLAGS} ${SRC} -o ${TGT}' color = 'BLUE' after = ['latex', 'pdflatex', 'xelatex'] class dvipdf(Task.Task): + "Converts dvi files to pdf" run_str = '${DVIPDF} ${DVIPDFFLAGS} ${SRC} ${TGT}' color = 'BLUE' after = ['latex', 'pdflatex', 'xelatex'] class pdf2ps(Task.Task): + "Converts pdf files to postscript" run_str = '${PDF2PS} ${PDF2PSFLAGS} ${SRC} ${TGT}' color = 'BLUE' after = ['latex', 'pdflatex', 'xelatex'] @@ -425,16 +450,22 @@ class pdf2ps(Task.Task): @before_method('process_source') def apply_tex(self): """ - Create :py:class:`waflib.Tools.tex.tex` objects, and dvips/dvipdf/pdf2ps tasks if necessary (outs='ps', etc). + Creates :py:class:`waflib.Tools.tex.tex` objects, and + dvips/dvipdf/pdf2ps tasks if necessary (outs='ps', etc). 
""" if not getattr(self, 'type', None) in ('latex', 'pdflatex', 'xelatex'): self.type = 'pdflatex' - tree = self.bld outs = Utils.to_list(getattr(self, 'outs', [])) - # prompt for incomplete files (else the batchmode is used) - self.env['PROMPT_LATEX'] = getattr(self, 'prompt', 1) + # prompt for incomplete files (else the nonstopmode is used) + try: + self.generator.bld.conf + except AttributeError: + default_prompt = False + else: + default_prompt = True + self.env.PROMPT_LATEX = getattr(self, 'prompt', default_prompt) deps_lst = [] @@ -451,7 +482,6 @@ def apply_tex(self): deps_lst.append(dep) for node in self.to_nodes(self.source): - if self.type == 'latex': task = self.create_task('latex', node, node.change_ext('.dvi')) elif self.type == 'pdflatex': @@ -484,9 +514,9 @@ def apply_tex(self): if p: task.texinputs_nodes.append(p) else: - Logs.error('Invalid TEXINPUTS folder %s' % x) + Logs.error('Invalid TEXINPUTS folder %s', x) else: - Logs.error('Cannot resolve relative paths in TEXINPUTS %s' % x) + Logs.error('Cannot resolve relative paths in TEXINPUTS %s', x) if self.type == 'latex': if 'ps' in outs: @@ -502,8 +532,7 @@ def apply_tex(self): def configure(self): """ - Try to find the programs tex, latex and others. Do not raise any error if they - are not found. + Find the programs tex, latex and others without raising errors. """ v = self.env for p in 'tex latex pdflatex xelatex bibtex dvips dvipdf ps2pdf makeindex pdf2ps makeglossaries'.split(): @@ -511,5 +540,5 @@ def configure(self): self.find_program(p, var=p.upper()) except self.errors.ConfigurationError: pass - v['DVIPSFLAGS'] = '-Ppdf' + v.DVIPSFLAGS = '-Ppdf' diff --git a/waflib/Tools/vala.py b/waflib/Tools/vala.py index b09377e47b..822ec502e0 100644 --- a/waflib/Tools/vala.py +++ b/waflib/Tools/vala.py @@ -8,14 +8,14 @@ this tool to be too stable either (apis, etc) """ -import os.path, shutil, re -from waflib import Context, Task, Utils, Logs, Options, Errors +import re +from waflib import Build, Context, Errors, Logs, Node, Options, Task, Utils from waflib.TaskGen import extension, taskgen_method from waflib.Configure import conf class valac(Task.Task): """ - Task to compile vala files. 
+ Compiles vala files """ #run_str = "${VALAC} ${VALAFLAGS}" # ideally #vars = ['VALAC_VERSION'] @@ -24,31 +24,33 @@ class valac(Task.Task): def run(self): cmd = self.env.VALAC + self.env.VALAFLAGS - cmd.extend([a.abspath() for a in self.inputs]) - ret = self.exec_command(cmd, cwd=self.outputs[0].parent.abspath()) + resources = getattr(self, 'vala_exclude', []) + cmd.extend([a.abspath() for a in self.inputs if a not in resources]) + ret = self.exec_command(cmd, cwd=self.vala_dir_node.abspath()) if ret: return ret - for x in self.outputs: - if id(x.parent) != id(self.outputs[0].parent): - shutil.move(self.outputs[0].parent.abspath() + os.sep + x.name, x.abspath()) - if self.generator.dump_deps_node: self.generator.dump_deps_node.write('\n'.join(self.generator.packages)) return ret -valac = Task.update_outputs(valac) # no decorators for python2 classes - @taskgen_method def init_vala_task(self): + """ + Initializes the vala task with the relevant data (acts as a constructor) + """ self.profile = getattr(self, 'profile', 'gobject') + self.packages = packages = Utils.to_list(getattr(self, 'packages', [])) + self.use = Utils.to_list(getattr(self, 'use', [])) + if packages and not self.use: + self.use = packages[:] # copy + if self.profile == 'gobject': - self.uselib = Utils.to_list(getattr(self, 'uselib', [])) - if not 'GOBJECT' in self.uselib: - self.uselib.append('GOBJECT') + if not 'GOBJECT' in self.use: + self.use.append('GOBJECT') def addflags(flags): self.env.append_value('VALAFLAGS', flags) @@ -56,32 +58,46 @@ def addflags(flags): if self.profile: addflags('--profile=%s' % self.profile) - if hasattr(self, 'threading'): + valatask = self.valatask + + # output directory + if hasattr(self, 'vala_dir'): + if isinstance(self.vala_dir, str): + valatask.vala_dir_node = self.path.get_bld().make_node(self.vala_dir) + try: + valatask.vala_dir_node.mkdir() + except OSError: + raise self.bld.fatal('Cannot create the vala dir %r' % valatask.vala_dir_node) + else: + valatask.vala_dir_node = self.vala_dir + else: + valatask.vala_dir_node = self.path.get_bld() + addflags('--directory=%s' % valatask.vala_dir_node.abspath()) + + if hasattr(self, 'thread'): if self.profile == 'gobject': - if not 'GTHREAD' in self.uselib: - self.uselib.append('GTHREAD') + if not 'GTHREAD' in self.use: + self.use.append('GTHREAD') else: #Vala doesn't have threading support for dova nor posix - Logs.warn("Profile %s means no threading support" % self.profile) - self.threading = False + Logs.warn('Profile %s means no threading support', self.profile) + self.thread = False - if self.threading: - addflags('--threading') - - valatask = self.valatask + if self.thread: + addflags('--thread') self.is_lib = 'cprogram' not in self.features if self.is_lib: addflags('--library=%s' % self.target) - h_node = self.path.find_or_declare('%s.h' % self.target) + h_node = valatask.vala_dir_node.find_or_declare('%s.h' % self.target) valatask.outputs.append(h_node) addflags('--header=%s' % h_node.name) - valatask.outputs.append(self.path.find_or_declare('%s.vapi' % self.target)) + valatask.outputs.append(valatask.vala_dir_node.find_or_declare('%s.vapi' % self.target)) if getattr(self, 'gir', None): - gir_node = self.path.find_or_declare('%s.gir' % self.gir) + gir_node = valatask.vala_dir_node.find_or_declare('%s.gir' % self.gir) addflags('--gir=%s' % gir_node.name) valatask.outputs.append(gir_node) @@ -89,13 +105,11 @@ def addflags(flags): if self.vala_target_glib: addflags('--target-glib=%s' % self.vala_target_glib) - addflags(['--define=%s' % x 
for x in getattr(self, 'vala_defines', [])]) - + addflags(['--define=%s' % x for x in Utils.to_list(getattr(self, 'vala_defines', []))]) packages_private = Utils.to_list(getattr(self, 'packages_private', [])) addflags(['--pkg=%s' % x for x in packages_private]) - def _get_api_version(): api_version = '1.0' if hasattr(Context.g_module, 'API_VERSION'): @@ -107,17 +121,15 @@ def _get_api_version(): return api_version self.includes = Utils.to_list(getattr(self, 'includes', [])) - self.uselib = self.to_list(getattr(self, 'uselib', [])) valatask.install_path = getattr(self, 'install_path', '') valatask.vapi_path = getattr(self, 'vapi_path', '${DATAROOTDIR}/vala/vapi') - valatask.pkg_name = getattr(self, 'pkg_name', self.env['PACKAGE']) + valatask.pkg_name = getattr(self, 'pkg_name', self.env.PACKAGE) valatask.header_path = getattr(self, 'header_path', '${INCLUDEDIR}/%s-%s' % (valatask.pkg_name, _get_api_version())) valatask.install_binding = getattr(self, 'install_binding', True) - self.packages = packages = Utils.to_list(getattr(self, 'packages', [])) self.vapi_dirs = vapi_dirs = Utils.to_list(getattr(self, 'vapi_dirs', [])) - includes = [] + #includes = [] if hasattr(self, 'use'): local_packages = Utils.to_list(self.use)[:] # make sure to have a copy @@ -133,20 +145,22 @@ def _get_api_version(): package_obj = self.bld.get_tgen_by_name(package) except Errors.WafError: continue - package_name = package_obj.target - package_node = package_obj.path - package_dir = package_node.path_from(self.path) - for task in package_obj.tasks: + # in practice the other task is already processed + # but this makes it explicit + package_obj.post() + package_name = package_obj.target + task = getattr(package_obj, 'valatask', None) + if task: for output in task.outputs: if output.name == package_name + ".vapi": valatask.set_run_after(task) if package_name not in packages: packages.append(package_name) - if package_dir not in vapi_dirs: - vapi_dirs.append(package_dir) - if package_dir not in includes: - includes.append(package_dir) + if output.parent not in vapi_dirs: + vapi_dirs.append(output.parent) + if output.parent not in self.includes: + self.includes.append(output.parent) if hasattr(package_obj, 'use'): lst = self.to_list(package_obj.use) @@ -156,46 +170,40 @@ def _get_api_version(): addflags(['--pkg=%s' % p for p in packages]) for vapi_dir in vapi_dirs: - v_node = self.path.find_dir(vapi_dir) + if isinstance(vapi_dir, Node.Node): + v_node = vapi_dir + else: + v_node = self.path.find_dir(vapi_dir) if not v_node: - Logs.warn('Unable to locate Vala API directory: %r' % vapi_dir) + Logs.warn('Unable to locate Vala API directory: %r', vapi_dir) else: addflags('--vapidir=%s' % v_node.abspath()) - addflags('--vapidir=%s' % v_node.get_bld().abspath()) self.dump_deps_node = None if self.is_lib and self.packages: - self.dump_deps_node = self.path.find_or_declare('%s.deps' % self.target) + self.dump_deps_node = valatask.vala_dir_node.find_or_declare('%s.deps' % self.target) valatask.outputs.append(self.dump_deps_node) - self.includes.append(self.bld.srcnode.abspath()) - self.includes.append(self.bld.bldnode.abspath()) - for include in includes: - try: - self.includes.append(self.path.find_dir(include).abspath()) - self.includes.append(self.path.find_dir(include).get_bld().abspath()) - except AttributeError: - Logs.warn("Unable to locate include directory: '%s'" % include) - - if self.is_lib and valatask.install_binding: headers_list = [o for o in valatask.outputs if o.suffix() == ".h"] - try: - 
self.install_vheader.source = headers_list - except AttributeError: - self.install_vheader = self.bld.install_files(valatask.header_path, headers_list, self.env) + if headers_list: + self.install_vheader = self.add_install_files(install_to=valatask.header_path, install_from=headers_list) vapi_list = [o for o in valatask.outputs if (o.suffix() in (".vapi", ".deps"))] - try: - self.install_vapi.source = vapi_list - except AttributeError: - self.install_vapi = self.bld.install_files(valatask.vapi_path, vapi_list, self.env) + if vapi_list: + self.install_vapi = self.add_install_files(install_to=valatask.vapi_path, install_from=vapi_list) gir_list = [o for o in valatask.outputs if o.suffix() == '.gir'] - try: - self.install_gir.source = gir_list - except AttributeError: - self.install_gir = self.bld.install_files(getattr(self, 'gir_path', '${DATAROOTDIR}/gir-1.0'), gir_list, self.env) + if gir_list: + self.install_gir = self.add_install_files( + install_to=getattr(self, 'gir_path', '${DATAROOTDIR}/gir-1.0'), install_from=gir_list) + + if hasattr(self, 'vala_resources'): + nodes = self.to_nodes(self.vala_resources) + valatask.vala_exclude = getattr(valatask, 'vala_exclude', []) + nodes + valatask.inputs.extend(nodes) + for x in nodes: + addflags(['--gresources', x.abspath()]) @extension('.vala', '.gs') def vala_file(self, node): @@ -207,7 +215,7 @@ def build(bld): bld.program( packages = 'gtk+-2.0', target = 'vala-gtk-example', - uselib = 'GTK GLIB', + use = 'GTK GLIB', source = 'vala-gtk-example.vala foo.vala', vala_defines = ['DEBUG'] # adds --define= values to the command-line @@ -220,7 +228,7 @@ def build(bld): #install_binding = False # profile = 'xyz' # adds --profile= to enable profiling - # threading = True, # add --threading, except if profile is on or not on 'gobject' + # thread = True, # adds --thread, except if profile is on or not on 'gobject' # vala_target_glib = 'xyz' # adds --target-glib=, can be given through the command-line option --vala-target-glib= ) @@ -236,10 +244,20 @@ def build(bld): self.init_vala_task() valatask.inputs.append(node) - c_node = node.change_ext('.c') + name = node.name[:node.name.rfind('.')] + '.c' + c_node = valatask.vala_dir_node.find_or_declare(name) valatask.outputs.append(c_node) self.source.append(c_node) +@extension('.vapi') +def vapi_file(self, node): + try: + valatask = self.valatask + except AttributeError: + valatask = self.valatask = self.create_task('valac') + self.init_vala_task() + valatask.inputs.append(node) + @conf def find_valac(self, valac_name, min_version): """ @@ -254,10 +272,10 @@ def find_valac(self, valac_name, min_version): valac = self.find_program(valac_name, var='VALAC') try: output = self.cmd_and_log(valac + ['--version']) - except Exception: + except Errors.WafError: valac_version = None else: - ver = re.search(r'\d+.\d+.\d+', output).group(0).split('.') + ver = re.search(r'\d+.\d+.\d+', output).group().split('.') valac_version = tuple([int(x) for x in ver]) self.msg('Checking for %s version >= %r' % (valac_name, min_version), @@ -265,7 +283,7 @@ def find_valac(self, valac_name, min_version): if valac and valac_version < min_version: self.fatal("%s version %r is too old, need >= %r" % (valac_name, valac_version, min_version)) - self.env['VALAC_VERSION'] = valac_version + self.env.VALAC_VERSION = valac_version return valac @conf @@ -279,6 +297,10 @@ def check_vala(self, min_version=(0,8,0), branch=None): :param branch: first part of the version number, in case a snapshot is used (0, 8) :type branch: tuple of int """ + if 
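The new vala_dir, thread and vala_resources attributes introduced above are set on the task generator, in the same way as the bld.program example shown earlier. A sketch with hypothetical file names::

    def build(bld):
        bld.program(
            packages       = 'gtk+-2.0',
            target         = 'vala-gtk-example',
            use            = 'GTK GLIB',
            source         = 'main.vala',                   # hypothetical source file
            vala_dir       = 'generated',                   # generated .c/.h/.vapi files go below this build folder
            thread         = True,                          # adds --thread (gobject profile only)
            vala_resources = 'resources.gresource.xml')     # passed via --gresources and excluded from compilation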
self.env.VALA_MINVER: + min_version = self.env.VALA_MINVER + if self.env.VALA_MINVER_BRANCH: + branch = self.env.VALA_MINVER_BRANCH if not branch: branch = min_version[:2] try: @@ -291,7 +313,7 @@ def check_vala_deps(self): """ Load the gobject and gthread packages if they are missing. """ - if not self.env['HAVE_GOBJECT']: + if not self.env.HAVE_GOBJECT: pkg_args = {'package': 'gobject-2.0', 'uselib_store': 'GOBJECT', 'args': '--cflags --libs'} @@ -299,7 +321,7 @@ def check_vala_deps(self): pkg_args['atleast_version'] = Options.options.vala_target_glib self.check_cfg(**pkg_args) - if not self.env['HAVE_GTHREAD']: + if not self.env.HAVE_GTHREAD: pkg_args = {'package': 'gthread-2.0', 'uselib_store': 'GTHREAD', 'args': '--cflags --libs'} @@ -312,13 +334,14 @@ def configure(self): Use the following to enforce minimum vala version:: def configure(conf): - conf.load('vala', funs='') - conf.check_vala(min_version=(0,10,0)) + conf.env.VALA_MINVER = (0, 10, 0) + conf.load('vala') """ self.load('gnu_dirs') self.check_vala_deps() self.check_vala() - self.env.VALAFLAGS = ['-C', '--quiet'] + self.add_os_flags('VALAFLAGS') + self.env.append_unique('VALAFLAGS', ['-C']) def options(opt): """ @@ -326,7 +349,7 @@ def options(opt): """ opt.load('gnu_dirs') valaopts = opt.add_option_group('Vala Compiler Options') - valaopts.add_option ('--vala-target-glib', default=None, + valaopts.add_option('--vala-target-glib', default=None, dest='vala_target_glib', metavar='MAJOR.MINOR', help='Target version of glib for Vala GObject code generation') diff --git a/waflib/Tools/waf_unit_test.py b/waflib/Tools/waf_unit_test.py index 27cd9a4027..8cff89bdeb 100644 --- a/waflib/Tools/waf_unit_test.py +++ b/waflib/Tools/waf_unit_test.py @@ -1,10 +1,10 @@ #!/usr/bin/env python # encoding: utf-8 # Carlos Rafael Giani, 2006 -# Thomas Nagy, 2010 +# Thomas Nagy, 2010-2018 (ita) """ -Unit testing system for C/C++/D providing test execution: +Unit testing system for C/C++/D and interpreted languages providing test execution: * in parallel, by using ``waf -j`` * partial (only the tests that have changed) or full (by using ``waf --alltests``) @@ -31,38 +31,142 @@ def build(bld): bld(features='cxx cxxprogram test', source='main.c', target='app') from waflib.Tools import waf_unit_test bld.add_post_fun(waf_unit_test.summary) + +By passing --dump-test-scripts the build outputs corresponding python files +(with extension _run.py) that are useful for debugging purposes. """ -import os +import os, shlex, sys from waflib.TaskGen import feature, after_method, taskgen_method from waflib import Utils, Task, Logs, Options +from waflib.Tools import ccroot testlock = Utils.threading.Lock() +SCRIPT_TEMPLATE = """#! %(python)s +import subprocess, sys +cmd = %(cmd)r +# if you want to debug with gdb: +#cmd = ['gdb', '-args'] + cmd +env = %(env)r +status = subprocess.call(cmd, env=env, cwd=%(cwd)r, shell=isinstance(cmd, str)) +sys.exit(status) +""" + +@taskgen_method +def handle_ut_cwd(self, key): + """ + Task generator method, used internally to limit code duplication. + This method may disappear anytime. 
+ """ + cwd = getattr(self, key, None) + if cwd: + if isinstance(cwd, str): + # we want a Node instance + if os.path.isabs(cwd): + self.ut_cwd = self.bld.root.make_node(cwd) + else: + self.ut_cwd = self.path.make_node(cwd) + +@feature('test_scripts') +def make_interpreted_test(self): + """Create interpreted unit tests.""" + for x in ['test_scripts_source', 'test_scripts_template']: + if not hasattr(self, x): + Logs.warn('a test_scripts taskgen i missing %s' % x) + return + + self.ut_run, lst = Task.compile_fun(self.test_scripts_template, shell=getattr(self, 'test_scripts_shell', False)) + + script_nodes = self.to_nodes(self.test_scripts_source) + for script_node in script_nodes: + tsk = self.create_task('utest', [script_node]) + tsk.vars = lst + tsk.vars + tsk.env['SCRIPT'] = script_node.path_from(tsk.get_cwd()) + + self.handle_ut_cwd('test_scripts_cwd') + + env = getattr(self, 'test_scripts_env', None) + if env: + self.ut_env = env + else: + self.ut_env = dict(os.environ) + + paths = getattr(self, 'test_scripts_paths', {}) + for (k,v) in paths.items(): + p = self.ut_env.get(k, '').split(os.pathsep) + if isinstance(v, str): + v = v.split(os.pathsep) + self.ut_env[k] = os.pathsep.join(p + v) + self.env.append_value('UT_DEPS', ['%r%r' % (key, self.ut_env[key]) for key in self.ut_env]) + @feature('test') -@after_method('apply_link') +@after_method('apply_link', 'process_use') def make_test(self): """Create the unit test task. There can be only one unit test task by task generator.""" - if getattr(self, 'link_task', None): - self.create_task('utest', self.link_task.outputs) - + if not getattr(self, 'link_task', None): + return + + tsk = self.create_task('utest', self.link_task.outputs) + if getattr(self, 'ut_str', None): + self.ut_run, lst = Task.compile_fun(self.ut_str, shell=getattr(self, 'ut_shell', False)) + tsk.vars = tsk.vars + lst + self.env.append_value('UT_DEPS', self.ut_str) + + self.handle_ut_cwd('ut_cwd') + + if not hasattr(self, 'ut_paths'): + paths = [] + for x in self.tmp_use_sorted: + try: + y = self.bld.get_tgen_by_name(x).link_task + except AttributeError: + pass + else: + if not isinstance(y, ccroot.stlink_task): + paths.append(y.outputs[0].parent.abspath()) + self.ut_paths = os.pathsep.join(paths) + os.pathsep + + if not hasattr(self, 'ut_env'): + self.ut_env = dct = dict(os.environ) + def add_path(var): + dct[var] = self.ut_paths + dct.get(var,'') + if Utils.is_win32: + add_path('PATH') + elif Utils.unversioned_sys_platform() == 'darwin': + add_path('DYLD_LIBRARY_PATH') + add_path('LD_LIBRARY_PATH') + else: + add_path('LD_LIBRARY_PATH') + + if not hasattr(self, 'ut_cmd'): + self.ut_cmd = getattr(Options.options, 'testcmd', False) + + self.env.append_value('UT_DEPS', str(self.ut_cmd)) + self.env.append_value('UT_DEPS', self.ut_paths) + self.env.append_value('UT_DEPS', ['%r%r' % (key, self.ut_env[key]) for key in self.ut_env]) @taskgen_method def add_test_results(self, tup): """Override and return tup[1] to interrupt the build immediately if a test does not run""" Logs.debug("ut: %r", tup) - self.utest_result = tup + try: + self.utest_results.append(tup) + except AttributeError: + self.utest_results = [tup] try: self.bld.utest_results.append(tup) except AttributeError: self.bld.utest_results = [tup] +@Task.deep_inputs class utest(Task.Task): """ Execute a unit test """ color = 'PINK' after = ['vnum', 'inst'] - vars = [] + vars = ['UT_DEPS'] + def runnable_status(self): """ Always execute the task if `waf --alltests` was used or no @@ -77,37 +181,17 @@ def 
runnable_status(self): return Task.RUN_ME return ret - def add_path(self, dct, path, var): - dct[var] = os.pathsep.join(Utils.to_list(path) + [os.environ.get(var, '')]) - def get_test_env(self): """ In general, tests may require any library built anywhere in the project. Override this method if fewer paths are needed """ - try: - fu = getattr(self.generator.bld, 'all_test_paths') - except AttributeError: - # this operation may be performed by at most #maxjobs - fu = os.environ.copy() - - lst = [] - for g in self.generator.bld.groups: - for tg in g: - if getattr(tg, 'link_task', None): - s = tg.link_task.outputs[0].parent.abspath() - if s not in lst: - lst.append(s) - - if Utils.is_win32: - self.add_path(fu, lst, 'PATH') - elif Utils.unversioned_sys_platform() == 'darwin': - self.add_path(fu, lst, 'DYLD_LIBRARY_PATH') - self.add_path(fu, lst, 'LD_LIBRARY_PATH') - else: - self.add_path(fu, lst, 'LD_LIBRARY_PATH') - self.generator.bld.all_test_paths = fu - return fu + return self.generator.ut_env + + def post_run(self): + super(utest, self).post_run() + if getattr(Options.options, 'clear_failed_tests', False) and self.waf_unit_test_results[1]: + self.generator.bld.task_sigs[self.uid()] = None def run(self): """ @@ -116,29 +200,44 @@ def run(self): Override ``add_test_results`` to interrupt the build """ - - filename = self.inputs[0].abspath() - self.ut_exec = getattr(self.generator, 'ut_exec', [filename]) - if getattr(self.generator, 'ut_fun', None): - self.generator.ut_fun(self) - - - cwd = getattr(self.generator, 'ut_cwd', '') or self.inputs[0].parent.abspath() - - testcmd = getattr(self.generator, 'ut_cmd', False) or getattr(Options.options, 'testcmd', False) - if testcmd: - self.ut_exec = (testcmd % self.ut_exec[0]).split(' ') - - proc = Utils.subprocess.Popen(self.ut_exec, cwd=cwd, env=self.get_test_env(), stderr=Utils.subprocess.PIPE, stdout=Utils.subprocess.PIPE) + if hasattr(self.generator, 'ut_run'): + return self.generator.ut_run(self) + + self.ut_exec = getattr(self.generator, 'ut_exec', [self.inputs[0].abspath()]) + ut_cmd = getattr(self.generator, 'ut_cmd', False) + if ut_cmd: + self.ut_exec = shlex.split(ut_cmd % Utils.shell_escape(self.ut_exec)) + + return self.exec_command(self.ut_exec) + + def exec_command(self, cmd, **kw): + self.generator.bld.log_command(cmd, kw) + if getattr(Options.options, 'dump_test_scripts', False): + script_code = SCRIPT_TEMPLATE % { + 'python': sys.executable, + 'env': self.get_test_env(), + 'cwd': self.get_cwd().abspath(), + 'cmd': cmd + } + script_file = self.inputs[0].abspath() + '_run.py' + Utils.writef(script_file, script_code, encoding='utf-8') + os.chmod(script_file, Utils.O755) + if Logs.verbose > 1: + Logs.info('Test debug file written as %r' % script_file) + + proc = Utils.subprocess.Popen(cmd, cwd=self.get_cwd().abspath(), env=self.get_test_env(), + stderr=Utils.subprocess.PIPE, stdout=Utils.subprocess.PIPE, shell=isinstance(cmd,str)) (stdout, stderr) = proc.communicate() - - tup = (filename, proc.returncode, stdout, stderr) + self.waf_unit_test_results = tup = (self.inputs[0].abspath(), proc.returncode, stdout, stderr) testlock.acquire() try: return self.generator.add_test_results(tup) finally: testlock.release() + def get_cwd(self): + return getattr(self.generator, 'ut_cwd', self.inputs[0].parent) + def summary(bld): """ Display an execution summary:: @@ -155,15 +254,15 @@ def build(bld): total = len(lst) tfail = len([x for x in lst if x[1]]) - Logs.pprint('CYAN', ' tests that pass %d/%d' % (total-tfail, total)) + Logs.pprint('GREEN', ' 
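For compiled tests, the same machinery is configured through ut_str and friends; a sketch with hypothetical sources, following the pattern of the module docstring::

    def build(bld):
        bld(features = 'c cprogram test',
            source   = 'test_main.c',
            target   = 'test_main',
            use      = 'mylib',                 # dependency library paths end up in ut_paths
            ut_str   = '${SRC} --verbose')      # ${SRC} expands to the freshly built test binary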
tests that pass %d/%d' % (total-tfail, total)) for (f, code, out, err) in lst: if not code: - Logs.pprint('CYAN', ' %s' % f) + Logs.pprint('GREEN', ' %s' % f) - Logs.pprint('CYAN', ' tests that fail %d/%d' % (tfail, total)) + Logs.pprint('GREEN' if tfail == 0 else 'RED', ' tests that fail %d/%d' % (tfail, total)) for (f, code, out, err) in lst: if code: - Logs.pprint('CYAN', ' %s' % f) + Logs.pprint('RED', ' %s' % f) def set_exit_code(bld): """ @@ -194,8 +293,10 @@ def options(opt): """ opt.add_option('--notests', action='store_true', default=False, help='Exec no unit tests', dest='no_tests') opt.add_option('--alltests', action='store_true', default=False, help='Exec all unit tests', dest='all_tests') - opt.add_option('--testcmd', action='store', default=False, - help = 'Run the unit tests using the test-cmd string' - ' example "--test-cmd="valgrind --error-exitcode=1' - ' %s" to run under valgrind', dest='testcmd') + opt.add_option('--clear-failed', action='store_true', default=False, + help='Force failed unit tests to run again next time', dest='clear_failed_tests') + opt.add_option('--testcmd', action='store', default=False, dest='testcmd', + help='Run the unit tests using the test-cmd string example "--testcmd="valgrind --error-exitcode=1 %s" to run under valgrind') + opt.add_option('--dump-test-scripts', action='store_true', default=False, + help='Create python scripts to help debug tests', dest='dump_test_scripts') diff --git a/waflib/Tools/winres.py b/waflib/Tools/winres.py index ddddd94904..73c0e95315 100644 --- a/waflib/Tools/winres.py +++ b/waflib/Tools/winres.py @@ -4,18 +4,20 @@ "Process *.rc* files for C/C++: X{.rc -> [.res|.rc.o]}" -import re, traceback -from waflib import Task, Logs, Utils +import os +import re +from waflib import Task from waflib.TaskGen import extension from waflib.Tools import c_preproc +from waflib import Utils @extension('.rc') def rc_file(self, node): """ - Bind the .rc extension to a winrc task + Binds the .rc extension to a winrc task """ obj_ext = '.rc.o' - if self.env['WINRC_TGT_F'] == '/fo': + if self.env.WINRC_TGT_F == '/fo': obj_ext = '.res' rctask = self.create_task('winrc', node, node.change_ext(obj_ext)) try: @@ -24,15 +26,22 @@ def rc_file(self, node): self.compiled_tasks = [rctask] re_lines = re.compile( - '(?:^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*?)\s*$)|'\ - '(?:^\w+[ \t]*(ICON|BITMAP|CURSOR|HTML|FONT|MESSAGETABLE|TYPELIB|REGISTRY|D3DFX)[ \t]*(.*?)\s*$)', + r'(?:^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*?)\s*$)|'\ + r'(?:^\w+[ \t]*(ICON|BITMAP|CURSOR|HTML|FONT|MESSAGETABLE|TYPELIB|REGISTRY|D3DFX)[ \t]*(.*?)\s*$)', re.IGNORECASE | re.MULTILINE) class rc_parser(c_preproc.c_parser): - def filter_comments(self, filepath): - code = Utils.readf(filepath) + """ + Calculates dependencies in .rc files + """ + def filter_comments(self, node): + """ + Overrides :py:meth:`waflib.Tools.c_preproc.c_parser.filter_comments` + """ + code = node.read() if c_preproc.use_trigraphs: - for (a, b) in c_preproc.trig_def: code = code.split(a).join(b) + for (a, b) in c_preproc.trig_def: + code = code.split(a).join(b) code = c_preproc.re_nl.sub('', code) code = c_preproc.re_cpp.sub(c_preproc.repl, code) ret = [] @@ -43,72 +52,62 @@ def filter_comments(self, filepath): ret.append(('include', m.group(5))) return ret - def addlines(self, node): - self.currentnode_stack.append(node.parent) - filepath = node.abspath() - - self.count_files += 1 - if 
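The reporting helpers remain opt-in: they are registered as post-build functions, and the new options are passed on the waf command line. A sketch::

    def build(bld):
        from waflib.Tools import waf_unit_test
        bld.add_post_fun(waf_unit_test.summary)        # green/red pass-fail report shown above
        bld.add_post_fun(waf_unit_test.set_exit_code)  # make the build fail when a test fails

    # typical invocations:
    #   waf --alltests
    #   waf --testcmd='valgrind --error-exitcode=1 %s'
    #   waf --dump-test-scripts --clear-failed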
self.count_files > c_preproc.recursion_limit: - raise c_preproc.PreprocError("recursion limit exceeded") - pc = self.parse_cache - Logs.debug('preproc: reading file %r', filepath) - try: - lns = pc[filepath] - except KeyError: - pass - else: - self.lines.extend(lns) - return - - try: - lines = self.filter_comments(filepath) - lines.append((c_preproc.POPFILE, '')) - lines.reverse() - pc[filepath] = lines - self.lines.extend(lines) - except IOError: - raise c_preproc.PreprocError("could not read the file %s" % filepath) - except Exception: - if Logs.verbose > 0: - Logs.error("parsing %s failed" % filepath) - traceback.print_exc() - class winrc(Task.Task): """ - Task for compiling resource files + Compiles resource files """ run_str = '${WINRC} ${WINRCFLAGS} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${WINRC_TGT_F} ${TGT} ${WINRC_SRC_F} ${SRC}' color = 'BLUE' - def scan(self): tmp = rc_parser(self.generator.includes_nodes) tmp.start(self.inputs[0], self.env) - nodes = tmp.nodes - names = tmp.names - - if Logs.verbose: - Logs.debug('deps: deps for %s: %r; unresolved %r' % (str(self), nodes, names)) + return (tmp.nodes, tmp.names) + + def exec_command(self, cmd, **kw): + if self.env.WINRC_TGT_F == '/fo': + # Since winres include paths may contain spaces, they do not fit in + # response files and are best passed as environment variables + replace_cmd = [] + incpaths = [] + while cmd: + # filter include path flags + flag = cmd.pop(0) + if flag.upper().startswith('/I'): + if len(flag) == 2: + incpaths.append(cmd.pop(0)) + else: + incpaths.append(flag[2:]) + else: + replace_cmd.append(flag) + cmd = replace_cmd + if incpaths: + # append to existing environment variables in INCLUDE + env = kw['env'] = dict(kw.get('env') or self.env.env or os.environ) + pre_includes = env.get('INCLUDE', '') + env['INCLUDE'] = pre_includes + os.pathsep + os.pathsep.join(incpaths) + + return super(winrc, self).exec_command(cmd, **kw) + + def quote_flag(self, flag): + if self.env.WINRC_TGT_F == '/fo': + # winres does not support quotes around flags in response files + return flag + + return super(winrc, self).quote_flag(flag) - return (nodes, names) def configure(conf): """ - Detect the programs RC or windres, depending on the C/C++ compiler in use + Detects the programs RC or windres, depending on the C/C++ compiler in use """ v = conf.env - v['WINRC_TGT_F'] = '-o' - v['WINRC_SRC_F'] = '-i' - - # find rc.exe - if not conf.env.WINRC: + if not v.WINRC: if v.CC_NAME == 'msvc': - conf.find_program('RC', var='WINRC', path_list = v['PATH']) - v['WINRC_TGT_F'] = '/fo' - v['WINRC_SRC_F'] = '' + conf.find_program('RC', var='WINRC', path_list=v.PATH) + v.WINRC_TGT_F = '/fo' + v.WINRC_SRC_F = '' else: - conf.find_program('windres', var='WINRC', path_list = v['PATH']) - if not conf.env.WINRC: - conf.fatal('winrc was not found!') - - v['WINRCFLAGS'] = [] + conf.find_program('windres', var='WINRC', path_list=v.PATH) + v.WINRC_TGT_F = '-o' + v.WINRC_SRC_F = '-i' diff --git a/waflib/Tools/xlc.py b/waflib/Tools/xlc.py index 3bd8d02637..134dd4152d 100644 --- a/waflib/Tools/xlc.py +++ b/waflib/Tools/xlc.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # encoding: utf-8 -# Thomas Nagy, 2006-2010 (ita) +# Thomas Nagy, 2006-2018 (ita) # Ralf Habacker, 2006 (rh) # Yinon Ehrlich, 2009 # Michael Kuhn, 2009 @@ -11,7 +11,7 @@ @conf def find_xlc(conf): """ - Detect the Aix C compiler + Detects the Aix C compiler """ cc = conf.find_program(['xlc_r', 'xlc'], var='CC') conf.get_xlc_version(cc) @@ -24,38 +24,36 @@ def xlc_common_flags(conf): """ v = 
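Resource files need no dedicated task generator; they are simply listed among the C/C++ sources and picked up by the rc_file extension hook. A sketch with hypothetical file names::

    def build(bld):
        bld.program(
            source   = 'main.c app.rc',   # the .rc file is compiled by a winrc task
            target   = 'app',
            includes = '.')               # include paths are also forwarded to the resource compiler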
conf.env - v['CC_SRC_F'] = [] - v['CC_TGT_F'] = ['-c', '-o'] + v.CC_SRC_F = [] + v.CC_TGT_F = ['-c', '-o'] - # linker - if not v['LINK_CC']: v['LINK_CC'] = v['CC'] - v['CCLNK_SRC_F'] = [] - v['CCLNK_TGT_F'] = ['-o'] - v['CPPPATH_ST'] = '-I%s' - v['DEFINES_ST'] = '-D%s' + if not v.LINK_CC: + v.LINK_CC = v.CC - v['LIB_ST'] = '-l%s' # template for adding libs - v['LIBPATH_ST'] = '-L%s' # template for adding libpaths - v['STLIB_ST'] = '-l%s' - v['STLIBPATH_ST'] = '-L%s' - v['RPATH_ST'] = '-Wl,-rpath,%s' + v.CCLNK_SRC_F = [] + v.CCLNK_TGT_F = ['-o'] + v.CPPPATH_ST = '-I%s' + v.DEFINES_ST = '-D%s' - v['SONAME_ST'] = [] - v['SHLIB_MARKER'] = [] - v['STLIB_MARKER'] = [] + v.LIB_ST = '-l%s' # template for adding libs + v.LIBPATH_ST = '-L%s' # template for adding libpaths + v.STLIB_ST = '-l%s' + v.STLIBPATH_ST = '-L%s' + v.RPATH_ST = '-Wl,-rpath,%s' - # program - v['LINKFLAGS_cprogram'] = ['-Wl,-brtl'] - v['cprogram_PATTERN'] = '%s' + v.SONAME_ST = [] + v.SHLIB_MARKER = [] + v.STLIB_MARKER = [] - # shared library - v['CFLAGS_cshlib'] = ['-fPIC'] - v['LINKFLAGS_cshlib'] = ['-G', '-Wl,-brtl,-bexpfull'] - v['cshlib_PATTERN'] = 'lib%s.so' + v.LINKFLAGS_cprogram = ['-Wl,-brtl'] + v.cprogram_PATTERN = '%s' - # static lib - v['LINKFLAGS_cstlib'] = [] - v['cstlib_PATTERN'] = 'lib%s.a' + v.CFLAGS_cshlib = ['-fPIC'] + v.LINKFLAGS_cshlib = ['-G', '-Wl,-brtl,-bexpfull'] + v.cshlib_PATTERN = 'lib%s.so' + + v.LINKFLAGS_cstlib = [] + v.cstlib_PATTERN = 'lib%s.a' def configure(conf): conf.find_xlc() diff --git a/waflib/Tools/xlcxx.py b/waflib/Tools/xlcxx.py index 150aeaa489..76aa59bc91 100644 --- a/waflib/Tools/xlcxx.py +++ b/waflib/Tools/xlcxx.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # encoding: utf-8 -# Thomas Nagy, 2006-2010 (ita) +# Thomas Nagy, 2006-2018 (ita) # Ralf Habacker, 2006 (rh) # Yinon Ehrlich, 2009 # Michael Kuhn, 2009 @@ -11,7 +11,7 @@ @conf def find_xlcxx(conf): """ - Detect the Aix C++ compiler + Detects the Aix C++ compiler """ cxx = conf.find_program(['xlc++_r', 'xlc++'], var='CXX') conf.get_xlc_version(cxx) @@ -24,38 +24,36 @@ def xlcxx_common_flags(conf): """ v = conf.env - v['CXX_SRC_F'] = [] - v['CXX_TGT_F'] = ['-c', '-o'] + v.CXX_SRC_F = [] + v.CXX_TGT_F = ['-c', '-o'] - # linker - if not v['LINK_CXX']: v['LINK_CXX'] = v['CXX'] - v['CXXLNK_SRC_F'] = [] - v['CXXLNK_TGT_F'] = ['-o'] - v['CPPPATH_ST'] = '-I%s' - v['DEFINES_ST'] = '-D%s' + if not v.LINK_CXX: + v.LINK_CXX = v.CXX - v['LIB_ST'] = '-l%s' # template for adding libs - v['LIBPATH_ST'] = '-L%s' # template for adding libpaths - v['STLIB_ST'] = '-l%s' - v['STLIBPATH_ST'] = '-L%s' - v['RPATH_ST'] = '-Wl,-rpath,%s' + v.CXXLNK_SRC_F = [] + v.CXXLNK_TGT_F = ['-o'] + v.CPPPATH_ST = '-I%s' + v.DEFINES_ST = '-D%s' - v['SONAME_ST'] = [] - v['SHLIB_MARKER'] = [] - v['STLIB_MARKER'] = [] + v.LIB_ST = '-l%s' # template for adding libs + v.LIBPATH_ST = '-L%s' # template for adding libpaths + v.STLIB_ST = '-l%s' + v.STLIBPATH_ST = '-L%s' + v.RPATH_ST = '-Wl,-rpath,%s' - # program - v['LINKFLAGS_cxxprogram']= ['-Wl,-brtl'] - v['cxxprogram_PATTERN'] = '%s' + v.SONAME_ST = [] + v.SHLIB_MARKER = [] + v.STLIB_MARKER = [] - # shared library - v['CXXFLAGS_cxxshlib'] = ['-fPIC'] - v['LINKFLAGS_cxxshlib'] = ['-G', '-Wl,-brtl,-bexpfull'] - v['cxxshlib_PATTERN'] = 'lib%s.so' + v.LINKFLAGS_cxxprogram= ['-Wl,-brtl'] + v.cxxprogram_PATTERN = '%s' - # static lib - v['LINKFLAGS_cxxstlib'] = [] - v['cxxstlib_PATTERN'] = 'lib%s.a' + v.CXXFLAGS_cxxshlib = ['-fPIC'] + v.LINKFLAGS_cxxshlib = ['-G', '-Wl,-brtl,-bexpfull'] + v.cxxshlib_PATTERN = 'lib%s.so' + + 
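Most of this hunk only switches from the v['KEY'] subscript form to the v.KEY attribute form; on ConfigSet objects the two are equivalent, for example::

    from waflib.ConfigSet import ConfigSet

    env = ConfigSet()
    env.CFLAGS = ['-O2']
    assert env['CFLAGS'] == env.CFLAGS == ['-O2']
    assert env.UNDEFINED_KEY == []    # unknown keys read as an empty list rather than raising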
v.LINKFLAGS_cxxstlib = [] + v.cxxstlib_PATTERN = 'lib%s.a' def configure(conf): conf.find_xlcxx() diff --git a/waflib/Utils.py b/waflib/Utils.py index 25f91c7014..ea0f7a9db8 100644 --- a/waflib/Utils.py +++ b/waflib/Utils.py @@ -1,16 +1,37 @@ #!/usr/bin/env python # encoding: utf-8 -# Thomas Nagy, 2005-2010 (ita) +# Thomas Nagy, 2005-2018 (ita) """ Utilities and platform-specific fixes The portability fixes try to provide a consistent behavior of the Waf API -through Python versions 2.3 to 3.X and across different platforms (win32, linux, etc) +through Python versions 2.5 to 3.X and across different platforms (win32, linux, etc) """ -import os, sys, errno, traceback, inspect, re, shutil, datetime, gc, platform -import subprocess # <- leave this! +from __future__ import with_statement + +import atexit, os, sys, errno, inspect, re, datetime, platform, base64, signal, functools, time, shlex + +try: + import cPickle +except ImportError: + import pickle as cPickle + +# leave this +if os.name == 'posix' and sys.version_info[0] < 3: + try: + import subprocess32 as subprocess + except ImportError: + import subprocess +else: + import subprocess + +try: + TimeoutExpired = subprocess.TimeoutExpired +except AttributeError: + class TimeoutExpired(Exception): + pass from collections import deque, defaultdict @@ -24,19 +45,20 @@ from waflib import Errors -try: - from collections import UserDict -except ImportError: - from UserDict import UserDict - try: from hashlib import md5 except ImportError: try: - from md5 import md5 + from hashlib import sha1 as md5 except ImportError: - # never fail to enable fixes from another module + # never fail to enable potential fixes from another module pass +else: + try: + md5().digest() + except ValueError: + # Fips? #2213 + from hashlib import sha1 as md5 try: import threading @@ -47,8 +69,8 @@ class threading(object): """ - A fake threading class for platforms lacking the threading module. - Use ``waf -j1`` on those platforms + A fake threading class for platforms lacking the threading module. + Use ``waf -j1`` on those platforms """ pass class Lock(object): @@ -58,19 +80,9 @@ def acquire(self): def release(self): pass threading.Lock = threading.Thread = Lock -else: - run_old = threading.Thread.run - def run(*args, **kwargs): - try: - run_old(*args, **kwargs) - except (KeyboardInterrupt, SystemExit): - raise - except Exception: - sys.excepthook(*sys.exc_info()) - threading.Thread.run = run -SIG_NIL = 'iluvcuteoverload'.encode() -"""Arbitrary null value for a md5 hash. This value must be changed when the hash value is replaced (size)""" +SIG_NIL = 'SIG_NIL_SIG_NIL_'.encode() +"""Arbitrary null value for hashes. 
Modify this value according to the hash function in use""" O644 = 420 """Constant representing the permissions for regular files (0644 raises a syntax error on python 3)""" @@ -84,42 +96,126 @@ def run(*args, **kwargs): rot_idx = 0 "Index of the current throbber character (progress bar)" -try: - from collections import OrderedDict as ordered_iter_dict -except ImportError: - class ordered_iter_dict(dict): - def __init__(self, *k, **kw): - self.lst = [] - dict.__init__(self, *k, **kw) - def clear(self): - dict.clear(self) - self.lst = [] - def __setitem__(self, key, value): - dict.__setitem__(self, key, value) - try: - self.lst.remove(key) - except ValueError: - pass - self.lst.append(key) - def __delitem__(self, key): - dict.__delitem__(self, key) - try: - self.lst.remove(key) - except ValueError: - pass - def __iter__(self): - for x in self.lst: - yield x - def keys(self): - return self.lst - -is_win32 = os.sep == '\\' or sys.platform == 'win32' # msys2 - -def readf(fname, m='r', encoding='ISO8859-1'): - """ - Read an entire file into a string, use this function instead of os.open() whenever possible. - - In practice the wrapper node.read(..) should be preferred to this function:: +class ordered_iter_dict(dict): + """Ordered dictionary that provides iteration from the most recently inserted keys first""" + def __init__(self, *k, **kw): + self.lst = deque() + dict.__init__(self, *k, **kw) + def clear(self): + dict.clear(self) + self.lst = deque() + def __setitem__(self, key, value): + if key in dict.keys(self): + self.lst.remove(key) + dict.__setitem__(self, key, value) + self.lst.append(key) + def __delitem__(self, key): + dict.__delitem__(self, key) + try: + self.lst.remove(key) + except ValueError: + pass + def __iter__(self): + return reversed(self.lst) + def keys(self): + return reversed(self.lst) + +class lru_node(object): + """ + Used by :py:class:`waflib.Utils.lru_cache` + """ + __slots__ = ('next', 'prev', 'key', 'val') + def __init__(self): + self.next = self + self.prev = self + self.key = None + self.val = None + +class lru_cache(object): + """ + A simple least-recently used cache with lazy allocation + """ + __slots__ = ('maxlen', 'table', 'head') + def __init__(self, maxlen=100): + self.maxlen = maxlen + """ + Maximum amount of elements in the cache + """ + self.table = {} + """ + Mapping key-value + """ + self.head = lru_node() + self.head.next = self.head + self.head.prev = self.head + + def __getitem__(self, key): + node = self.table[key] + # assert(key==node.key) + if node is self.head: + return node.val + + # detach the node found + node.prev.next = node.next + node.next.prev = node.prev + + # replace the head + node.next = self.head.next + node.prev = self.head + self.head = node.next.prev = node.prev.next = node + + return node.val + + def __setitem__(self, key, val): + if key in self.table: + # update the value for an existing key + node = self.table[key] + node.val = val + self.__getitem__(key) + else: + if len(self.table) < self.maxlen: + # the very first item is unused until the maximum is reached + node = lru_node() + node.prev = self.head + node.next = self.head.next + node.prev.next = node.next.prev = node + else: + node = self.head = self.head.next + try: + # that's another key + del self.table[node.key] + except KeyError: + pass + + node.key = key + node.val = val + self.table[key] = node + +class lazy_generator(object): + def __init__(self, fun, params): + self.fun = fun + self.params = params + + def __iter__(self): + return self + + def __next__(self): + 
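A small usage sketch of the lru_cache class defined above::

    from waflib import Utils

    cache = Utils.lru_cache(maxlen=2)
    cache['a'] = 1
    cache['b'] = 2
    cache['a']              # 'a' becomes the most recently used entry
    cache['c'] = 3          # cache full: the least recently used key ('b') is recycled
    assert 'a' in cache.table and 'b' not in cache.table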
try: + it = self.it + except AttributeError: + it = self.it = self.fun(*self.params) + return next(it) + + next = __next__ + +is_win32 = os.sep == '\\' or sys.platform == 'win32' or os.name == 'nt' # msys2 +""" +Whether this system is a Windows series +""" + +def readf(fname, m='r', encoding='latin-1'): + """ + Reads an entire file into a string. See also :py:meth:`waflib.Node.Node.readf`:: def build(ctx): from waflib import Utils @@ -138,28 +234,21 @@ def build(ctx): if sys.hexversion > 0x3000000 and not 'b' in m: m += 'b' - f = open(fname, m) - try: + with open(fname, m) as f: txt = f.read() - finally: - f.close() if encoding: txt = txt.decode(encoding) else: txt = txt.decode() else: - f = open(fname, m) - try: + with open(fname, m) as f: txt = f.read() - finally: - f.close() return txt -def writef(fname, data, m='w', encoding='ISO8859-1'): +def writef(fname, data, m='w', encoding='latin-1'): """ - Write an entire file from a string, use this function instead of os.open() whenever possible. - - In practice the wrapper node.write(..) should be preferred to this function:: + Writes an entire file from a string. + See also :py:meth:`waflib.Node.Node.writef`:: def build(ctx): from waflib import Utils @@ -178,44 +267,27 @@ def build(ctx): if sys.hexversion > 0x3000000 and not 'b' in m: data = data.encode(encoding) m += 'b' - f = open(fname, m) - try: + with open(fname, m) as f: f.write(data) - finally: - f.close() def h_file(fname): """ - Compute a hash value for a file by using md5. This method may be replaced by - a faster version if necessary. The following uses the file size and the timestamp value:: - - import stat - from waflib import Utils - def h_file(fname): - st = os.stat(fname) - if stat.S_ISDIR(st[stat.ST_MODE]): raise IOError('not a file') - m = Utils.md5() - m.update(str(st.st_mtime)) - m.update(str(st.st_size)) - m.update(fname) - return m.digest() - Utils.h_file = h_file + Computes a hash value for a file by using md5. Use the md5_tstamp + extension to get faster build hashes if necessary. 
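And a sketch of lazy_generator, which defers building the underlying iterator until the first element is actually requested::

    from waflib import Utils

    def make_iter(n):
        print('iterator created')   # only printed once the first element is pulled
        return iter(range(n))

    lazy = Utils.lazy_generator(make_iter, (3,))
    assert list(lazy) == [0, 1, 2]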
:type fname: string :param fname: path to the file to hash :return: hash of the file contents + :rtype: string or bytes """ - f = open(fname, 'rb') m = md5() - try: + with open(fname, 'rb') as f: while fname: fname = f.read(200000) m.update(fname) - finally: - f.close() return m.digest() -def readf_win32(f, m='r', encoding='ISO8859-1'): +def readf_win32(f, m='r', encoding='latin-1'): flags = os.O_NOINHERIT | os.O_RDONLY if 'b' in m: flags |= os.O_BINARY @@ -228,24 +300,18 @@ def readf_win32(f, m='r', encoding='ISO8859-1'): if sys.hexversion > 0x3000000 and not 'b' in m: m += 'b' - f = os.fdopen(fd, m) - try: + with os.fdopen(fd, m) as f: txt = f.read() - finally: - f.close() if encoding: txt = txt.decode(encoding) else: txt = txt.decode() else: - f = os.fdopen(fd, m) - try: + with os.fdopen(fd, m) as f: txt = f.read() - finally: - f.close() return txt -def writef_win32(f, data, m='w', encoding='ISO8859-1'): +def writef_win32(f, data, m='w', encoding='latin-1'): if sys.hexversion > 0x3000000 and not 'b' in m: data = data.encode(encoding) m += 'b' @@ -257,26 +323,20 @@ def writef_win32(f, data, m='w', encoding='ISO8859-1'): try: fd = os.open(f, flags) except OSError: - raise IOError('Cannot write to %r' % f) - f = os.fdopen(fd, m) - try: + raise OSError('Cannot write to %r' % f) + with os.fdopen(fd, m) as f: f.write(data) - finally: - f.close() def h_file_win32(fname): try: fd = os.open(fname, os.O_BINARY | os.O_RDONLY | os.O_NOINHERIT) except OSError: - raise IOError('Cannot read from %r' % fname) - f = os.fdopen(fd, 'rb') + raise OSError('Cannot read from %r' % fname) m = md5() - try: + with os.fdopen(fd, 'rb') as f: while fname: fname = f.read(200000) m.update(fname) - finally: - f.close() return m.digest() # always save these @@ -311,8 +371,8 @@ def to_hex(s): def listdir_win32(s): """ - List the contents of a folder in a portable manner. - On Win32, return the list of drive letters: ['C:', 'X:', 'Z:'] + Lists the contents of a folder in a portable manner. + On Win32, returns the list of drive letters: ['C:', 'X:', 'Z:'] when an empty string is given. :type s: string :param s: a string, which can be empty on Windows @@ -322,7 +382,7 @@ def listdir_win32(s): import ctypes except ImportError: # there is nothing much we can do - return [x + ':\\' for x in list('ABCDEFGHIJKLMNOPQRSTUVWXYZ')] + return [x + ':\\' for x in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'] else: dlen = 4 # length of "?:\\x00" maxdrives = 26 @@ -345,7 +405,7 @@ def listdir_win32(s): def num2ver(ver): """ - Convert a string, tuple or version number into an integer. The number is supposed to have at most 4 digits:: + Converts a string, tuple or version number into an integer. The number is supposed to have at most 4 digits:: from waflib.Utils import num2ver num2ver('1.3.2') == num2ver((1,3,2)) == num2ver((1,3,2,0)) @@ -363,33 +423,39 @@ def num2ver(ver): return ret return ver -def ex_stack(): +def to_list(val): """ - Extract the stack to display exceptions - - :return: a string represening the last exception - """ - exc_type, exc_value, tb = sys.exc_info() - exc_lines = traceback.format_exception(exc_type, exc_value, tb) - return ''.join(exc_lines) - -def to_list(sth): - """ - Convert a string argument to a list by splitting on spaces, and pass - through a list argument unchanged:: + Converts a string argument to a list by splitting it by spaces. 
+ Returns the object if not a string:: from waflib.Utils import to_list - lst = to_list("a b c d") + lst = to_list('a b c d') - :param sth: List or a string of items separated by spaces + :param val: list of string or space-separated string :rtype: list :return: Argument converted to list - """ - if isinstance(sth, str): - return sth.split() + if isinstance(val, str): + return val.split() + else: + return val + +def console_encoding(): + try: + import ctypes + except ImportError: + pass else: - return sth + try: + codepage = ctypes.windll.kernel32.GetConsoleCP() + except AttributeError: + pass + else: + if codepage: + if 65001 == codepage and sys.version_info < (3, 3): + return 'utf-8' + return 'cp%d' % codepage + return sys.stdout.encoding or ('cp1252' if is_win32 else 'latin-1') def split_path_unix(path): return path.split('/') @@ -401,35 +467,55 @@ def split_path_cygwin(path): return ret return path.split('/') -re_sp = re.compile('[/\\\\]') +re_sp = re.compile('[/\\\\]+') def split_path_win32(path): if path.startswith('\\\\'): - ret = re.split(re_sp, path)[2:] - ret[0] = '\\' + ret[0] + ret = re_sp.split(path)[1:] + ret[0] = '\\\\' + ret[0] + if ret[0] == '\\\\?': + return ret[1:] return ret - return re.split(re_sp, path) + return re_sp.split(path) + +msysroot = None +def split_path_msys(path): + if path.startswith(('/', '\\')) and not path.startswith(('//', '\\\\')): + # msys paths can be in the form /usr/bin + global msysroot + if not msysroot: + # msys has python 2.7 or 3, so we can use this + msysroot = subprocess.check_output(['cygpath', '-w', '/']).decode(sys.stdout.encoding or 'latin-1') + msysroot = msysroot.strip() + path = os.path.normpath(msysroot + os.sep + path) + return split_path_win32(path) if sys.platform == 'cygwin': split_path = split_path_cygwin elif is_win32: - split_path = split_path_win32 + # Consider this an MSYSTEM environment if $MSYSTEM is set and python + # reports is executable from a unix like path on a windows host. + if os.environ.get('MSYSTEM') and sys.executable.startswith('/'): + split_path = split_path_msys + else: + split_path = split_path_win32 else: split_path = split_path_unix split_path.__doc__ = """ -Split a path by / or \\. This function is not like os.path.split +Splits a path by / or \\; do not confuse this function with with ``os.path.split`` :type path: string :param path: path to split -:return: list of strings +:return: list of string """ def check_dir(path): """ - Ensure that a directory exists (similar to ``mkdir -p``). + Ensures that a directory exists (similar to ``mkdir -p``). :type path: string :param path: Path to directory + :raises: :py:class:`waflib.Errors.WafError` if the folder cannot be added. """ if not os.path.isdir(path): try: @@ -440,10 +526,14 @@ def check_dir(path): def check_exe(name, env=None): """ - Ensure that a program exists + Ensures that a program exists + :type name: string - :param name: name or path to program + :param name: path to the program + :param env: configuration object + :type env: :py:class:`waflib.ConfigSet.ConfigSet` :return: path of the program or None + :raises: :py:class:`waflib.Errors.WafError` if the folder cannot be added. 
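Both helpers can be used outside of a build context; a sketch with hypothetical paths::

    from waflib import Utils

    Utils.check_dir('/tmp/waf-cache/objects')   # like mkdir -p, no-op if the folder already exists
    print(Utils.check_exe('gcc'))               # absolute path of the program, or None if not found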
""" if not name: raise ValueError('Cannot execute an empty string!') @@ -455,7 +545,7 @@ def is_exe(fpath): return os.path.abspath(name) else: env = env or os.environ - for path in env["PATH"].split(os.pathsep): + for path in env['PATH'].split(os.pathsep): path = path.strip('"') exe_file = os.path.join(path, name) if is_exe(exe_file): @@ -464,7 +554,7 @@ def is_exe(fpath): def def_attrs(cls, **kw): """ - Set default attributes on a class instance + Sets default attributes on a class instance :type cls: class :param cls: the class to update the given attributes in. @@ -477,7 +567,7 @@ def def_attrs(cls, **kw): def quote_define_name(s): """ - Convert a string to an identifier suitable for C defines. + Converts a string into an identifier suitable for C defines. :type s: string :param s: String to convert @@ -489,18 +579,41 @@ def quote_define_name(s): fu = fu.upper() return fu +# shlex.quote didn't exist until python 3.3. Prior to that it was a non-documented +# function in pipes. +try: + shell_quote = shlex.quote +except AttributeError: + import pipes + shell_quote = pipes.quote + +def shell_escape(cmd): + """ + Escapes a command: + ['ls', '-l', 'arg space'] -> ls -l 'arg space' + """ + if isinstance(cmd, str): + return cmd + return ' '.join(shell_quote(x) for x in cmd) + def h_list(lst): """ - Hash lists. For tuples, using hash(tup) is much more efficient, - except on python >= 3.3 where hash randomization assumes everybody is running a web application. + Hashes lists of ordered data. + + Using hash(tup) for tuples would be much more efficient, + but Python now enforces hash randomization :param lst: list to hash :type lst: list of strings :return: hash of the list """ - m = md5() - m.update(str(lst).encode()) - return m.digest() + return md5(repr(lst).encode()).digest() + +if sys.hexversion < 0x3000000: + def h_list_python2(lst): + return md5(repr(lst)).digest() + h_list_python2.__doc__ = h_list.__doc__ + h_list = h_list_python2 def h_fun(fun): """ @@ -509,24 +622,59 @@ def h_fun(fun): :param fun: function to hash :type fun: function :return: hash of the function + :rtype: string or bytes """ try: return fun.code except AttributeError: + if isinstance(fun, functools.partial): + code = list(fun.args) + # The method items() provides a sequence of tuples where the first element + # represents an optional argument of the partial function application + # + # The sorting result outcome will be consistent because: + # 1. tuples are compared in order of their elements + # 2. 
optional argument namess are unique + code.extend(sorted(fun.keywords.items())) + code.append(h_fun(fun.func)) + fun.code = h_list(code) + return fun.code try: h = inspect.getsource(fun) - except IOError: - h = "nocode" + except EnvironmentError: + h = 'nocode' try: fun.code = h except AttributeError: pass return h +def h_cmd(ins): + """ + Hashes objects recursively + + :param ins: input object + :type ins: string or list or tuple or function + :rtype: string or bytes + """ + # this function is not meant to be particularly fast + if isinstance(ins, str): + # a command is either a string + ret = ins + elif isinstance(ins, list) or isinstance(ins, tuple): + # or a list of functions/strings + ret = str([h_cmd(x) for x in ins]) + else: + # or just a python function + ret = str(h_fun(ins)) + if sys.hexversion > 0x3000000: + ret = ret.encode('latin-1', 'xmlcharrefreplace') + return ret + reg_subst = re.compile(r"(\\\\)|(\$\$)|\$\{([^}]+)\}") def subst_vars(expr, params): """ - Replace ${VAR} with the value of VAR taken from a dict or a config set:: + Replaces ${VAR} with the value of VAR taken from a dict or a config set:: from waflib import Utils s = Utils.subst_vars('${PREFIX}/bin', env) @@ -545,11 +693,14 @@ def repl_var(m): return params.get_flat(m.group(3)) except AttributeError: return params[m.group(3)] + # if you get a TypeError, it means that 'expr' is not a string... + # Utils.subst_vars(None, env) will not work return reg_subst.sub(repl_var, expr) def destos_to_binfmt(key): """ - Return the binary format based on the unversioned platform name. + Returns the binary format based on the unversioned platform name, + and defaults to ``elf`` if nothing is found. :param key: platform name :type key: string @@ -563,7 +714,7 @@ def destos_to_binfmt(key): def unversioned_sys_platform(): """ - Return the unversioned platform name. + Returns the unversioned platform name. Some Python platform names contain versions, that depend on the build environment, e.g. linux2, freebsd6, etc. This returns the name without the version number. Exceptions are @@ -595,11 +746,14 @@ def unversioned_sys_platform(): return 'darwin' if s == 'win32' or s == 'os2': return s - return re.split('\d+$', s)[0] + if s == 'cli' and os.name == 'nt': + # ironpython is only on windows as far as we know + return 'win32' + return re.split(r'\d+$', s)[0] def nada(*k, **kw): """ - A function that does nothing + Does nothing :return: None """ @@ -608,7 +762,7 @@ def nada(*k, **kw): class Timer(object): """ Simple object for timing the execution of commands. 
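Besides ConfigSet objects, subst_vars accepts plain dictionaries (the AttributeError fallback above)::

    from waflib import Utils

    params = {'PREFIX': '/usr', 'PACKAGE': 'waf'}
    assert Utils.subst_vars('${PREFIX}/share/${PACKAGE}', params) == '/usr/share/waf'
    assert Utils.subst_vars('$${PREFIX}', params) == '${PREFIX}'    # $$ yields a literal dollar sign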
- Its string representation is the current time:: + Its string representation is the duration:: from waflib.Utils import Timer timer = Timer() @@ -616,10 +770,12 @@ class Timer(object): s = str(timer) """ def __init__(self): - self.start_time = datetime.datetime.utcnow() + self.start_time = self.now() def __str__(self): - delta = datetime.datetime.utcnow() - self.start_time + delta = self.now() - self.start_time + if not isinstance(delta, datetime.timedelta): + delta = datetime.timedelta(seconds=delta) days = delta.days hours, rem = divmod(delta.seconds, 3600) minutes, seconds = divmod(rem, 60) @@ -633,28 +789,16 @@ def __str__(self): result += '%dm' % minutes return '%s%.3fs' % (result, seconds) -if is_win32: - old = shutil.copy2 - def copy2(src, dst): - """ - shutil.copy2 does not copy the file attributes on windows, so we - hack into the shutil module to fix the problem - """ - old(src, dst) - shutil.copystat(src, dst) - setattr(shutil, 'copy2', copy2) + def now(self): + return datetime.datetime.utcnow() -if os.name == 'java': - # Jython cannot disable the gc but they can enable it ... wtf? - try: - gc.disable() - gc.enable() - except NotImplementedError: - gc.disable = gc.enable + if hasattr(time, 'perf_counter'): + def now(self): + return time.perf_counter() def read_la_file(path): """ - Read property files, used by msvc.py + Reads property files, used by msvc.py :param path: file to read :type path: string @@ -669,25 +813,6 @@ def read_la_file(path): pass return dc -def nogc(fun): - """ - Decorator: let a function disable the garbage collector during its execution. - It is used in the build context when storing/loading the build cache file (pickle) - - :param fun: function to execute - :type fun: function - :return: the return value of the function executed - """ - def f(*k, **kw): - try: - gc.disable() - ret = fun(*k, **kw) - finally: - gc.enable() - return ret - f.__doc__ = fun.__doc__ - return f - def run_once(fun): """ Decorator: let a function cache its results, use like this:: @@ -696,33 +821,48 @@ def run_once(fun): def foo(k): return 345*2343 + .. note:: in practice this can cause memory leaks, prefer a :py:class:`waflib.Utils.lru_cache` + :param fun: function to execute :type fun: function :return: the return value of the function executed """ cache = {} - def wrap(k): + def wrap(*k): try: return cache[k] except KeyError: - ret = fun(k) + ret = fun(*k) cache[k] = ret return ret wrap.__cache__ = cache + wrap.__name__ = fun.__name__ return wrap def get_registry_app_path(key, filename): + """ + Returns the value of a registry key for an executable + + :type key: string + :type filename: list of string + """ if not winreg: return None try: result = winreg.QueryValue(key, "Software\\Microsoft\\Windows\\CurrentVersion\\App Paths\\%s.exe" % filename[0]) - except WindowsError: + except OSError: pass else: if os.path.isfile(result): return result def lib64(): + """ + Guess the default ``/usr/lib`` extension for 64-bit applications + + :return: '64' or '' + :rtype: string + """ # default settings for /usr/lib if os.sep == '/': if platform.architecture()[0] == '64bit': @@ -730,3 +870,184 @@ def lib64(): return '64' return '' +def loose_version(ver_str): + # private for the time being! + # see #2402 + lst = re.split(r'([.]|\\d+|[a-zA-Z])', ver_str) + ver = [] + for i, val in enumerate(lst): + try: + ver.append(int(val)) + except ValueError: + if val != '.': + ver.append(val) + return ver + +def sane_path(p): + # private function for the time being! 
+ return os.path.abspath(os.path.expanduser(p)) + +process_pool = [] +""" +List of processes started to execute sub-process commands +""" + +def get_process(): + """ + Returns a process object that can execute commands as sub-processes + + :rtype: subprocess.Popen + """ + try: + return process_pool.pop() + except IndexError: + filepath = os.path.dirname(os.path.abspath(__file__)) + os.sep + 'processor.py' + cmd = [sys.executable, '-c', readf(filepath)] + return subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, bufsize=0, close_fds=not is_win32) + +def run_prefork_process(cmd, kwargs, cargs): + """ + Delegates process execution to a pre-forked process instance. + """ + if not kwargs.get('env'): + kwargs['env'] = dict(os.environ) + try: + obj = base64.b64encode(cPickle.dumps([cmd, kwargs, cargs])) + except (TypeError, AttributeError): + return run_regular_process(cmd, kwargs, cargs) + + proc = get_process() + if not proc: + return run_regular_process(cmd, kwargs, cargs) + + proc.stdin.write(obj) + proc.stdin.write('\n'.encode()) + proc.stdin.flush() + obj = proc.stdout.readline() + if not obj: + raise OSError('Preforked sub-process %r died' % proc.pid) + + process_pool.append(proc) + lst = cPickle.loads(base64.b64decode(obj)) + # Jython wrapper failures (bash/execvp) + assert len(lst) == 5 + ret, out, err, ex, trace = lst + if ex: + if ex == 'OSError': + raise OSError(trace) + elif ex == 'ValueError': + raise ValueError(trace) + elif ex == 'TimeoutExpired': + exc = TimeoutExpired(cmd, timeout=cargs['timeout'], output=out) + exc.stderr = err + raise exc + else: + raise Exception(trace) + return ret, out, err + +def lchown(path, user=-1, group=-1): + """ + Change the owner/group of a path, raises an OSError if the + ownership change fails. + + :param user: user to change + :type user: int or str + :param group: group to change + :type group: int or str + """ + if isinstance(user, str): + import pwd + entry = pwd.getpwnam(user) + if not entry: + raise OSError('Unknown user %r' % user) + user = entry[2] + if isinstance(group, str): + import grp + entry = grp.getgrnam(group) + if not entry: + raise OSError('Unknown group %r' % group) + group = entry[2] + return os.lchown(path, user, group) + +def run_regular_process(cmd, kwargs, cargs={}): + """ + Executes a subprocess command by using subprocess.Popen + """ + proc = subprocess.Popen(cmd, **kwargs) + if kwargs.get('stdout') or kwargs.get('stderr'): + try: + out, err = proc.communicate(**cargs) + except TimeoutExpired: + if kwargs.get('start_new_session') and hasattr(os, 'killpg'): + os.killpg(proc.pid, signal.SIGKILL) + else: + proc.kill() + out, err = proc.communicate() + exc = TimeoutExpired(proc.args, timeout=cargs['timeout'], output=out) + exc.stderr = err + raise exc + status = proc.returncode + else: + out, err = (None, None) + try: + status = proc.wait(**cargs) + except TimeoutExpired as e: + if kwargs.get('start_new_session') and hasattr(os, 'killpg'): + os.killpg(proc.pid, signal.SIGKILL) + else: + proc.kill() + proc.wait() + raise e + return status, out, err + +def run_process(cmd, kwargs, cargs={}): + """ + Executes a subprocess by using a pre-forked process when possible + or falling back to subprocess.Popen. 
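# (Editor's aside, not part of the patch) A hedged sketch of how run_process is
# typically driven: both capture keywords are set, so the call is routed to the
# pre-forked helper when available and falls back to subprocess.Popen otherwise.
import subprocess, sys
from waflib import Utils

kwargs = {'stdout': subprocess.PIPE, 'stderr': subprocess.PIPE}
ret, out, err = Utils.run_process([sys.executable, '--version'], kwargs, {})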
See :py:func:`waflib.Utils.run_prefork_process` + and :py:func:`waflib.Utils.run_regular_process` + """ + if kwargs.get('stdout') and kwargs.get('stderr'): + return run_prefork_process(cmd, kwargs, cargs) + else: + return run_regular_process(cmd, kwargs, cargs) + +def alloc_process_pool(n, force=False): + """ + Allocates an amount of processes to the default pool so its size is at least *n*. + It is useful to call this function early so that the pre-forked + processes use as little memory as possible. + + :param n: pool size + :type n: integer + :param force: if True then *n* more processes are added to the existing pool + :type force: bool + """ + # mandatory on python2, unnecessary on python >= 3.2 + global run_process, get_process, alloc_process_pool + if not force: + n = max(n - len(process_pool), 0) + try: + lst = [get_process() for x in range(n)] + except OSError: + run_process = run_regular_process + get_process = alloc_process_pool = nada + else: + for x in lst: + process_pool.append(x) + +def atexit_pool(): + for k in process_pool: + try: + os.kill(k.pid, 9) + except OSError: + pass + else: + k.wait() +# see #1889 +if (sys.hexversion<0x207000f and not is_win32) or sys.hexversion>=0x306000f: + atexit.register(atexit_pool) + +if os.environ.get('WAF_NO_PREFORK') or sys.platform == 'cli' or not sys.executable: + run_process = run_regular_process + get_process = alloc_process_pool = nada + diff --git a/waflib/__init__.py b/waflib/__init__.py index c8a3c34928..079df358f5 100644 --- a/waflib/__init__.py +++ b/waflib/__init__.py @@ -1,3 +1,3 @@ #!/usr/bin/env python # encoding: utf-8 -# Thomas Nagy, 2005-2010 (ita) +# Thomas Nagy, 2005-2018 (ita) diff --git a/waflib/ansiterm.py b/waflib/ansiterm.py index e7a4ce7521..027f0ad68a 100644 --- a/waflib/ansiterm.py +++ b/waflib/ansiterm.py @@ -120,7 +120,7 @@ def screen_buffer_info(self): def clear_line(self, param): mode = param and int(param) or 0 sbinfo = self.screen_buffer_info() - if mode == 1: # Clear from begining of line to cursor position + if mode == 1: # Clear from beginning of line to cursor position line_start = COORD(0, sbinfo.CursorPosition.Y) line_length = sbinfo.Size.X elif mode == 2: # Clear entire line @@ -136,7 +136,7 @@ def clear_line(self, param): def clear_screen(self, param): mode = to_int(param, 0) sbinfo = self.screen_buffer_info() - if mode == 1: # Clear from begining of screen to cursor position + if mode == 1: # Clear from beginning of screen to cursor position clear_start = COORD(0, 0) clear_length = sbinfo.CursorPosition.X * sbinfo.CursorPosition.Y elif mode == 2: # Clear entire screen and return cursor to home @@ -264,7 +264,7 @@ def hide_cursor(self,param): 'u': pop_cursor, } # Match either the escape sequence or text not containing escape sequence - ansi_tokens = re.compile('(?:\x1b\[([0-9?;]*)([a-zA-Z])|([^\x1b]+))') + ansi_tokens = re.compile(r'(?:\x1b\[([0-9?;]*)([a-zA-Z])|([^\x1b]+))') def write(self, text): try: wlock.acquire() @@ -284,7 +284,7 @@ def write(self, text): wlock.release() def writeconsole(self, txt): - chars_written = c_int() + chars_written = c_ulong() writeconsole = windll.kernel32.WriteConsoleA if isinstance(txt, _type): writeconsole = windll.kernel32.WriteConsoleW @@ -320,7 +320,7 @@ def isatty(self): sbinfo = CONSOLE_SCREEN_BUFFER_INFO() def get_term_cols(): windll.kernel32.GetConsoleScreenBufferInfo(console, byref(sbinfo)) - # TODO Issue 1401 + # Issue 1401 - the progress bar cannot reach the last character return sbinfo.Size.X - 1 # just try and see diff --git 
a/waflib/extras/add_objects.py b/waflib/extras/add_objects.py deleted file mode 100644 index e383a1c875..0000000000 --- a/waflib/extras/add_objects.py +++ /dev/null @@ -1,7 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2011 (ita) - -from waflib import Logs -Logs.warn('This tool has been merged to the main library, remove the references to "add_objects"') - diff --git a/waflib/extras/batched_cc.py b/waflib/extras/batched_cc.py index 0b035e856c..aad2872298 100644 --- a/waflib/extras/batched_cc.py +++ b/waflib/extras/batched_cc.py @@ -3,21 +3,22 @@ # Thomas Nagy, 2006-2015 (ita) """ -Build as batches. - Instead of compiling object files one by one, c/c++ compilers are often able to compile at once: cc -c ../file1.c ../file2.c ../file3.c Files are output on the directory where the compiler is called, and dependencies are more difficult to track (do not run the command on all source files if only one file changes) - As such, we do as if the files were compiled one by one, but no command is actually run: replace each cc/cpp Task by a TaskSlave. A new task called TaskMaster collects the signatures from each slave and finds out the command-line to run. -Just import this module in the configuration (no other change required). -This is provided as an example, for performance unity builds are recommended (fewer tasks and -fewer jobs to execute). See waflib/extras/unity.py. +Just import this module to start using it: +def build(bld): + bld.load('batched_cc') + +Note that this is provided as an example, unity builds are recommended +for best performance results (fewer tasks and fewer jobs to execute). +See waflib/extras/unity.py. """ from waflib import Task, Utils @@ -26,24 +27,21 @@ MAX_BATCH = 50 -c_str = '${CC} ${CFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} -c ${SRCLST} ${CXX_TGT_F_BATCHED}' +c_str = '${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${tsk.batch_incpaths()} ${DEFINES_ST:DEFINES} -c ${SRCLST} ${CXX_TGT_F_BATCHED} ${CPPFLAGS}' c_fun, _ = Task.compile_fun_noshell(c_str) -cxx_str = '${CXX} ${CXXFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} -c ${SRCLST} ${CXX_TGT_F_BATCHED}' +cxx_str = '${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${tsk.batch_incpaths()} ${DEFINES_ST:DEFINES} -c ${SRCLST} ${CXX_TGT_F_BATCHED} ${CPPFLAGS}' cxx_fun, _ = Task.compile_fun_noshell(cxx_str) count = 70000 -class batch_task(Task.Task): +class batch(Task.Task): color = 'PINK' after = ['c', 'cxx'] before = ['cprogram', 'cshlib', 'cstlib', 'cxxprogram', 'cxxshlib', 'cxxstlib'] def uid(self): - m = Utils.md5() - m.update(Task.Task.uid(self)) - m.update(str(self.generator.idx).encode()) - return m.digest() + return Utils.h_list([Task.Task.uid(self), self.generator.idx, self.generator.path.abspath(), self.generator.target]) def __str__(self): return 'Batch compilation for %d slaves' % len(self.slaves) @@ -74,6 +72,13 @@ def runnable_status(self): return Task.SKIP_ME + def get_cwd(self): + return self.slaves[0].outputs[0].parent + + def batch_incpaths(self): + st = self.env.CPPPATH_ST + return [st % node.abspath() for node in self.generator.includes_nodes] + def run(self): self.outputs = [] @@ -85,7 +90,6 @@ def run(self): srclst.append(t.inputs[0].abspath()) self.env.SRCLST = srclst - self.cwd = slaves[0].outputs[0].parent.abspath() if self.slaves[0].__class__.__name__ == 'c': ret = c_fun(self) diff --git a/waflib/extras/bjam.py b/waflib/extras/bjam.py 
index 3070df8d5f..8e04d3a66f 100644 --- a/waflib/extras/bjam.py +++ b/waflib/extras/bjam.py @@ -2,7 +2,6 @@ # per rosengren 2011 from os import sep, readlink -from os.path import abspath from waflib import Logs from waflib.TaskGen import feature, after_method from waflib.Task import Task, always_run @@ -23,7 +22,7 @@ def configure(cnf): cnf.find_program('bjam', path_list=[ cnf.env.BJAM_SRC + sep + 'bin.' + cnf.env.BJAM_UNAME ]) - except Exception as e: + except Exception: cnf.env.BJAM = None if not cnf.env.BJAM_CONFIG: cnf.env.BJAM_CONFIG = cnf.options.bjam_config @@ -48,8 +47,6 @@ class bjam_creator(Task): def run(self): env = self.env gen = self.generator - path = gen.path - bld = gen.bld bjam = gen.bld.root.find_dir(env.BJAM_SRC) if not bjam: Logs.error('Can not find bjam source') diff --git a/waflib/extras/blender.py b/waflib/extras/blender.py index 5c7f1a020f..e5efc280cf 100644 --- a/waflib/extras/blender.py +++ b/waflib/extras/blender.py @@ -20,7 +20,6 @@ def build(bld): """ import os import re -from sys import platform as _platform from getpass import getuser from waflib import Utils @@ -105,7 +104,5 @@ def blender(self): # Two ways to install a blender extension: as a module or just .py files dest_dir = os.path.join(self.env.BLENDER_ADDONS_DIR, self.get_name()) Utils.check_dir(dest_dir) - self.bld.install_files( - dest_dir, - getattr(self, 'files', '.') - ) + self.add_install_files(install_to=dest_dir, install_from=getattr(self, 'files', '.')) + diff --git a/waflib/extras/boost.py b/waflib/extras/boost.py index d3c4869060..93b312a1e6 100644 --- a/waflib/extras/boost.py +++ b/waflib/extras/boost.py @@ -54,14 +54,13 @@ def build(bld): from waflib.Configure import conf from waflib.TaskGen import feature, after_method -BOOST_LIBS = ['/usr/lib/x86_64-linux-gnu', '/usr/lib/i386-linux-gnu', - '/usr/lib', '/usr/local/lib', '/opt/local/lib', '/sw/lib', '/lib'] +BOOST_LIBS = ['/usr/lib', '/usr/local/lib', '/opt/local/lib', '/sw/lib', '/lib'] BOOST_INCLUDES = ['/usr/include', '/usr/local/include', '/opt/local/include', '/sw/include'] BOOST_VERSION_FILE = 'boost/version.hpp' BOOST_VERSION_CODE = ''' #include #include -int main() { std::cout << BOOST_LIB_VERSION << std::endl; } +int main() { std::cout << BOOST_LIB_VERSION << ":" << BOOST_VERSION << std::endl; } ''' BOOST_ERROR_CODE = ''' @@ -69,11 +68,38 @@ def build(bld): int main() { boost::system::error_code c; } ''' +PTHREAD_CODE = ''' +#include +static void* f(void*) { return 0; } +int main() { + pthread_t th; + pthread_attr_t attr; + pthread_attr_init(&attr); + pthread_create(&th, &attr, &f, 0); + pthread_join(th, 0); + pthread_cleanup_push(0, 0); + pthread_cleanup_pop(0); + pthread_attr_destroy(&attr); +} +''' + BOOST_THREAD_CODE = ''' #include int main() { boost::thread t; } ''' +BOOST_LOG_CODE = ''' +#include +#include +#include +int main() { + using namespace boost::log; + add_common_attributes(); + add_console_log(std::clog, keywords::format = "%Message%"); + BOOST_LOG_TRIVIAL(debug) << "log is working" << std::endl; +} +''' + # toolsets from {boost_dir}/tools/build/v2/tools/common.jam PLATFORM = Utils.unversioned_sys_platform() detect_intel = lambda env: (PLATFORM == 'win32') and 'iw' or 'il' @@ -104,14 +130,15 @@ def build(bld): def options(opt): + opt = opt.add_option_group('Boost Options') opt.add_option('--boost-includes', type='string', default='', dest='boost_includes', - help='''path to the boost includes root (~boost root) - e.g. 
/path/to/boost_1_47_0''') + help='''path to the directory where the boost includes are, + e.g., /path/to/boost_1_55_0/stage/include''') opt.add_option('--boost-libs', type='string', default='', dest='boost_libs', - help='''path to the directory where the boost libs are - e.g. /path/to/boost_1_47_0/stage/lib''') + help='''path to the directory where the boost libs are, + e.g., path/to/boost_1_55_0/stage/lib''') opt.add_option('--boost-mt', action='store_true', default=False, dest='boost_mt', help='select multi-threaded libraries') @@ -148,17 +175,19 @@ def boost_get_version(self, d): try: txt = node.read() except EnvironmentError: - Logs.error("Could not read the file %r" % node.abspath()) + Logs.error("Could not read the file %r", node.abspath()) else: - re_but = re.compile('^#define\\s+BOOST_LIB_VERSION\\s+"(.*)"', re.M) - m = re_but.search(txt) - if m: - return m.group(1) - return self.check_cxx(fragment=BOOST_VERSION_CODE, includes=[d], execute=True, define_ret=True) + re_but1 = re.compile('^#define\\s+BOOST_LIB_VERSION\\s+"(.+)"', re.M) + m1 = re_but1.search(txt) + re_but2 = re.compile('^#define\\s+BOOST_VERSION\\s+(\\d+)', re.M) + m2 = re_but2.search(txt) + if m1 and m2: + return (m1.group(1), m2.group(1)) + return self.check_cxx(fragment=BOOST_VERSION_CODE, includes=[d], execute=True, define_ret=True).split(":") @conf def boost_get_includes(self, *k, **kw): - includes = k and k[0] or kw.get('includes', None) + includes = k and k[0] or kw.get('includes') if includes and self.__boost_get_version_file(includes): return includes for d in self.environ.get('INCLUDE', '').split(';') + BOOST_INCLUDES: @@ -191,7 +220,7 @@ def __boost_get_libs_path(self, *k, **kw): ''' return the lib path and all the files in it ''' if 'files' in kw: return self.root.find_dir('.'), Utils.to_list(kw['files']) - libs = k and k[0] or kw.get('libs', None) + libs = k and k[0] or kw.get('libs') if libs: path = self.root.find_dir(libs) files = path.ant_glob('*boost_*') @@ -241,10 +270,12 @@ def find_lib(re_lib, files): return file return None + # extensions from Tools.ccroot.lib_patterns + wo_ext = re.compile(r"\.(a|so|lib|dll|dylib)(\.[0-9\.]+)?$") def format_lib_name(name): if name.startswith('lib') and self.env.CC_NAME != 'msvc': name = name[3:] - return name[:name.rfind('.')] + return wo_ext.sub("", name) def match_libs(lib_names, is_static): libs = [] @@ -254,7 +285,7 @@ def match_libs(lib_names, is_static): t = [] if kw.get('mt', False): t.append('-mt') - if kw.get('abi', None): + if kw.get('abi'): t.append('%s%s' % (is_static and '-s' or '-', kw['abi'])) elif is_static: t.append('-s') @@ -265,9 +296,9 @@ def match_libs(lib_names, is_static): for lib in lib_names: if lib == 'python': # for instance, with python='27', - # accepts '-py27', '-py2', '27' and '2' + # accepts '-py27', '-py2', '27', '-2.7' and '2' # but will reject '-py3', '-py26', '26' and '3' - tags = '({0})?((-py{2})|(-py{1}(?=[^0-9]))|({2})|({1}(?=[^0-9]))|(?=[^0-9])(?!-py))'.format(tags_pat, kw['python'][0], kw['python']) + tags = '({0})?((-py{2})|(-py{1}(?=[^0-9]))|({2})|(-{1}.{3})|({1}(?=[^0-9]))|(?=[^0-9])(?!-py))'.format(tags_pat, kw['python'][0], kw['python'], kw['python'][1]) else: tags = tags_pat # Trying libraries, from most strict match to least one @@ -288,8 +319,68 @@ def match_libs(lib_names, is_static): self.fatal('The configuration failed') return libs - return path.abspath(), match_libs(kw.get('lib', None), False), match_libs(kw.get('stlib', None), True) + return path.abspath(), match_libs(kw.get('lib'), False), 
match_libs(kw.get('stlib'), True) + +@conf +def _check_pthread_flag(self, *k, **kw): + ''' + Computes which flags should be added to CXXFLAGS and LINKFLAGS to compile in multi-threading mode + + Yes, we *need* to put the -pthread thing in CPPFLAGS because with GCC3, + boost/thread.hpp will trigger a #error if -pthread isn't used: + boost/config/requires_threads.hpp:47:5: #error "Compiler threading support + is not turned on. Please set the correct command line options for + threading: -pthread (Linux), -pthreads (Solaris) or -mthreads (Mingw32)" + + Based on _BOOST_PTHREAD_FLAG(): https://github.com/tsuna/boost.m4/blob/master/build-aux/boost.m4 + ''' + + var = kw.get('uselib_store', 'BOOST') + + self.start_msg('Checking the flags needed to use pthreads') + + # The ordering *is* (sometimes) important. Some notes on the + # individual items follow: + # (none): in case threads are in libc; should be tried before -Kthread and + # other compiler flags to prevent continual compiler warnings + # -lpthreads: AIX (must check this before -lpthread) + # -Kthread: Sequent (threads in libc, but -Kthread needed for pthread.h) + # -kthread: FreeBSD kernel threads (preferred to -pthread since SMP-able) + # -llthread: LinuxThreads port on FreeBSD (also preferred to -pthread) + # -pthread: GNU Linux/GCC (kernel threads), BSD/GCC (userland threads) + # -pthreads: Solaris/GCC + # -mthreads: MinGW32/GCC, Lynx/GCC + # -mt: Sun Workshop C (may only link SunOS threads [-lthread], but it + # doesn't hurt to check since this sometimes defines pthreads too; + # also defines -D_REENTRANT) + # ... -mt is also the pthreads flag for HP/aCC + # -lpthread: GNU Linux, etc. + # --thread-safe: KAI C++ + if Utils.unversioned_sys_platform() == "sunos": + # On Solaris (at least, for some versions), libc contains stubbed + # (non-functional) versions of the pthreads routines, so link-based + # tests will erroneously succeed. (We need to link with -pthreads/-mt/ + # -lpthread.) (The stubs are missing pthread_cleanup_push, or rather + # a function called by this macro, so we could check for that, but + # who knows whether they'll stub that too in a future libc.) 
So, + # we'll just look for -pthreads and -lpthread first: + boost_pthread_flags = ["-pthreads", "-lpthread", "-mt", "-pthread"] + else: + boost_pthread_flags = ["", "-lpthreads", "-Kthread", "-kthread", "-llthread", "-pthread", + "-pthreads", "-mthreads", "-lpthread", "--thread-safe", "-mt"] + for boost_pthread_flag in boost_pthread_flags: + try: + self.env.stash() + self.env.append_value('CXXFLAGS_%s' % var, boost_pthread_flag) + self.env.append_value('LINKFLAGS_%s' % var, boost_pthread_flag) + self.check_cxx(code=PTHREAD_CODE, msg=None, use=var, execute=False) + + self.end_msg(boost_pthread_flag) + return + except self.errors.ConfigurationError: + self.env.revert() + self.end_msg('None') @conf def check_boost(self, *k, **kw): @@ -303,8 +394,8 @@ def check_boost(self, *k, **kw): self.fatal('load a c++ compiler first, conf.load("compiler_cxx")') params = { - 'lib': k and k[0] or kw.get('lib', None), - 'stlib': kw.get('stlib', None) + 'lib': k and k[0] or kw.get('lib'), + 'stlib': kw.get('stlib') } for key, value in self.options.__dict__.items(): if not key.startswith('boost_'): @@ -314,10 +405,19 @@ def check_boost(self, *k, **kw): var = kw.get('uselib_store', 'BOOST') + self.find_program('dpkg-architecture', var='DPKG_ARCHITECTURE', mandatory=False) + if self.env.DPKG_ARCHITECTURE: + deb_host_multiarch = self.cmd_and_log([self.env.DPKG_ARCHITECTURE[0], '-qDEB_HOST_MULTIARCH']) + BOOST_LIBS.insert(0, '/usr/lib/%s' % deb_host_multiarch.strip()) + self.start_msg('Checking boost includes') self.env['INCLUDES_%s' % var] = inc = self.boost_get_includes(**params) - self.env.BOOST_VERSION = self.boost_get_version(inc) - self.end_msg(self.env.BOOST_VERSION) + versions = self.boost_get_version(inc) + self.env.BOOST_VERSION = versions[0] + self.env.BOOST_VERSION_NUMBER = int(versions[1]) + self.end_msg("%d.%d.%d" % (int(versions[1]) / 100000, + int(versions[1]) / 100 % 1000, + int(versions[1]) % 100)) if Logs.verbose: Logs.pprint('CYAN', ' path : %s' % self.env['INCLUDES_%s' % var]) @@ -337,14 +437,27 @@ def check_boost(self, *k, **kw): Logs.pprint('CYAN', ' shared libs : %s' % libs) Logs.pprint('CYAN', ' static libs : %s' % stlibs) + def has_shlib(lib): + return params['lib'] and lib in params['lib'] + def has_stlib(lib): + return params['stlib'] and lib in params['stlib'] + def has_lib(lib): + return has_shlib(lib) or has_stlib(lib) + if has_lib('thread'): + # not inside try_link to make check visible in the output + self._check_pthread_flag(k, kw) def try_link(): - if (params['lib'] and 'system' in params['lib']) or \ - params['stlib'] and 'system' in params['stlib']: + if has_lib('system'): self.check_cxx(fragment=BOOST_ERROR_CODE, use=var, execute=False) - if (params['lib'] and 'thread' in params['lib']) or \ - params['stlib'] and 'thread' in params['stlib']: + if has_lib('thread'): self.check_cxx(fragment=BOOST_THREAD_CODE, use=var, execute=False) + if has_lib('log'): + if not has_lib('thread'): + self.env['DEFINES_%s' % var] += ['BOOST_LOG_NO_THREADS'] + if has_shlib('log'): + self.env['DEFINES_%s' % var] += ['BOOST_LOG_DYN_LINK'] + self.check_cxx(fragment=BOOST_LOG_CODE, use=var, execute=False) if params.get('linkage_autodetect', False): self.start_msg("Attempting to detect boost linkage flags") @@ -372,12 +485,14 @@ def try_link(): self.env["CXXFLAGS_%s" % var] += cxxflags try: try_link() - self.end_msg("ok: winning cxxflags combination: %s" % (self.env["CXXFLAGS_%s" % var])) - exc = None - break except Errors.ConfigurationError as e: self.env.revert() exc = e + else: + self.end_msg("ok: 
winning cxxflags combination: %s" % (self.env["CXXFLAGS_%s" % var])) + exc = None + self.env.commit() + break if exc is not None: self.end_msg("Could not auto-detect boost linking flags combination, you may report it to boost.py author", ex=exc) @@ -405,7 +520,7 @@ def install_boost(self): for lib in self.env.LIB_BOOST: try: file = self.bld.find_file(self.env.cxxshlib_PATTERN % lib, self.env.LIBPATH_BOOST) - self.bld.install_files(inst_to, self.bld.root.find_node(file)) + self.add_install_files(install_to=inst_to, install_from=self.bld.root.find_node(file)) except: continue install_boost.done = False diff --git a/waflib/extras/build_file_tracker.py b/waflib/extras/build_file_tracker.py index 4f9e7b6fa0..c4f26fd070 100644 --- a/waflib/extras/build_file_tracker.py +++ b/waflib/extras/build_file_tracker.py @@ -8,25 +8,21 @@ Note that there is a variety of ways to implement this, one may want use timestamps on source files too for example, or one may want to hash the files in the source directory only under certain conditions (md5_tstamp tool) -or to hash the file in the build directory with its timestamp (similar to 'update_outputs') +or to hash the file in the build directory with its timestamp """ import os from waflib import Node, Utils def get_bld_sig(self): + if not self.is_bld() or self.ctx.bldnode is self.ctx.srcnode: + return Utils.h_file(self.abspath()) + try: - return self.cache_sig + # add the creation time to the signature + return self.sig + str(os.stat(self.abspath()).st_mtime) except AttributeError: - pass - - if not self.is_bld() or self.ctx.bldnode is self.ctx.srcnode: - self.sig = Utils.h_file(self.abspath()) - self.cache_sig = ret = self.sig - else: - # add the - self.cache_sig = ret = self.sig + str(os.stat(self.abspath()).st_mtime) - return ret + return None Node.Node.get_bld_sig = get_bld_sig diff --git a/waflib/extras/build_logs.py b/waflib/extras/build_logs.py index 9c7c9cd6c3..cdf8ed097e 100644 --- a/waflib/extras/build_logs.py +++ b/waflib/extras/build_logs.py @@ -10,14 +10,14 @@ def init(ctx): """ import atexit, sys, time, os, shutil, threading -from waflib import Logs, Context +from waflib import ansiterm, Logs, Context # adding the logs under the build/ directory will clash with the clean/ command try: up = os.path.dirname(Context.g_module.__file__) except AttributeError: up = '.' -LOGFILE = os.path.join(up, 'logs', '%s.log' % time.strftime('%Y_%m_%d_%H_%M')) +LOGFILE = os.path.join(up, 'logs', time.strftime('%Y_%m_%d_%H_%M.log')) wlock = threading.Lock() class log_to_file(object): @@ -28,7 +28,7 @@ def __init__(self, stream, fileobj, filename): self.filename = filename self.is_valid = True def replace_colors(self, data): - for x in Logs.colors_lst.values(): + for x in Logs.colors_lst.values(): if isinstance(x, str): data = data.replace(x, '') return data @@ -68,8 +68,12 @@ def init(ctx): # sys.stdout has already been replaced, so __stdout__ will be faster #sys.stdout = log_to_file(sys.stdout, fileobj, filename) #sys.stderr = log_to_file(sys.stderr, fileobj, filename) - sys.stdout = log_to_file(sys.__stdout__, fileobj, filename) - sys.stderr = log_to_file(sys.__stderr__, fileobj, filename) + def wrap(stream): + if stream.isatty(): + return ansiterm.AnsiTerm(stream) + return stream + sys.stdout = log_to_file(wrap(sys.__stdout__), fileobj, filename) + sys.stderr = log_to_file(wrap(sys.__stderr__), fileobj, filename) # now mess with the logging module... 
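# (Editor's aside, not part of the patch) A hedged wscript sketch exercising the
# boost.py changes: requesting 'thread' now triggers the pthread-flag probe, and
# 'log' enables the BOOST_LOG checks (with BOOST_LOG_DYN_LINK for shared linking).
def options(opt):
	opt.load('compiler_cxx boost')

def configure(conf):
	conf.load('compiler_cxx boost')
	conf.check_boost(lib='system thread log', mt=True)

def build(bld):
	bld.program(source='main.cpp', target='app', use='BOOST')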
for x in Logs.log.handlers: @@ -92,7 +96,7 @@ def exit_cleanup(): fileobj.close() filename = sys.stdout.filename - Logs.info('Output logged to %r' % filename) + Logs.info('Output logged to %r', filename) # then copy the log file to "latest.log" if possible up = os.path.dirname(os.path.abspath(filename)) @@ -100,7 +104,6 @@ def exit_cleanup(): shutil.copy(filename, os.path.join(up, 'latest.log')) except OSError: # this may fail on windows due to processes spawned - # pass atexit.register(exit_cleanup) diff --git a/waflib/extras/buildcopy.py b/waflib/extras/buildcopy.py new file mode 100644 index 0000000000..eaff7e605a --- /dev/null +++ b/waflib/extras/buildcopy.py @@ -0,0 +1,85 @@ +#! /usr/bin/env python +# encoding: utf-8 +# Calle Rosenquist, 2017 (xbreak) +""" +Create task that copies source files to the associated build node. +This is useful to e.g. construct a complete Python package so it can be unit tested +without installation. + +Source files to be copied can be specified either in `buildcopy_source` attribute, or +`source` attribute. If both are specified `buildcopy_source` has priority. + +Examples:: + + def build(bld): + bld(name = 'bar', + features = 'py buildcopy', + source = bld.path.ant_glob('src/bar/*.py')) + + bld(name = 'py baz', + features = 'buildcopy', + buildcopy_source = bld.path.ant_glob('src/bar/*.py') + ['src/bar/resource.txt']) + +""" +import os, shutil +from waflib import Errors, Task, TaskGen, Utils, Node, Logs + +@TaskGen.before_method('process_source') +@TaskGen.feature('buildcopy') +def make_buildcopy(self): + """ + Creates the buildcopy task. + """ + def to_src_nodes(lst): + """Find file nodes only in src, TaskGen.to_nodes will not work for this since it gives + preference to nodes in build. + """ + if isinstance(lst, Node.Node): + if not lst.is_src(): + raise Errors.WafError('buildcopy: node %s is not in src'%lst) + if not os.path.isfile(lst.abspath()): + raise Errors.WafError('buildcopy: Cannot copy directory %s (unsupported action)'%lst) + return lst + + if isinstance(lst, str): + lst = [x for x in Utils.split_path(lst) if x and x != '.'] + + node = self.bld.path.get_src().search_node(lst) + if node: + if not os.path.isfile(node.abspath()): + raise Errors.WafError('buildcopy: Cannot copy directory %s (unsupported action)'%node) + return node + + node = self.bld.path.get_src().find_node(lst) + if node: + if not os.path.isfile(node.abspath()): + raise Errors.WafError('buildcopy: Cannot copy directory %s (unsupported action)'%node) + return node + raise Errors.WafError('buildcopy: File not found in src: %s'%os.path.join(*lst)) + + nodes = [ to_src_nodes(n) for n in getattr(self, 'buildcopy_source', getattr(self, 'source', [])) ] + if not nodes: + Logs.warn('buildcopy: No source files provided to buildcopy in %s (set `buildcopy_source` or `source`)', + self) + return + node_pairs = [(n, n.get_bld()) for n in nodes] + self.create_task('buildcopy', [n[0] for n in node_pairs], [n[1] for n in node_pairs], node_pairs=node_pairs) + +class buildcopy(Task.Task): + """ + Copy for each pair `n` in `node_pairs`: n[0] -> n[1]. + + Attribute `node_pairs` should contain a list of tuples describing source and target: + + node_pairs = [(in, out), ...] 
+ + """ + color = 'PINK' + + def keyword(self): + return 'Copying' + + def run(self): + for f,t in self.node_pairs: + t.parent.mkdir() + shutil.copy2(f.abspath(), t.abspath()) diff --git a/waflib/extras/c_bgxlc.py b/waflib/extras/c_bgxlc.py index 89a7ec1641..6e3eaf7bb6 100644 --- a/waflib/extras/c_bgxlc.py +++ b/waflib/extras/c_bgxlc.py @@ -6,7 +6,6 @@ IBM XL Compiler for Blue Gene """ -import os from waflib.Tools import ccroot,ar from waflib.Configure import conf diff --git a/waflib/extras/c_dumbpreproc.py b/waflib/extras/c_dumbpreproc.py index b3cf1c48d2..1fdd5c364a 100644 --- a/waflib/extras/c_dumbpreproc.py +++ b/waflib/extras/c_dumbpreproc.py @@ -23,9 +23,7 @@ def configure(conf): conf.load('c_dumbpreproc') """ -import re, sys, os, string, traceback -from waflib import Logs, Build, Utils, Errors -from waflib.Logs import debug, error +import re from waflib.Tools import c_preproc re_inc = re.compile( @@ -35,7 +33,8 @@ def configure(conf): def lines_includes(node): code = node.read() if c_preproc.use_trigraphs: - for (a, b) in c_preproc.trig_def: code = code.split(a).join(b) + for (a, b) in c_preproc.trig_def: + code = code.split(a).join(b) code = c_preproc.re_nl.sub('', code) code = c_preproc.re_cpp.sub(c_preproc.repl, code) return [(m.group(2), m.group(3)) for m in re.finditer(re_inc, code)] @@ -67,7 +66,7 @@ def start(self, node, env): if x == c_preproc.POPFILE: self.currentnode_stack.pop() continue - self.tryfind(y) + self.tryfind(y, env=env) c_preproc.c_parser = dumb_parser diff --git a/waflib/extras/c_emscripten.py b/waflib/extras/c_emscripten.py new file mode 100644 index 0000000000..e1ac494f44 --- /dev/null +++ b/waflib/extras/c_emscripten.py @@ -0,0 +1,87 @@ +#!/usr/bin/env python +# -*- coding: utf-8 vi:ts=4:noexpandtab + +import subprocess, shlex, sys + +from waflib.Tools import ccroot, gcc, gxx +from waflib.Configure import conf +from waflib.TaskGen import after_method, feature + +from waflib.Tools.compiler_c import c_compiler +from waflib.Tools.compiler_cxx import cxx_compiler + +for supported_os in ('linux', 'darwin', 'gnu', 'aix'): + c_compiler[supported_os].append('c_emscripten') + cxx_compiler[supported_os].append('c_emscripten') + + +@conf +def get_emscripten_version(conf, cc): + """ + Emscripten doesn't support processing '-' like clang/gcc + """ + + dummy = conf.cachedir.parent.make_node("waf-emscripten.c") + dummy.write("") + cmd = cc + ['-dM', '-E', '-x', 'c', dummy.abspath()] + env = conf.env.env or None + try: + p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env) + out = p.communicate()[0] + except Exception as e: + conf.fatal('Could not determine emscripten version %r: %s' % (cmd, e)) + + if not isinstance(out, str): + out = out.decode(sys.stdout.encoding or 'latin-1') + + k = {} + out = out.splitlines() + for line in out: + lst = shlex.split(line) + if len(lst)>2: + key = lst[1] + val = lst[2] + k[key] = val + + if not ('__clang__' in k and 'EMSCRIPTEN' in k): + conf.fatal('Could not determine the emscripten compiler version.') + + conf.env.DEST_OS = 'generic' + conf.env.DEST_BINFMT = 'elf' + conf.env.DEST_CPU = 'asm-js' + conf.env.CC_VERSION = (k['__clang_major__'], k['__clang_minor__'], k['__clang_patchlevel__']) + return k + +@conf +def find_emscripten(conf): + cc = conf.find_program(['emcc'], var='CC') + conf.get_emscripten_version(cc) + conf.env.CC = cc + conf.env.CC_NAME = 'emscripten' + cxx = conf.find_program(['em++'], var='CXX') + conf.env.CXX = cxx + conf.env.CXX_NAME = 'emscripten' + conf.find_program(['emar'], var='AR') 
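# (Editor's aside, not part of the patch) A hedged wscript sketch for the new
# c_emscripten tool: it appends itself to the compiler_c/compiler_cxx lists, and
# its configure() sets cprogram_PATTERN to '%s.html', so the target below would
# come out as hello.html plus the emitted JavaScript (file names are placeholders).
def options(opt):
	opt.load('compiler_c')

def configure(conf):
	conf.load('c_emscripten')

def build(bld):
	bld.program(source='hello.c', target='hello')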
+ +def configure(conf): + conf.find_emscripten() + conf.find_ar() + conf.gcc_common_flags() + conf.gxx_common_flags() + conf.cc_load_tools() + conf.cc_add_flags() + conf.cxx_load_tools() + conf.cxx_add_flags() + conf.link_add_flags() + conf.env.ARFLAGS = ['rcs'] + conf.env.cshlib_PATTERN = '%s.js' + conf.env.cxxshlib_PATTERN = '%s.js' + conf.env.cstlib_PATTERN = '%s.a' + conf.env.cxxstlib_PATTERN = '%s.a' + conf.env.cprogram_PATTERN = '%s.html' + conf.env.cxxprogram_PATTERN = '%s.html' + conf.env.CXX_TGT_F = ['-c', '-o', ''] + conf.env.CC_TGT_F = ['-c', '-o', ''] + conf.env.CXXLNK_TGT_F = ['-o', ''] + conf.env.CCLNK_TGT_F = ['-o', ''] + conf.env.append_value('LINKFLAGS',['-Wl,--enable-auto-import']) diff --git a/waflib/extras/c_nec.py b/waflib/extras/c_nec.py index 04b6aae6cc..96bfae4f34 100644 --- a/waflib/extras/c_nec.py +++ b/waflib/extras/c_nec.py @@ -6,7 +6,6 @@ NEC SX Compiler for SX vector systems """ -import os import re from waflib import Utils from waflib.Tools import ccroot,ar @@ -25,43 +24,46 @@ def find_sxc(conf): @conf def get_sxc_version(conf, fc): - version_re = re.compile(r"C\+\+/SX\s*Version\s*(?P\d*)\.(?P\d*)", re.I).search - cmd = fc + ['-V'] - p = Utils.subprocess.Popen(cmd, stdin=False, stdout=Utils.subprocess.PIPE, stderr=Utils.subprocess.PIPE, env=None) - out, err = p.communicate() + version_re = re.compile(r"C\+\+/SX\s*Version\s*(?P\d*)\.(?P\d*)", re.I).search + cmd = fc + ['-V'] + p = Utils.subprocess.Popen(cmd, stdin=False, stdout=Utils.subprocess.PIPE, stderr=Utils.subprocess.PIPE, env=None) + out, err = p.communicate() - if out: match = version_re(out) - else: match = version_re(err) - if not match: - conf.fatal('Could not determine the NEC C compiler version.') - k = match.groupdict() - conf.env['C_VERSION'] = (k['major'], k['minor']) + if out: + match = version_re(out) + else: + match = version_re(err) + if not match: + conf.fatal('Could not determine the NEC C compiler version.') + k = match.groupdict() + conf.env['C_VERSION'] = (k['major'], k['minor']) @conf def sxc_common_flags(conf): - v=conf.env - v['CC_SRC_F']=[] - v['CC_TGT_F']=['-c','-o'] - if not v['LINK_CC']:v['LINK_CC']=v['CC'] - v['CCLNK_SRC_F']=[] - v['CCLNK_TGT_F']=['-o'] - v['CPPPATH_ST']='-I%s' - v['DEFINES_ST']='-D%s' - v['LIB_ST']='-l%s' - v['LIBPATH_ST']='-L%s' - v['STLIB_ST']='-l%s' - v['STLIBPATH_ST']='-L%s' - v['RPATH_ST']='' - v['SONAME_ST']=[] - v['SHLIB_MARKER']=[] - v['STLIB_MARKER']=[] - v['LINKFLAGS_cprogram']=[''] - v['cprogram_PATTERN']='%s' - v['CFLAGS_cshlib']=['-fPIC'] - v['LINKFLAGS_cshlib']=[''] - v['cshlib_PATTERN']='lib%s.so' - v['LINKFLAGS_cstlib']=[] - v['cstlib_PATTERN']='lib%s.a' + v=conf.env + v['CC_SRC_F']=[] + v['CC_TGT_F']=['-c','-o'] + if not v['LINK_CC']: + v['LINK_CC']=v['CC'] + v['CCLNK_SRC_F']=[] + v['CCLNK_TGT_F']=['-o'] + v['CPPPATH_ST']='-I%s' + v['DEFINES_ST']='-D%s' + v['LIB_ST']='-l%s' + v['LIBPATH_ST']='-L%s' + v['STLIB_ST']='-l%s' + v['STLIBPATH_ST']='-L%s' + v['RPATH_ST']='' + v['SONAME_ST']=[] + v['SHLIB_MARKER']=[] + v['STLIB_MARKER']=[] + v['LINKFLAGS_cprogram']=[''] + v['cprogram_PATTERN']='%s' + v['CFLAGS_cshlib']=['-fPIC'] + v['LINKFLAGS_cshlib']=[''] + v['cshlib_PATTERN']='lib%s.so' + v['LINKFLAGS_cstlib']=[] + v['cstlib_PATTERN']='lib%s.a' def configure(conf): conf.find_sxc() diff --git a/waflib/extras/cabal.py b/waflib/extras/cabal.py index 6b3c3aae88..e10a0d1129 100644 --- a/waflib/extras/cabal.py +++ b/waflib/extras/cabal.py @@ -3,12 +3,8 @@ # Anton Feldmann, 2012 # "Base for cabal" -import re -import time -from waflib import TaskGen, Task, 
Utils -from waflib.Configure import conf -from waflib.Task import always_run -from waflib.TaskGen import extension, feature, after, before, before_method +from waflib import Task, Utils +from waflib.TaskGen import extension from waflib.Utils import threading from shutil import rmtree diff --git a/waflib/extras/cfg_altoptions.py b/waflib/extras/cfg_altoptions.py index c60ac32c16..47b1189f47 100644 --- a/waflib/extras/cfg_altoptions.py +++ b/waflib/extras/cfg_altoptions.py @@ -33,7 +33,7 @@ def configure(cfg): import os import functools -from waflib import Task, Utils, TaskGen, Configure, Options, Errors +from waflib import Configure, Options, Errors def name_to_dest(x): return x.lower().replace('-', '_') diff --git a/waflib/extras/cfg_cross_gnu.py b/waflib/extras/cfg_cross_gnu.py deleted file mode 100644 index e2ed30f7c2..0000000000 --- a/waflib/extras/cfg_cross_gnu.py +++ /dev/null @@ -1,147 +0,0 @@ -#!/usr/bin/python -# -*- coding: utf-8 -*- -# Tool to provide dedicated variables for cross-compilation - -__author__ = __maintainer__ = "Jérôme Carretero " -__copyright__ = "Jérôme Carretero, 2014" - -""" - -This tool allows to use environment variables to define cross-compilation things, -mostly used when you use build variants. - -Usage: - -- In your build script:: - - def configure(cfg): - ... - conf.load('c_cross_gnu') - for variant in x_variants: - conf.xcheck_host() - conf.xcheck_host_var('POUET') - ... - - ... - -- Then:: - - CHOST=arm-hardfloat-linux-gnueabi waf configure - - env arm-hardfloat-linux-gnueabi-CC="clang -..." waf configure - - CFLAGS=... CHOST=arm-hardfloat-linux-gnueabi HOST_CFLAGS=-g waf configure - - HOST_CC="clang -..." waf configure - -""" - -import os -from waflib import Utils, Configure - -try: - from shlex import quote -except ImportError: - from pipes import quote - -@Configure.conf -def xcheck_prog(conf, var, tool, cross=False): - value = os.environ.get(var, '') - value = Utils.to_list(value) - - if not value: - return - - conf.env[var] = value - if cross: - pretty = 'cross-compilation %s' % var - else: - pretty = var - conf.msg('Will use %s' % pretty, - " ".join(quote(x) for x in value)) - -@Configure.conf -def xcheck_envar(conf, name, wafname=None, cross=False): - wafname = wafname or name - value = os.environ.get(name, None) - value = Utils.to_list(value) - - if not value: - return - - conf.env[wafname] += value - if cross: - pretty = 'cross-compilation %s' % wafname - else: - pretty = wafname - conf.msg('Will use %s' % pretty, - " ".join(quote(x) for x in value)) - -@Configure.conf -def xcheck_host_prog(conf, name, tool, wafname=None): - wafname = wafname or name - host = conf.env.CHOST - specific = None - if host: - specific = os.environ.get('%s-%s' % (host[0], name), None) - - if specific: - value = Utils.to_list(specific) - conf.env[wafname] += value - conf.msg('Will use cross-compilation %s' % name, - " ".join(quote(x) for x in value)) - return - - conf.xcheck_prog('HOST_%s' % name, tool, cross=True) - - if conf.env[wafname]: - return - - value = None - if host: - value = '%s-%s' % (host[0], tool) - - if value: - conf.env[wafname] = value - conf.msg('Will use cross-compilation %s' % wafname, value) - -@Configure.conf -def xcheck_host_envar(conf, name, wafname=None): - wafname = wafname or name - - host = conf.env.CHOST - specific = None - if host: - specific = os.environ.get('%s-%s' % (host[0], name), None) - - if specific: - value = Utils.to_list(specific) - conf.env[wafname] += value - conf.msg('Will use cross-compilation %s' % name, - " 
".join(quote(x) for x in value)) - return - - conf.xcheck_envar('HOST_%s' % name, wafname, cross=True) - - -@Configure.conf -def xcheck_host(conf): - conf.xcheck_envar('CHOST', cross=True) - conf.xcheck_host_prog('CC', 'gcc') - conf.xcheck_host_prog('CXX', 'g++') - conf.xcheck_host_prog('LINK_CC', 'gcc') - conf.xcheck_host_prog('LINK_CXX', 'g++') - conf.xcheck_host_prog('AR', 'ar') - conf.xcheck_host_prog('AS', 'as') - conf.xcheck_host_prog('LD', 'ld') - conf.xcheck_host_envar('CFLAGS') - conf.xcheck_host_envar('CXXFLAGS') - conf.xcheck_host_envar('LDFLAGS', 'LINKFLAGS') - conf.xcheck_host_envar('LIB') - conf.xcheck_host_envar('PKG_CONFIG_PATH') - # TODO find a better solution than this ugliness - if conf.env.PKG_CONFIG_PATH: - conf.find_program('pkg-config', var='PKGCONFIG') - conf.env.PKGCONFIG = [ - 'env', 'PKG_CONFIG_PATH=%s' % (conf.env.PKG_CONFIG_PATH[0]) - ] + conf.env.PKGCONFIG diff --git a/waflib/extras/clang_compilation_database.py b/waflib/extras/clang_compilation_database.py index a9e61ff074..bd29db93fd 100644 --- a/waflib/extras/clang_compilation_database.py +++ b/waflib/extras/clang_compilation_database.py @@ -1,6 +1,7 @@ #!/usr/bin/env python # encoding: utf-8 # Christoph Koke, 2013 +# Alibek Omarov, 2019 """ Writes the c and cpp compile commands into build/compile_commands.json @@ -8,59 +9,129 @@ Usage: - def configure(conf): - conf.load('compiler_cxx') - ... - conf.load('clang_compilation_database') + Load this tool in `options` to be able to generate database + by request in command-line and before build: + + $ waf clangdb + + def options(opt): + opt.load('clang_compilation_database') + + Otherwise, load only in `configure` to generate it always before build. + + def configure(conf): + conf.load('compiler_cxx') + ... + conf.load('clang_compilation_database') """ -import sys, os, json, shlex, pipes -from waflib import Logs, TaskGen -from waflib.Tools import c, cxx - -if sys.hexversion >= 0x3030000: - quote = shlex.quote -else: - quote = pipes.quote - -@TaskGen.feature('*') -@TaskGen.after_method('process_use') -def collect_compilation_db_tasks(self): - "Add a compilation database entry for compiled tasks" - try: - clang_db = self.bld.clang_compilation_database_tasks - except AttributeError: - clang_db = self.bld.clang_compilation_database_tasks = [] - self.bld.add_post_fun(write_compilation_database) - - for task in getattr(self, 'compiled_tasks', []): - if isinstance(task, (c.c, cxx.cxx)): - clang_db.append(task) - -def write_compilation_database(ctx): - "Write the clang compilation database as JSON" - database_file = ctx.bldnode.make_node('compile_commands.json') - Logs.info("Build commands will be stored in %s" % database_file.path_from(ctx.path)) - try: - root = json.load(database_file) - except IOError: - root = [] - clang_db = dict((x["file"], x) for x in root) - for task in getattr(ctx, 'clang_compilation_database_tasks', []): +from waflib import Logs, TaskGen, Task, Build, Scripting + +Task.Task.keep_last_cmd = True + +class ClangDbContext(Build.BuildContext): + '''generates compile_commands.json by request''' + cmd = 'clangdb' + + def write_compilation_database(self): + """ + Write the clang compilation database as JSON + """ + database_file = self.bldnode.make_node('compile_commands.json') + Logs.info('Build commands will be stored in %s', database_file.path_from(self.path)) try: - cmd = task.last_cmd - except AttributeError: - continue - directory = getattr(task, 'cwd', ctx.variant_dir) - f_node = task.inputs[0] - filename = os.path.relpath(f_node.abspath(), 
directory) - cmd = " ".join(map(quote, cmd)) - entry = { - "directory": directory, - "command": cmd, - "file": filename, - } - clang_db[filename] = entry - root = list(clang_db.values()) - database_file.write(json.dumps(root, indent=2)) + root = database_file.read_json() + except IOError: + root = [] + clang_db = dict((x['file'], x) for x in root) + for task in self.clang_compilation_database_tasks: + try: + cmd = task.last_cmd + except AttributeError: + continue + f_node = task.inputs[0] + filename = f_node.path_from(task.get_cwd()) + entry = { + "directory": task.get_cwd().abspath(), + "arguments": cmd, + "file": filename, + } + clang_db[filename] = entry + root = list(clang_db.values()) + database_file.write_json(root) + + def execute(self): + """ + Build dry run + """ + self.restore() + self.cur_tasks = [] + self.clang_compilation_database_tasks = [] + + if not self.all_envs: + self.load_envs() + + self.recurse([self.run_dir]) + self.pre_build() + + # we need only to generate last_cmd, so override + # exec_command temporarily + def exec_command(self, *k, **kw): + return 0 + + for g in self.groups: + for tg in g: + try: + f = tg.post + except AttributeError: + pass + else: + f() + + if isinstance(tg, Task.Task): + lst = [tg] + else: lst = tg.tasks + for tsk in lst: + if tsk.__class__.__name__ == "swig": + tsk.runnable_status() + if hasattr(tsk, 'more_tasks'): + lst.extend(tsk.more_tasks) + # Not all dynamic tasks can be processed, in some cases + # one may have to call the method "run()" like this: + #elif tsk.__class__.__name__ == 'src2c': + # tsk.run() + # if hasattr(tsk, 'more_tasks'): + # lst.extend(tsk.more_tasks) + + tup = tuple(y for y in [Task.classes.get(x) for x in ('c', 'cxx')] if y) + if isinstance(tsk, tup): + self.clang_compilation_database_tasks.append(tsk) + tsk.nocache = True + old_exec = tsk.exec_command + tsk.exec_command = exec_command + tsk.run() + tsk.exec_command = old_exec + + self.write_compilation_database() + +EXECUTE_PATCHED = False +def patch_execute(): + global EXECUTE_PATCHED + + if EXECUTE_PATCHED: + return + + def new_execute_build(self): + """ + Invoke clangdb command before build + """ + if self.cmd.startswith('build'): + Scripting.run_command(self.cmd.replace('build','clangdb')) + + old_execute_build(self) + + old_execute_build = getattr(Build.BuildContext, 'execute_build', None) + setattr(Build.BuildContext, 'execute_build', new_execute_build) + EXECUTE_PATCHED = True +patch_execute() diff --git a/waflib/extras/clang_cross.py b/waflib/extras/clang_cross.py new file mode 100644 index 0000000000..1b51e2886c --- /dev/null +++ b/waflib/extras/clang_cross.py @@ -0,0 +1,92 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Krzysztof KosiÅ„ski 2014 +# DragoonX6 2018 + +""" +Detect the Clang C compiler +This version is an attempt at supporting the -target and -sysroot flag of Clang. 
+""" + +from waflib.Tools import ccroot, ar, gcc +from waflib.Configure import conf +import waflib.Context +import waflib.extras.clang_cross_common + +def options(opt): + """ + Target triplet for clang:: + $ waf configure --clang-target-triple=x86_64-pc-linux-gnu + """ + cc_compiler_opts = opt.add_option_group('Configuration options') + cc_compiler_opts.add_option('--clang-target-triple', default=None, + help='Target triple for clang', + dest='clang_target_triple') + cc_compiler_opts.add_option('--clang-sysroot', default=None, + help='Sysroot for clang', + dest='clang_sysroot') + +@conf +def find_clang(conf): + """ + Finds the program clang and executes it to ensure it really is clang + """ + + import os + + cc = conf.find_program('clang', var='CC') + + if conf.options.clang_target_triple != None: + conf.env.append_value('CC', ['-target', conf.options.clang_target_triple]) + + if conf.options.clang_sysroot != None: + sysroot = str() + + if os.path.isabs(conf.options.clang_sysroot): + sysroot = conf.options.clang_sysroot + else: + sysroot = os.path.normpath(os.path.join(os.getcwd(), conf.options.clang_sysroot)) + + conf.env.append_value('CC', ['--sysroot', sysroot]) + + conf.get_cc_version(cc, clang=True) + conf.env.CC_NAME = 'clang' + +@conf +def clang_modifier_x86_64_w64_mingw32(conf): + conf.gcc_modifier_win32() + +@conf +def clang_modifier_i386_w64_mingw32(conf): + conf.gcc_modifier_win32() + +@conf +def clang_modifier_x86_64_windows_msvc(conf): + conf.clang_modifier_msvc() + + # Allow the user to override any flags if they so desire. + clang_modifier_user_func = getattr(conf, 'clang_modifier_x86_64_windows_msvc_user', None) + if clang_modifier_user_func: + clang_modifier_user_func() + +@conf +def clang_modifier_i386_windows_msvc(conf): + conf.clang_modifier_msvc() + + # Allow the user to override any flags if they so desire. + clang_modifier_user_func = getattr(conf, 'clang_modifier_i386_windows_msvc_user', None) + if clang_modifier_user_func: + clang_modifier_user_func() + +def configure(conf): + conf.find_clang() + conf.find_program(['llvm-ar', 'ar'], var='AR') + conf.find_ar() + conf.gcc_common_flags() + # Allow the user to provide flags for the target platform. + conf.gcc_modifier_platform() + # And allow more fine grained control based on the compiler's triplet. + conf.clang_modifier_target_triple() + conf.cc_load_tools() + conf.cc_add_flags() + conf.link_add_flags() diff --git a/waflib/extras/clang_cross_common.py b/waflib/extras/clang_cross_common.py new file mode 100644 index 0000000000..b76a070065 --- /dev/null +++ b/waflib/extras/clang_cross_common.py @@ -0,0 +1,113 @@ +#!/usr/bin/env python +# encoding: utf-8 +# DragoonX6 2018 + +""" +Common routines for cross_clang.py and cross_clangxx.py +""" + +from waflib.Configure import conf +import waflib.Context + +def normalize_target_triple(target_triple): + target_triple = target_triple[:-1] + normalized_triple = target_triple.replace('--', '-unknown-') + + if normalized_triple.startswith('-'): + normalized_triple = 'unknown' + normalized_triple + + if normalized_triple.endswith('-'): + normalized_triple += 'unknown' + + # Normalize MinGW builds to *arch*-w64-mingw32 + if normalized_triple.endswith('windows-gnu'): + normalized_triple = normalized_triple[:normalized_triple.index('-')] + '-w64-mingw32' + + # Strip the vendor when doing msvc builds, since it's unused anyway. 
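# (Editor's aside, not part of the patch) Worked examples of this normalization,
# assuming the raw '-dumpmachine' output ends with a newline as expected:
#   'x86_64-pc-windows-gnu\n'  -> 'x86_64_w64_mingw32'
#   'x86_64-pc-windows-msvc\n' -> 'x86_64_windows_msvc'
#   'arm--linux-gnueabihf\n'   -> 'arm_unknown_linux_gnueabihf'
# The result is a valid Python identifier, used to look up an optional
# clang_modifier_* / clangxx_modifier_* hook on the configuration context.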
+ if normalized_triple.endswith('windows-msvc'): + normalized_triple = normalized_triple[:normalized_triple.index('-')] + '-windows-msvc' + + return normalized_triple.replace('-', '_') + +@conf +def clang_modifier_msvc(conf): + import os + + """ + Really basic setup to use clang in msvc mode. + We actually don't really want to do a lot, even though clang is msvc compatible + in this mode, that doesn't mean we're actually using msvc. + It's probably the best to leave it to the user, we can assume msvc mode if the user + uses the clang-cl frontend, but this module only concerns itself with the gcc-like frontend. + """ + v = conf.env + v.cprogram_PATTERN = '%s.exe' + + v.cshlib_PATTERN = '%s.dll' + v.implib_PATTERN = '%s.lib' + v.IMPLIB_ST = '-Wl,-IMPLIB:%s' + v.SHLIB_MARKER = [] + + v.CFLAGS_cshlib = [] + v.LINKFLAGS_cshlib = ['-Wl,-DLL'] + v.cstlib_PATTERN = '%s.lib' + v.STLIB_MARKER = [] + + del(v.AR) + conf.find_program(['llvm-lib', 'lib'], var='AR') + v.ARFLAGS = ['-nologo'] + v.AR_TGT_F = ['-out:'] + + # Default to the linker supplied with llvm instead of link.exe or ld + v.LINK_CC = v.CC + ['-fuse-ld=lld', '-nostdlib'] + v.CCLNK_TGT_F = ['-o'] + v.def_PATTERN = '-Wl,-def:%s' + + v.LINKFLAGS = [] + + v.LIB_ST = '-l%s' + v.LIBPATH_ST = '-Wl,-LIBPATH:%s' + v.STLIB_ST = '-l%s' + v.STLIBPATH_ST = '-Wl,-LIBPATH:%s' + + CFLAGS_CRT_COMMON = [ + '-Xclang', '--dependent-lib=oldnames', + '-Xclang', '-fno-rtti-data', + '-D_MT' + ] + + v.CFLAGS_CRT_MULTITHREADED = CFLAGS_CRT_COMMON + [ + '-Xclang', '-flto-visibility-public-std', + '-Xclang', '--dependent-lib=libcmt', + ] + v.CXXFLAGS_CRT_MULTITHREADED = v.CFLAGS_CRT_MULTITHREADED + + v.CFLAGS_CRT_MULTITHREADED_DBG = CFLAGS_CRT_COMMON + [ + '-D_DEBUG', + '-Xclang', '-flto-visibility-public-std', + '-Xclang', '--dependent-lib=libcmtd', + ] + v.CXXFLAGS_CRT_MULTITHREADED_DBG = v.CFLAGS_CRT_MULTITHREADED_DBG + + v.CFLAGS_CRT_MULTITHREADED_DLL = CFLAGS_CRT_COMMON + [ + '-D_DLL', + '-Xclang', '--dependent-lib=msvcrt' + ] + v.CXXFLAGS_CRT_MULTITHREADED_DLL = v.CFLAGS_CRT_MULTITHREADED_DLL + + v.CFLAGS_CRT_MULTITHREADED_DLL_DBG = CFLAGS_CRT_COMMON + [ + '-D_DLL', + '-D_DEBUG', + '-Xclang', '--dependent-lib=msvcrtd', + ] + v.CXXFLAGS_CRT_MULTITHREADED_DLL_DBG = v.CFLAGS_CRT_MULTITHREADED_DLL_DBG + +@conf +def clang_modifier_target_triple(conf, cpp=False): + compiler = conf.env.CXX if cpp else conf.env.CC + output = conf.cmd_and_log(compiler + ['-dumpmachine'], output=waflib.Context.STDOUT) + + modifier = ('clangxx' if cpp else 'clang') + '_modifier_' + clang_modifier_func = getattr(conf, modifier + normalize_target_triple(output), None) + if clang_modifier_func: + clang_modifier_func() diff --git a/waflib/extras/clangxx_cross.py b/waflib/extras/clangxx_cross.py new file mode 100644 index 0000000000..0ad38ad46c --- /dev/null +++ b/waflib/extras/clangxx_cross.py @@ -0,0 +1,106 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Thomas Nagy 2009-2018 (ita) +# DragoonX6 2018 + +""" +Detect the Clang++ C++ compiler +This version is an attempt at supporting the -target and -sysroot flag of Clang++. 
+""" + +from waflib.Tools import ccroot, ar, gxx +from waflib.Configure import conf +import waflib.extras.clang_cross_common + +def options(opt): + """ + Target triplet for clang++:: + $ waf configure --clangxx-target-triple=x86_64-pc-linux-gnu + """ + cxx_compiler_opts = opt.add_option_group('Configuration options') + cxx_compiler_opts.add_option('--clangxx-target-triple', default=None, + help='Target triple for clang++', + dest='clangxx_target_triple') + cxx_compiler_opts.add_option('--clangxx-sysroot', default=None, + help='Sysroot for clang++', + dest='clangxx_sysroot') + +@conf +def find_clangxx(conf): + """ + Finds the program clang++, and executes it to ensure it really is clang++ + """ + + import os + + cxx = conf.find_program('clang++', var='CXX') + + if conf.options.clangxx_target_triple != None: + conf.env.append_value('CXX', ['-target', conf.options.clangxx_target_triple]) + + if conf.options.clangxx_sysroot != None: + sysroot = str() + + if os.path.isabs(conf.options.clangxx_sysroot): + sysroot = conf.options.clangxx_sysroot + else: + sysroot = os.path.normpath(os.path.join(os.getcwd(), conf.options.clangxx_sysroot)) + + conf.env.append_value('CXX', ['--sysroot', sysroot]) + + conf.get_cc_version(cxx, clang=True) + conf.env.CXX_NAME = 'clang' + +@conf +def clangxx_modifier_x86_64_w64_mingw32(conf): + conf.gcc_modifier_win32() + +@conf +def clangxx_modifier_i386_w64_mingw32(conf): + conf.gcc_modifier_win32() + +@conf +def clangxx_modifier_msvc(conf): + v = conf.env + v.cxxprogram_PATTERN = v.cprogram_PATTERN + v.cxxshlib_PATTERN = v.cshlib_PATTERN + + v.CXXFLAGS_cxxshlib = [] + v.LINKFLAGS_cxxshlib = v.LINKFLAGS_cshlib + v.cxxstlib_PATTERN = v.cstlib_PATTERN + + v.LINK_CXX = v.CXX + ['-fuse-ld=lld', '-nostdlib'] + v.CXXLNK_TGT_F = v.CCLNK_TGT_F + +@conf +def clangxx_modifier_x86_64_windows_msvc(conf): + conf.clang_modifier_msvc() + conf.clangxx_modifier_msvc() + + # Allow the user to override any flags if they so desire. + clang_modifier_user_func = getattr(conf, 'clangxx_modifier_x86_64_windows_msvc_user', None) + if clang_modifier_user_func: + clang_modifier_user_func() + +@conf +def clangxx_modifier_i386_windows_msvc(conf): + conf.clang_modifier_msvc() + conf.clangxx_modifier_msvc() + + # Allow the user to override any flags if they so desire. + clang_modifier_user_func = getattr(conf, 'clangxx_modifier_i386_windows_msvc_user', None) + if clang_modifier_user_func: + clang_modifier_user_func() + +def configure(conf): + conf.find_clangxx() + conf.find_program(['llvm-ar', 'ar'], var='AR') + conf.find_ar() + conf.gxx_common_flags() + # Allow the user to provide flags for the target platform. + conf.gxx_modifier_platform() + # And allow more fine grained control based on the compiler's triplet. 
+ conf.clang_modifier_target_triple(cpp=True) + conf.cxx_load_tools() + conf.cxx_add_flags() + conf.link_add_flags() diff --git a/waflib/extras/classic_runner.py b/waflib/extras/classic_runner.py new file mode 100644 index 0000000000..b08c794e88 --- /dev/null +++ b/waflib/extras/classic_runner.py @@ -0,0 +1,68 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2021 (ita) + +from waflib import Utils, Runner + +""" +Re-enable the classic threading system from waf 1.x + +def configure(conf): + conf.load('classic_runner') +""" + +class TaskConsumer(Utils.threading.Thread): + """ + Task consumers belong to a pool of workers + + They wait for tasks in the queue and then use ``task.process(...)`` + """ + def __init__(self, spawner): + Utils.threading.Thread.__init__(self) + """ + Obtain :py:class:`waflib.Task.TaskBase` instances from this queue. + """ + self.spawner = spawner + self.daemon = True + self.start() + + def run(self): + """ + Loop over the tasks to execute + """ + try: + self.loop() + except Exception: + pass + + def loop(self): + """ + Obtain tasks from :py:attr:`waflib.Runner.TaskConsumer.ready` and call + :py:meth:`waflib.Task.TaskBase.process`. If the object is a function, execute it. + """ + master = self.spawner.master + while 1: + if not master.stop: + try: + tsk = master.ready.get() + if tsk: + tsk.log_display(tsk.generator.bld) + master.process_task(tsk) + else: + break + finally: + master.out.put(tsk) + +class Spawner(object): + """ + Daemon thread that consumes tasks from :py:class:`waflib.Runner.Parallel` producer and + spawns a consuming thread :py:class:`waflib.Runner.Consumer` for each + :py:class:`waflib.Task.Task` instance. + """ + def __init__(self, master): + self.master = master + """:py:class:`waflib.Runner.Parallel` producer instance""" + + self.pool = [TaskConsumer(self) for i in range(master.numjobs)] + +Runner.Spawner = Spawner diff --git a/waflib/extras/codelite.py b/waflib/extras/codelite.py index 56219cfd8a..523302c05c 100644 --- a/waflib/extras/codelite.py +++ b/waflib/extras/codelite.py @@ -244,7 +244,8 @@ def compile_template(line): extr = [] def repl(match): g = match.group - if g('dollar'): return "$" + if g('dollar'): + return "$" elif g('backslash'): return "\\" elif g('subst'): @@ -269,14 +270,14 @@ def app(txt): app("lst.append(%r)" % params[x]) f = extr[x] - if f.startswith('if') or f.startswith('for'): + if f.startswith(('if', 'for')): app(f + ':') indent += 1 elif f.startswith('py:'): app(f[3:]) - elif f.startswith('endif') or f.startswith('endfor'): + elif f.startswith(('endif', 'endfor')): indent -= 1 - elif f.startswith('else') or f.startswith('elif'): + elif f.startswith(('else', 'elif')): indent -= 1 app(f + ':') indent += 1 @@ -302,20 +303,20 @@ def rm_blank_lines(txt): BOM = '\xef\xbb\xbf' try: - BOM = bytes(BOM, 'iso8859-1') # python 3 -except NameError: + BOM = bytes(BOM, 'latin-1') # python 3 +except (TypeError, NameError): pass def stealth_write(self, data, flags='wb'): try: - x = unicode + unicode except NameError: data = data.encode('utf-8') # python 3 else: data = data.decode(sys.getfilesystemencoding(), 'replace') data = data.encode('utf-8') - if self.name.endswith('.project') or self.name.endswith('.project'): + if self.name.endswith('.project'): data = BOM + data try: @@ -325,7 +326,7 @@ def stealth_write(self, data, flags='wb'): except (IOError, ValueError): self.write(data, flags=flags) else: - Logs.debug('codelite: skipping %s' % self.abspath()) + Logs.debug('codelite: skipping %r', self) Node.Node.stealth_write = 
stealth_write re_quote = re.compile("[^a-zA-Z0-9-]") @@ -470,7 +471,7 @@ def add(x): return lst def write(self): - Logs.debug('codelite: creating %r' % self.path) + Logs.debug('codelite: creating %r', self.path) #print "self.name:",self.name # first write the project file @@ -491,7 +492,7 @@ def get_key(self, node): required for writing the source files """ name = node.name - if name.endswith('.cpp') or name.endswith('.c'): + if name.endswith(('.cpp', '.c')): return 'sourcefile' return 'headerfile' @@ -511,7 +512,7 @@ def collect_properties(self): x.preprocessor_definitions = '' x.includes_search_path = '' - # can specify "deploy_dir" too + # can specify "deploy_dir" too ret.append(x) self.build_properties = ret @@ -588,10 +589,10 @@ def __init__(self, ctx, node, name='project_view'): vsnode_alias.__init__(self, ctx, node, name) self.tg = self.ctx() # fake one, cannot remove self.exclude_files = Node.exclude_regs + ''' -waf-1.8.* -waf3-1.8.*/** -.waf-1.8.* -.waf3-1.8.*/** +waf-2* +waf3-2*/** +.waf-2* +.waf3-2*/** **/*.sdf **/*.suo **/*.ncb @@ -737,22 +738,20 @@ def sortfun(x): return '' return getattr(x, 'path', None) and x.path.abspath() or x.name self.all_projects.sort(key=sortfun) - def write_files(self): - """ Write the project and solution files from the data collected so far. It is unlikely that you will want to change this """ - for p in self.all_projects: + for p in self.all_projects: p.write() # and finally write the solution file node = self.get_solution_node() node.parent.mkdir() - Logs.warn('Creating %r' % node) - #a = dir(self.root) + Logs.warn('Creating %r', node) + #a = dir(self.root) #for b in a: # print b #print self.group_names @@ -874,8 +873,3 @@ def make_parents(proj): p.iter_path = p.tg.path make_parents(p) - - -def options(ctx): - pass - diff --git a/waflib/extras/color_gcc.py b/waflib/extras/color_gcc.py index 71842405ca..b580396c81 100644 --- a/waflib/extras/color_gcc.py +++ b/waflib/extras/color_gcc.py @@ -2,6 +2,9 @@ # encoding: utf-8 # Replaces the default formatter by one which understands GCC output and colorizes it. +# +# This is mostly obsolete as gcc/g++ provide colored outputs by default: +# CFLAGS="-fdiagnostics-color=always" CXXFLAGS="-fdiagnostics-color=always" waf configure clean build __author__ = __maintainer__ = "Jérôme Carretero " __copyright__ = "Jérôme Carretero, 2012" @@ -18,8 +21,8 @@ def format(self, rec): while frame: func = frame.f_code.co_name if func == 'exec_command': - cmd = frame.f_locals['cmd'] - if isinstance(cmd, list) and ('gcc' in cmd[0] or 'g++' in cmd[0]): + cmd = frame.f_locals.get('cmd') + if isinstance(cmd, list) and (len(cmd) > 0) and ('gcc' in cmd[0] or 'g++' in cmd[0]): lines = [] for line in rec.msg.splitlines(): if 'warning: ' in line: @@ -36,4 +39,3 @@ def format(self, rec): def options(opt): Logs.log.handlers[0].setFormatter(ColorGCCFormatter(Logs.colors)) - diff --git a/waflib/extras/color_msvc.py b/waflib/extras/color_msvc.py new file mode 100644 index 0000000000..3ea9ee635a --- /dev/null +++ b/waflib/extras/color_msvc.py @@ -0,0 +1,59 @@ +#!/usr/bin/env python +# encoding: utf-8 + +# Replaces the default formatter by one which understands MSVC output and colorizes it. 
+# Modified from color_gcc.py + +__author__ = __maintainer__ = "Alibek Omarov " +__copyright__ = "Alibek Omarov, 2019" + +import sys +from waflib import Logs + +class ColorMSVCFormatter(Logs.formatter): + def __init__(self, colors): + self.colors = colors + Logs.formatter.__init__(self) + + def parseMessage(self, line, color): + # Split messaage from 'disk:filepath: type: message' + arr = line.split(':', 3) + if len(arr) < 4: + return line + + colored = self.colors.BOLD + arr[0] + ':' + arr[1] + ':' + self.colors.NORMAL + colored += color + arr[2] + ':' + self.colors.NORMAL + colored += arr[3] + return colored + + def format(self, rec): + frame = sys._getframe() + while frame: + func = frame.f_code.co_name + if func == 'exec_command': + cmd = frame.f_locals.get('cmd') + if isinstance(cmd, list): + # Fix file case, it may be CL.EXE or cl.exe + argv0 = cmd[0].lower() + if 'cl.exe' in argv0: + lines = [] + # This will not work with "localized" versions + # of MSVC + for line in rec.msg.splitlines(): + if ': warning ' in line: + lines.append(self.parseMessage(line, self.colors.YELLOW)) + elif ': error ' in line: + lines.append(self.parseMessage(line, self.colors.RED)) + elif ': fatal error ' in line: + lines.append(self.parseMessage(line, self.colors.RED + self.colors.BOLD)) + elif ': note: ' in line: + lines.append(self.parseMessage(line, self.colors.CYAN)) + else: + lines.append(line) + rec.msg = "\n".join(lines) + frame = frame.f_back + return Logs.formatter.format(self, rec) + +def options(opt): + Logs.log.handlers[0].setFormatter(ColorMSVCFormatter(Logs.colors)) + diff --git a/waflib/extras/compat15.py b/waflib/extras/compat15.py index e51dcfc86b..0e74df8506 100644 --- a/waflib/extras/compat15.py +++ b/waflib/extras/compat15.py @@ -24,6 +24,9 @@ sys.modules['Runner'] = Runner sys.modules['TaskGen'] = TaskGen sys.modules['Utils'] = Utils +sys.modules['Constants'] = Context +Context.SRCDIR = '' +Context.BLDDIR = '' from waflib.Tools import c_preproc sys.modules['preproc'] = c_preproc @@ -34,6 +37,8 @@ ConfigSet.ConfigSet.copy = ConfigSet.ConfigSet.derive ConfigSet.ConfigSet.set_variant = Utils.nada +Utils.pproc = Utils.subprocess + Build.BuildContext.add_subdirs = Build.BuildContext.recurse Build.BuildContext.new_task_gen = Build.BuildContext.__call__ Build.BuildContext.is_install = 0 @@ -102,7 +107,7 @@ def retrieve(self, name, fromenv=None): self.all_envs[name] = env else: if fromenv: - Logs.warn("The environment %s may have been configured already" % name) + Logs.warn('The environment %s may have been configured already', name) return env Configure.ConfigurationContext.retrieve = retrieve @@ -117,7 +122,7 @@ def retrieve(self, name, fromenv=None): Options.Handler = Options.OptionsContext Task.simple_task_type = Task.task_type_from_func = Task.task_factory -Task.TaskBase.classes = Task.classes +Task.Task.classes = Task.classes def setitem(self, key, value): if key.startswith('CCFLAGS'): @@ -150,22 +155,34 @@ def get_curdir(self): return self.path.abspath() Context.Context.curdir = property(get_curdir, Utils.nada) +def get_srcdir(self): + return self.srcnode.abspath() +Configure.ConfigurationContext.srcdir = property(get_srcdir, Utils.nada) + +def get_blddir(self): + return self.bldnode.abspath() +Configure.ConfigurationContext.blddir = property(get_blddir, Utils.nada) + +Configure.ConfigurationContext.check_message_1 = Configure.ConfigurationContext.start_msg +Configure.ConfigurationContext.check_message_2 = Configure.ConfigurationContext.end_msg rev = Context.load_module def 
load_module(path, encoding=None): ret = rev(path, encoding) if 'set_options' in ret.__dict__: if Logs.verbose: - Logs.warn('compat: rename "set_options" to "options" (%r)' % path) + Logs.warn('compat: rename "set_options" to "options" (%r)', path) ret.options = ret.set_options if 'srcdir' in ret.__dict__: if Logs.verbose: - Logs.warn('compat: rename "srcdir" to "top" (%r)' % path) + Logs.warn('compat: rename "srcdir" to "top" (%r)', path) ret.top = ret.srcdir if 'blddir' in ret.__dict__: if Logs.verbose: - Logs.warn('compat: rename "blddir" to "out" (%r)' % path) + Logs.warn('compat: rename "blddir" to "out" (%r)', path) ret.out = ret.blddir + Utils.g_module = Context.g_module + Options.launch_dir = Context.launch_dir return ret Context.load_module = load_module @@ -212,8 +229,8 @@ def apply_uselib_local(self): self.includes = self.to_list(getattr(self, 'includes', [])) names = self.to_list(getattr(self, 'uselib_local', [])) get = self.bld.get_tgen_by_name - seen = set([]) - seen_uselib = set([]) + seen = set() + seen_uselib = set() tmp = Utils.deque(names) # consume a copy of the list of names if tmp: if Logs.verbose: @@ -299,10 +316,12 @@ def apply_objdeps(self): lst = y.to_list(y.add_objects) lst.reverse() for u in lst: - if u in seen: continue + if u in seen: + continue added = 1 names = [u]+names - if added: continue # list of names modified, loop + if added: + continue # list of names modified, loop # safe to process the current object y.post() @@ -324,24 +343,26 @@ def add_obj_file(self, file): """Small example on how to link object files as if they were source obj = bld.create_obj('cc') obj.add_obj_file('foo.o')""" - if not hasattr(self, 'obj_files'): self.obj_files = [] - if not 'process_obj_files' in self.meths: self.meths.append('process_obj_files') + if not hasattr(self, 'obj_files'): + self.obj_files = [] + if not 'process_obj_files' in self.meths: + self.meths.append('process_obj_files') self.obj_files.append(file) old_define = Configure.ConfigurationContext.__dict__['define'] @Configure.conf -def define(self, key, val, quote=True): - old_define(self, key, val, quote) +def define(self, key, val, quote=True, comment=''): + old_define(self, key, val, quote, comment) if key.startswith('HAVE_'): self.env[key] = 1 old_undefine = Configure.ConfigurationContext.__dict__['undefine'] @Configure.conf -def undefine(self, key): - old_undefine(self, key) +def undefine(self, key, comment=''): + old_undefine(self, key, comment) if key.startswith('HAVE_'): self.env[key] = 0 @@ -358,13 +379,28 @@ def install_dir(self, path): destpath = Utils.subst_vars(path, self.env) if self.is_install > 0: - Logs.info('* creating %s' % destpath) + Logs.info('* creating %s', destpath) Utils.check_dir(destpath) elif self.is_install < 0: - Logs.info('* removing %s' % destpath) + Logs.info('* removing %s', destpath) try: os.remove(destpath) except OSError: pass Build.BuildContext.install_dir = install_dir +# before/after names +repl = {'apply_core': 'process_source', + 'apply_lib_vars': 'process_source', + 'apply_obj_vars': 'propagate_uselib_vars', + 'exec_rule': 'process_rule' +} +def after(*k): + k = [repl.get(key, key) for key in k] + return TaskGen.after_method(*k) + +def before(*k): + k = [repl.get(key, key) for key in k] + return TaskGen.before_method(*k) +TaskGen.before = before + diff --git a/waflib/extras/cppcheck.py b/waflib/extras/cppcheck.py index c60f9fab30..13ff42477f 100644 --- a/waflib/extras/cppcheck.py +++ b/waflib/extras/cppcheck.py @@ -5,12 +5,12 @@ """ Tool Description ================ 
-This module provides a waf wrapper (i.e. waftool) around the C/C++ source code +This module provides a waf wrapper (i.e. waftool) around the C/C++ source code checking tool 'cppcheck'. See http://cppcheck.sourceforge.net/ for more information on the cppcheck tool -itself. -Note that many linux distributions already provide a ready to install version +itself. +Note that many linux distributions already provide a ready to install version of cppcheck. On fedora, for instance, it can be installed using yum: 'sudo yum install cppcheck' @@ -18,7 +18,7 @@ Usage ===== -In order to use this waftool simply add it to the 'options' and 'configure' +In order to use this waftool simply add it to the 'options' and 'configure' functions of your main waf script as shown in the example below: def options(opt): @@ -26,12 +26,12 @@ def configure(conf): conf.load('cppcheck') - -Note that example shown above assumes that the cppcheck waftool is located in + +Note that the example shown above assumes that the cppcheck waftool is located in the sub directory named 'waftools'. -When configured as shown in the example above, cppcheck will automatically -perform a source code analysis on all C/C++ build tasks that have been +When configured as shown in the example above, cppcheck will automatically +perform a source code analysis on all C/C++ build tasks that have been defined in your waf build system. The example shown below for a C program will be used as input for cppcheck when @@ -40,16 +40,21 @@ def configure(conf): def build(bld): bld.program(name='foo', src='foobar.c') -The result of the source code analysis will be stored both as xml and html -files in the build location for the task. Should any error be detected by +The result of the source code analysis will be stored both as xml and html +files in the build location for the task. Should any error be detected by cppcheck the build will be aborted and a link to the html report will be shown. +By default, one index.html file is created for each task generator. A global +index.html file can be obtained by setting the following variable +in the configuration section: + + conf.env.CPPCHECK_SINGLE_HTML = False -When needed source code checking by cppcheck can be disabled per task, per -detected error or warning for a particular task. It can be also be disabled for +When needed source code checking by cppcheck can be disabled per task, per +detected error or warning for a particular task. It can also be disabled for all tasks. In order to exclude a task from source code checking add the skip option to the -task as shown below: +task as shown below: def build(bld): bld.program( @@ -58,8 +63,8 @@ def build(bld): cppcheck_skip=True ) -When needed problems detected by cppcheck may be suppressed using a file -containing a list of suppression rules. The relative or absolute path to this +When needed, problems detected by cppcheck may be suppressed using a file +containing a list of suppression rules. The relative or absolute path to this file can be added to the build task as shown in the example below: bld.program( @@ -68,27 +73,27 @@ def build(bld): cppcheck_suppress='bar.suppress' ) -A cppcheck suppress file should contain one suppress rule per line. Each of +A cppcheck suppress file should contain one suppress rule per line. Each of these rules will be passed as an '--suppress=' argument to cppcheck.
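For illustration (an editorial sketch, not part of this patch): cppcheck suppress rules generally take the form errorId[:filename[:line]], with the file name and line number optional. A 'bar.suppress' file might therefore contain entries such as

    unusedFunction
    nullPointer:src/bar.c:17

where the path and line number shown are purely hypothetical; each such line is forwarded to cppcheck unchanged via '--suppress='.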
Dependencies ================ -This waftool depends on the python pygments module, it is used for source code -syntax highlighting when creating the html reports. see http://pygments.org/ for +This waftool depends on the python pygments module, it is used for source code +syntax highlighting when creating the html reports. see http://pygments.org/ for more information on this package. Remarks ================ -The generation of the html report is originally based on the cppcheck-htmlreport.py +The generation of the html report is originally based on the cppcheck-htmlreport.py script that comes shipped with the cppcheck tool. """ -import os, sys +import sys import xml.etree.ElementTree as ElementTree -from waflib import Task, TaskGen, Logs, Context +from waflib import Task, TaskGen, Logs, Context, Options PYGMENTS_EXC_MSG= ''' -The required module 'pygments' could not be found. Please install it using your +The required module 'pygments' could not be found. Please install it using your platform package manager (e.g. apt-get or yum), using 'pip' or 'easy_install', see 'http://pygments.org/download/' for installation instructions. ''' @@ -98,42 +103,45 @@ def build(bld): from pygments import formatters, lexers except ImportError as e: Logs.warn(PYGMENTS_EXC_MSG) - raise e + raise e def options(opt): - opt.add_option('--cppcheck-skip', dest='cppcheck_skip', - default=False, action='store_true', + opt.add_option('--cppcheck-skip', dest='cppcheck_skip', + default=False, action='store_true', help='do not check C/C++ sources (default=False)') - opt.add_option('--cppcheck-err-resume', dest='cppcheck_err_resume', - default=False, action='store_true', + opt.add_option('--cppcheck-err-resume', dest='cppcheck_err_resume', + default=False, action='store_true', help='continue in case of errors (default=False)') - opt.add_option('--cppcheck-bin-enable', dest='cppcheck_bin_enable', + opt.add_option('--cppcheck-bin-enable', dest='cppcheck_bin_enable', default='warning,performance,portability,style,unusedFunction', action='store', help="cppcheck option '--enable=' for binaries (default=warning,performance,portability,style,unusedFunction)") - opt.add_option('--cppcheck-lib-enable', dest='cppcheck_lib_enable', - default='warning,performance,portability,style', action='store', + opt.add_option('--cppcheck-lib-enable', dest='cppcheck_lib_enable', + default='warning,performance,portability,style', action='store', help="cppcheck option '--enable=' for libraries (default=warning,performance,portability,style)") opt.add_option('--cppcheck-std-c', dest='cppcheck_std_c', - default='c99', action='store', + default='c99', action='store', help='cppcheck standard to use when checking C (default=c99)') opt.add_option('--cppcheck-std-cxx', dest='cppcheck_std_cxx', - default='c++03', action='store', + default='c++03', action='store', help='cppcheck standard to use when checking C++ (default=c++03)') - opt.add_option('--cppcheck-check-config', dest='cppcheck_check_config', - default=False, action='store_true', + opt.add_option('--cppcheck-check-config', dest='cppcheck_check_config', + default=False, action='store_true', help='forced check for missing buildin include files, e.g. 
stdio.h (default=False)') opt.add_option('--cppcheck-max-configs', dest='cppcheck_max_configs', - default='20', action='store', + default='20', action='store', help='maximum preprocessor (--max-configs) define iterations (default=20)') + opt.add_option('--cppcheck-jobs', dest='cppcheck_jobs', + default='1', action='store', + help='number of jobs (-j) to do the checking work (default=1)') def configure(conf): if conf.options.cppcheck_skip: @@ -143,20 +151,27 @@ def configure(conf): conf.env.CPPCHECK_MAX_CONFIGS = conf.options.cppcheck_max_configs conf.env.CPPCHECK_BIN_ENABLE = conf.options.cppcheck_bin_enable conf.env.CPPCHECK_LIB_ENABLE = conf.options.cppcheck_lib_enable + conf.env.CPPCHECK_JOBS = conf.options.cppcheck_jobs + if conf.options.cppcheck_jobs != '1' and ('unusedFunction' in conf.options.cppcheck_bin_enable or 'unusedFunction' in conf.options.cppcheck_lib_enable or 'all' in conf.options.cppcheck_bin_enable or 'all' in conf.options.cppcheck_lib_enable): + Logs.warn('cppcheck: unusedFunction cannot be used with multiple threads, cppcheck will disable it automatically') conf.find_program('cppcheck', var='CPPCHECK') + # set to True to get a single index.html file + conf.env.CPPCHECK_SINGLE_HTML = False @TaskGen.feature('c') @TaskGen.feature('cxx') def cppcheck_execute(self): - if len(self.env.CPPCHECK_SKIP) or self.bld.options.cppcheck_skip: + if hasattr(self.bld, 'conf'): + return + if len(self.env.CPPCHECK_SKIP) or Options.options.cppcheck_skip: return if getattr(self, 'cppcheck_skip', False): return task = self.create_task('cppcheck') task.cmd = _tgen_create_cmd(self) task.fatal = [] - if not self.bld.options.cppcheck_err_resume: + if not Options.options.cppcheck_err_resume: task.fatal.append('error') @@ -167,10 +182,12 @@ def _tgen_create_cmd(self): max_configs = self.env.CPPCHECK_MAX_CONFIGS bin_enable = self.env.CPPCHECK_BIN_ENABLE lib_enable = self.env.CPPCHECK_LIB_ENABLE + jobs = self.env.CPPCHECK_JOBS - cmd = '%s' % self.env.CPPCHECK + cmd = self.env.CPPCHECK args = ['--inconclusive','--report-progress','--verbose','--xml','--xml-version=2'] args.append('--max-configs=%s' % max_configs) + args.append('-j %s' % jobs) if 'cxx' in features: args.append('--language=c++') @@ -179,7 +196,7 @@ def _tgen_create_cmd(self): args.append('--language=c') args.append('--std=%s' % std_c) - if self.bld.options.cppcheck_check_config: + if Options.options.cppcheck_check_config: args.append('--check-config') if set(['cprogram','cxxprogram']) & set(features): @@ -188,12 +205,18 @@ def _tgen_create_cmd(self): args.append('--enable=%s' % lib_enable) for src in self.to_list(getattr(self, 'source', [])): - args.append('%r' % src) + if not isinstance(src, str): + src = repr(src) + args.append(src) for inc in self.to_incnodes(self.to_list(getattr(self, 'includes', []))): - args.append('-I%r' % inc) + if not isinstance(inc, str): + inc = repr(inc) + args.append('-I%s' % inc) for inc in self.to_incnodes(self.to_list(self.env.INCLUDES)): - args.append('-I%r' % inc) - return '%s %s' % (cmd, ' '.join(args)) + if not isinstance(inc, str): + inc = repr(inc) + args.append('-I%s' % inc) + return cmd + args class cppcheck(Task.Task): @@ -215,8 +238,11 @@ def _save_xml_report(self, s): root = ElementTree.fromstring(s) cmd = ElementTree.SubElement(root.find('cppcheck'), 'cmd') cmd.text = str(self.cmd) - body = ElementTree.tostring(root) - node = self.generator.path.get_bld().find_or_declare('cppcheck.xml') + body = ElementTree.tostring(root).decode('us-ascii') + body_html_name = 'cppcheck-%s.xml' % 
self.generator.get_name() + if self.env.CPPCHECK_SINGLE_HTML: + body_html_name = 'cppcheck.xml' + node = self.generator.path.get_bld().find_or_declare(body_html_name) node.write(header + body) def _get_defects(self, xml_string): @@ -244,21 +270,24 @@ def _create_html_report(self, defects): def _create_html_files(self, defects): sources = {} - defects = [defect for defect in defects if defect.has_key('file')] + defects = [defect for defect in defects if 'file' in defect] for defect in defects: name = defect['file'] - if not sources.has_key(name): + if not name in sources: sources[name] = [defect] else: sources[name].append(defect) - + files = {} css_style_defs = None bpath = self.generator.path.get_bld().abspath() - names = sources.keys() + names = list(sources.keys()) for i in range(0,len(names)): name = names[i] - htmlfile = 'cppcheck/%i.html' % (i) + if self.env.CPPCHECK_SINGLE_HTML: + htmlfile = 'cppcheck/%i.html' % (i) + else: + htmlfile = 'cppcheck/%s%i.html' % (self.generator.get_name(),i) errors = sources[name] files[name] = { 'htmlfile': '%s/%s' % (bpath, htmlfile), 'errors': errors } css_style_defs = self._create_html_file(name, htmlfile, errors) @@ -279,19 +308,25 @@ def _create_html_file(self, sourcefile, htmlfile, errors): if div.get('id') == 'header': h1 = div.find('h1') h1.text = 'cppcheck report - %s' % name + if div.get('id') == 'menu': + indexlink = div.find('a') + if self.env.CPPCHECK_SINGLE_HTML: + indexlink.attrib['href'] = 'index.html' + else: + indexlink.attrib['href'] = 'index-%s.html' % name if div.get('id') == 'content': content = div srcnode = self.generator.bld.root.find_node(sourcefile) - hl_lines = [e['line'] for e in errors if e.has_key('line')] + hl_lines = [e['line'] for e in errors if 'line' in e] formatter = CppcheckHtmlFormatter(linenos=True, style='colorful', hl_lines=hl_lines, lineanchors='line') - formatter.errors = [e for e in errors if e.has_key('line')] + formatter.errors = [e for e in errors if 'line' in e] css_style_defs = formatter.get_style_defs('.highlight') lexer = pygments.lexers.guess_lexer_for_filename(sourcefile, "") s = pygments.highlight(srcnode.read(), lexer, formatter) table = ElementTree.fromstring(s) content.append(table) - s = ElementTree.tostring(root, method='html') + s = ElementTree.tostring(root, method='html').decode('us-ascii') s = CCPCHECK_HTML_TYPE + s node = self.generator.path.get_bld().find_or_declare(htmlfile) node.write(s) @@ -315,10 +350,19 @@ def _create_html_index(self, files): if div.get('id') == 'content': content = div self._create_html_table(content, files) + if div.get('id') == 'menu': + indexlink = div.find('a') + if self.env.CPPCHECK_SINGLE_HTML: + indexlink.attrib['href'] = 'index.html' + else: + indexlink.attrib['href'] = 'index-%s.html' % name - s = ElementTree.tostring(root, method='html') + s = ElementTree.tostring(root, method='html').decode('us-ascii') s = CCPCHECK_HTML_TYPE + s - node = self.generator.path.get_bld().find_or_declare('cppcheck/index.html') + index_html_name = 'cppcheck/index-%s.html' % name + if self.env.CPPCHECK_SINGLE_HTML: + index_html_name = 'cppcheck/index.html' + node = self.generator.path.get_bld().find_or_declare(index_html_name) node.write(s) return node @@ -330,9 +374,9 @@ def _create_html_table(self, content, files): row = ElementTree.fromstring(s) table.append(row) - errors = sorted(val['errors'], key=lambda e: int(e['line']) if e.has_key('line') else sys.maxint) + errors = sorted(val['errors'], key=lambda e: int(e['line']) if 'line' in e else sys.maxint) for e in errors: - 
if not e.has_key('line'): + if not 'line' in e: s = '\n' % (e['id'], e['severity'], e['msg']) else: attr = '' @@ -352,7 +396,7 @@ def _create_css_file(self, css_style_defs): node.write(css) def _errors_evaluate(self, errors, http_index): - name = self.generator.get_name() + name = self.generator.get_name() fatal = self.fatal severity = [err['severity'] for err in errors] problems = [err for err in errors if err['severity'] != 'information'] @@ -382,7 +426,7 @@ def wrap(self, source, outfile): for error in self.errors: if int(error['line']) == line_no: t = t.replace('\n', CPPCHECK_HTML_ERROR % error['msg']) - line_no = line_no + 1 + line_no += 1 yield i, t @@ -418,7 +462,7 @@ def wrap(self, source, outfile):   -   +   diff --git a/waflib/extras/cpplint.py b/waflib/extras/cpplint.py index bbeb53640b..afc09c9472 100644 --- a/waflib/extras/cpplint.py +++ b/waflib/extras/cpplint.py @@ -8,19 +8,10 @@ This is an extra tool, not bundled with the default waf binary. To add the cpplint tool to the waf file: $ ./waf-light --tools=compat15,cpplint - or, if you have waf >= 1.6.2 -$ ./waf update --files=cpplint this tool also requires cpplint for python. If you have PIP, you can install it like this: pip install cpplint -But I'd recommend getting the latest version from the SVN, -the PIP version is outdated. -https://code.google.com/p/google-styleguide/source/browse/trunk/cpplint/cpplint.py -Apply this patch if you want to run it with Python 3: -https://code.google.com/p/google-styleguide/issues/detail?id=19 - - When using this tool, the wscript will look like: def options(opt): @@ -44,65 +35,59 @@ def build(bld): bld(features='cpplint', source=bld.path.ant_glob('**/*.hpp')) ''' +from __future__ import absolute_import import sys, re import logging -import threading -from waflib import Task, Build, TaskGen, Logs, Utils -try: - from cpplint.cpplint import ProcessFile, _cpplint_state -except ImportError: - pass +from waflib import Errors, Task, TaskGen, Logs, Options, Node, Utils critical_errors = 0 CPPLINT_FORMAT = '[CPPLINT] %(filename)s:\nline %(linenum)s, severity %(confidence)s, category: %(category)s\n%(message)s\n' -RE_EMACS = re.compile('(?P.*):(?P\d+): (?P.*) \[(?P.*)\] \[(?P\d+)\]'); +RE_EMACS = re.compile(r'(?P.*):(?P\d+): (?P.*) \[(?P.*)\] \[(?P\d+)\]') CPPLINT_RE = { 'waf': RE_EMACS, 'emacs': RE_EMACS, - 'vs7': re.compile('(?P.*)\((?P\d+)\): (?P.*) \[(?P.*)\] \[(?P\d+)\]'), - 'eclipse': re.compile('(?P.*):(?P\d+): warning: (?P.*) \[(?P.*)\] \[(?P\d+)\]'), + 'vs7': re.compile(r'(?P.*)\((?P\d+)\): (?P.*) \[(?P.*)\] \[(?P\d+)\]'), + 'eclipse': re.compile(r'(?P.*):(?P\d+): warning: (?P.*) \[(?P.*)\] \[(?P\d+)\]'), } - - - -def init_env_from_options(env): - from waflib.Options import options - for key, value in options.__dict__.items(): - if not key.startswith('CPPLINT_') or env[key]: - continue - env[key] = value - if env.CPPLINT_OUTPUT != 'waf': - _cpplint_state.output_format = env.CPPLINT_OUTPUT +CPPLINT_STR = ('${CPPLINT} ' + '--verbose=${CPPLINT_LEVEL} ' + '--output=${CPPLINT_OUTPUT} ' + '--filter=${CPPLINT_FILTERS} ' + '--root=${CPPLINT_ROOT} ' + '--linelength=${CPPLINT_LINE_LENGTH} ') def options(opt): opt.add_option('--cpplint-filters', type='string', default='', dest='CPPLINT_FILTERS', help='add filters to cpplint') + opt.add_option('--cpplint-length', type='int', + default=80, dest='CPPLINT_LINE_LENGTH', + help='specify the line length (default: 80)') opt.add_option('--cpplint-level', default=1, type='int', dest='CPPLINT_LEVEL', help='specify the log level (default: 1)') 
opt.add_option('--cpplint-break', default=5, type='int', dest='CPPLINT_BREAK', help='break the build if error >= level (default: 5)') + opt.add_option('--cpplint-root', type='string', + default='', dest='CPPLINT_ROOT', + help='root directory used to derive header guard') opt.add_option('--cpplint-skip', action='store_true', default=False, dest='CPPLINT_SKIP', help='skip cpplint during build') opt.add_option('--cpplint-output', type='string', default='waf', dest='CPPLINT_OUTPUT', - help='select output format (waf, emacs, vs7)') + help='select output format (waf, emacs, vs7, eclipse)') def configure(conf): - conf.start_msg('Checking cpplint') try: - import cpplint - conf.end_msg('ok') - except ImportError: + conf.find_program('cpplint', var='CPPLINT') + except Errors.ConfigurationError: conf.env.CPPLINT_SKIP = True - conf.end_msg('not found, skipping it.') -class cpplint_formatter(Logs.formatter): +class cpplint_formatter(Logs.formatter, object): def __init__(self, fmt): logging.Formatter.__init__(self, CPPLINT_FORMAT) self.fmt = fmt @@ -116,7 +101,7 @@ def format(self, rec): return super(cpplint_formatter, self).format(rec) -class cpplint_handler(Logs.log_handler): +class cpplint_handler(Logs.log_handler, object): def __init__(self, stream=sys.stderr, **kw): super(cpplint_handler, self).__init__(stream, **kw) self.stream = stream @@ -128,34 +113,22 @@ def emit(self, rec): class cpplint_wrapper(object): - stream = None - tasks_count = 0 - lock = threading.RLock() - def __init__(self, logger, threshold, fmt): self.logger = logger self.threshold = threshold - self.error_count = 0 self.fmt = fmt def __enter__(self): - with cpplint_wrapper.lock: - cpplint_wrapper.tasks_count += 1 - if cpplint_wrapper.tasks_count == 1: - sys.stderr.flush() - cpplint_wrapper.stream = sys.stderr - sys.stderr = self - return self + return self def __exit__(self, exc_type, exc_value, traceback): - with cpplint_wrapper.lock: - cpplint_wrapper.tasks_count -= 1 - if cpplint_wrapper.tasks_count == 0: - sys.stderr = cpplint_wrapper.stream - sys.stderr.flush() - - def isatty(self): - return True + if isinstance(exc_value, Utils.subprocess.CalledProcessError): + messages = [m for m in exc_value.output.splitlines() + if 'Done processing' not in m + and 'Total errors found' not in m] + for message in messages: + self.write(message) + return True def write(self, message): global critical_errors @@ -194,31 +167,43 @@ def __init__(self, *k, **kw): def run(self): global critical_errors - _cpplint_state.SetFilters(self.env.CPPLINT_FILTERS) - break_level = self.env.CPPLINT_BREAK - verbosity = self.env.CPPLINT_LEVEL - with cpplint_wrapper(get_cpplint_logger(self.env.CPPLINT_OUTPUT), - break_level, self.env.CPPLINT_OUTPUT): - ProcessFile(self.inputs[0].abspath(), verbosity) + with cpplint_wrapper(get_cpplint_logger(self.env.CPPLINT_OUTPUT), self.env.CPPLINT_BREAK, self.env.CPPLINT_OUTPUT): + params = {key: str(self.env[key]) for key in self.env if 'CPPLINT_' in key} + if params['CPPLINT_OUTPUT'] == 'waf': + params['CPPLINT_OUTPUT'] = 'emacs' + params['CPPLINT'] = self.env.get_flat('CPPLINT') + cmd = Utils.subst_vars(CPPLINT_STR, params) + env = self.env.env or None + Utils.subprocess.check_output(cmd + self.inputs[0].abspath(), + stderr=Utils.subprocess.STDOUT, + env=env, shell=True) return critical_errors - @TaskGen.extension('.h', '.hh', '.hpp', '.hxx') def cpplint_includes(self, node): pass @TaskGen.feature('cpplint') @TaskGen.before_method('process_source') -def run_cpplint(self): +def post_cpplint(self): if not 
self.env.CPPLINT_INITIALIZED: + for key, value in Options.options.__dict__.items(): + if not key.startswith('CPPLINT_') or self.env[key]: + continue + self.env[key] = value self.env.CPPLINT_INITIALIZED = True - init_env_from_options(self.env) + if self.env.CPPLINT_SKIP: return + if not self.env.CPPLINT_OUTPUT in CPPLINT_RE: return + for src in self.to_list(getattr(self, 'source', [])): - if isinstance(src, str): - self.create_task('cpplint', self.path.find_or_declare(src)) + if isinstance(src, Node.Node): + node = src else: - self.create_task('cpplint', src) + node = self.path.find_or_declare(src) + if not node: + self.bld.fatal('Could not find %r' % src) + self.create_task('cpplint', node) diff --git a/waflib/extras/cross_gnu.py b/waflib/extras/cross_gnu.py new file mode 100644 index 0000000000..309f53b034 --- /dev/null +++ b/waflib/extras/cross_gnu.py @@ -0,0 +1,227 @@ +#!/usr/bin/python +# -*- coding: utf-8 vi:ts=4:noexpandtab +# Tool to provide dedicated variables for cross-compilation + +__author__ = __maintainer__ = "Jérôme Carretero " +__copyright__ = "Jérôme Carretero, 2014" + +""" +This tool allows to use environment variables to define cross-compilation +variables intended for build variants. + +The variables are obtained from the environment in 3 ways: + +1. By defining CHOST, they can be derived as ${CHOST}-${TOOL} +2. By defining HOST_x +3. By defining ${CHOST//-/_}_x + +else one can set ``cfg.env.CHOST`` in ``wscript`` before loading ``cross_gnu``. + +Usage: + +- In your build script:: + + def configure(cfg): + ... + for variant in x_variants: + setenv(variant) + conf.load('cross_gnu') + conf.xcheck_host_var('POUET') + ... + + +- Then:: + + CHOST=arm-hardfloat-linux-gnueabi waf configure + env arm-hardfloat-linux-gnueabi-CC="clang -..." waf configure + CFLAGS=... CHOST=arm-hardfloat-linux-gnueabi HOST_CFLAGS=-g waf configure + HOST_CC="clang -..." waf configure + +This example ``wscript`` compiles to Microchip PIC (xc16-gcc-xyz must be in PATH): + +.. code:: python + + from waflib import Configure + + #from https://gist.github.com/rpuntaie/2bddfb5d7b77db26415ee14371289971 + import waf_variants + + variants='pc fw/variant1 fw/variant2'.split() + + top = "." + out = "../build" + + PIC = '33FJ128GP804' #dsPICxxx + + @Configure.conf + def gcc_modifier_xc16(cfg): + v = cfg.env + v.cprogram_PATTERN = '%s.elf' + v.LINKFLAGS_cprogram = ','.join(['-Wl','','','--defsym=__MPLAB_BUILD=0','','--script=p'+PIC+'.gld', + '--stack=16','--check-sections','--data-init','--pack-data','--handles','--isr','--no-gc-sections', + '--fill-upper=0','--stackguard=16','--no-force-link','--smart-io']) #,'--report-mem']) + v.CFLAGS_cprogram=['-mcpu='+PIC,'-omf=elf','-mlarge-code','-msmart-io=1', + '-msfr-warn=off','-mno-override-inline','-finline','-Winline'] + + def configure(cfg): + if 'fw' in cfg.variant: #firmware + cfg.env.DEST_OS = 'xc16' #cfg.env.CHOST = 'xc16' #works too + cfg.load('c cross_gnu') #cfg.env.CHOST becomes ['xc16'] + ... + else: #configure for pc SW + ... + + def build(bld): + if 'fw' in bld.variant: #firmware + bld.program(source='maintst.c', target='maintst'); + bld(source='maintst.elf', target='maintst.hex', rule="xc16-bin2hex ${SRC} -a -omf=elf") + else: #build for pc SW + ... 
+ +""" + +import os +from waflib import Utils, Configure +from waflib.Tools import ccroot, gcc + +try: + from shlex import quote +except ImportError: + from pipes import quote + +def get_chost_stuff(conf): + """ + Get the CHOST environment variable contents + """ + chost = None + chost_envar = None + if conf.env.CHOST: + chost = conf.env.CHOST[0] + chost_envar = chost.replace('-', '_') + return chost, chost_envar + + +@Configure.conf +def xcheck_var(conf, name, wafname=None, cross=False): + wafname = wafname or name + + if wafname in conf.env: + value = conf.env[wafname] + if isinstance(value, str): + value = [value] + else: + envar = os.environ.get(name) + if not envar: + return + value = Utils.to_list(envar) if envar != '' else [envar] + + conf.env[wafname] = value + if cross: + pretty = 'cross-compilation %s' % wafname + else: + pretty = wafname + conf.msg('Will use %s' % pretty, " ".join(quote(x) for x in value)) + +@Configure.conf +def xcheck_host_prog(conf, name, tool, wafname=None): + wafname = wafname or name + + chost, chost_envar = get_chost_stuff(conf) + + specific = None + if chost: + specific = os.environ.get('%s_%s' % (chost_envar, name)) + + if specific: + value = Utils.to_list(specific) + conf.env[wafname] += value + conf.msg('Will use cross-compilation %s from %s_%s' % (name, chost_envar, name), + " ".join(quote(x) for x in value)) + return + else: + envar = os.environ.get('HOST_%s' % name) + if envar is not None: + value = Utils.to_list(envar) + conf.env[wafname] = value + conf.msg('Will use cross-compilation %s from HOST_%s' % (name, name), + " ".join(quote(x) for x in value)) + return + + if conf.env[wafname]: + return + + value = None + if chost: + value = '%s-%s' % (chost, tool) + + if value: + conf.env[wafname] = value + conf.msg('Will use cross-compilation %s from CHOST' % wafname, value) + +@Configure.conf +def xcheck_host_envar(conf, name, wafname=None): + wafname = wafname or name + + chost, chost_envar = get_chost_stuff(conf) + + specific = None + if chost: + specific = os.environ.get('%s_%s' % (chost_envar, name)) + + if specific: + value = Utils.to_list(specific) + conf.env[wafname] += value + conf.msg('Will use cross-compilation %s from %s_%s' \ + % (name, chost_envar, name), + " ".join(quote(x) for x in value)) + return + + + envar = os.environ.get('HOST_%s' % name) + if envar is None: + return + + value = Utils.to_list(envar) if envar != '' else [envar] + + conf.env[wafname] = value + conf.msg('Will use cross-compilation %s from HOST_%s' % (name, name), + " ".join(quote(x) for x in value)) + + +@Configure.conf +def xcheck_host(conf): + conf.xcheck_var('CHOST', cross=True) + conf.env.CHOST = conf.env.CHOST or [conf.env.DEST_OS] + conf.env.DEST_OS = conf.env.CHOST[0].replace('-','_') + conf.xcheck_host_prog('CC', 'gcc') + conf.xcheck_host_prog('CXX', 'g++') + conf.xcheck_host_prog('LINK_CC', 'gcc') + conf.xcheck_host_prog('LINK_CXX', 'g++') + conf.xcheck_host_prog('AR', 'ar') + conf.xcheck_host_prog('AS', 'as') + conf.xcheck_host_prog('LD', 'ld') + conf.xcheck_host_envar('CFLAGS') + conf.xcheck_host_envar('CXXFLAGS') + conf.xcheck_host_envar('LDFLAGS', 'LINKFLAGS') + conf.xcheck_host_envar('LIB') + conf.xcheck_host_envar('PKG_CONFIG_LIBDIR') + conf.xcheck_host_envar('PKG_CONFIG_PATH') + + if not conf.env.env: + conf.env.env = {} + conf.env.env.update(os.environ) + if conf.env.PKG_CONFIG_LIBDIR: + conf.env.env['PKG_CONFIG_LIBDIR'] = conf.env.PKG_CONFIG_LIBDIR[0] + if conf.env.PKG_CONFIG_PATH: + conf.env.env['PKG_CONFIG_PATH'] = conf.env.PKG_CONFIG_PATH[0] + 
+def configure(conf): + """ + Configuration example for gcc, it will not work for g++/clang/clang++ + """ + conf.xcheck_host() + conf.gcc_common_flags() + conf.gcc_modifier_platform() + conf.cc_load_tools() + conf.cc_add_flags() + conf.link_add_flags() diff --git a/waflib/extras/cython.py b/waflib/extras/cython.py index 7ba7d11ba9..591c274d95 100644 --- a/waflib/extras/cython.py +++ b/waflib/extras/cython.py @@ -2,17 +2,15 @@ # encoding: utf-8 # Thomas Nagy, 2010-2015 -import os,re - -import waflib -import waflib.Logs as _msg +import re from waflib import Task, Logs -from waflib.TaskGen import extension, feature, before_method, after_method +from waflib.TaskGen import extension cy_api_pat = re.compile(r'\s*?cdef\s*?(public|api)\w*') re_cyt = re.compile(r""" - (?:from\s+(\w+)\s+)? # optionally match "from foo" and capture foo - c?import\s(\w+|[*]) # require "import bar" and capture bar + ^\s* # must begin with some whitespace characters + (?:from\s+(\w+)(?:\.\w+)*\s+)? # optionally match "from foo(.baz)" and capture foo + c?import\s(\w+|[*]) # require "import bar" and capture bar """, re.M | re.VERBOSE) @extension('.pyx') @@ -71,9 +69,9 @@ def runnable_status(self): def post_run(self): for x in self.outputs: if x.name.endswith('.h'): - if not os.path.exists(x.abspath()): + if not x.exists(): if Logs.verbose: - Logs.warn('Expected %r' % x.abspath()) + Logs.warn('Expected %r', x.abspath()) x.write('') return Task.Task.post_run(self) @@ -88,21 +86,21 @@ def scan(self): node = self.inputs[0] txt = node.read() - mods = [] + mods = set() for m in re_cyt.finditer(txt): if m.group(1): # matches "from foo import bar" - mods.append(m.group(1)) + mods.add(m.group(1)) else: - mods.append(m.group(2)) + mods.add(m.group(2)) - _msg.debug("cython: mods %r" % mods) + Logs.debug('cython: mods %r', mods) incs = getattr(self.generator, 'cython_includes', []) incs = [self.generator.path.find_dir(x) for x in incs] incs.append(node.parent) found = [] missing = [] - for x in mods: + for x in sorted(mods): for y in incs: k = y.find_resource(x + '.pxd') if k: @@ -116,7 +114,7 @@ def scan(self): if implicit: found.append(implicit) - _msg.debug("cython: found %r" % found) + Logs.debug('cython: found %r', found) # Now the .h created - store them in bld.raw_deps for later use has_api = False @@ -144,6 +142,6 @@ def configure(ctx): if not ctx.env.PYTHON: ctx.fatal('Load the python tool first!') ctx.find_program('cython', var='CYTHON') - if ctx.options.cython_flags: + if hasattr(ctx.options, 'cython_flags'): ctx.env.CYTHONFLAGS = ctx.options.cython_flags diff --git a/waflib/extras/dcc.py b/waflib/extras/dcc.py index b0bd256814..c1a57c04d9 100644 --- a/waflib/extras/dcc.py +++ b/waflib/extras/dcc.py @@ -2,30 +2,29 @@ # encoding: utf-8 # Jérôme Carretero, 2011 (zougloub) -from waflib import Configure, Options, Utils +from waflib import Options from waflib.Tools import ccroot from waflib.Configure import conf @conf def find_dcc(conf): - cc = conf.find_program(['dcc'], var='CC', path_list=getattr(Options.options, 'diabbindir', "")) + conf.find_program(['dcc'], var='CC', path_list=getattr(Options.options, 'diabbindir', "")) conf.env.CC_NAME = 'dcc' @conf def find_dld(conf): - ld = conf.find_program(['dld'], var='LINK_CC', path_list=getattr(Options.options, 'diabbindir', "")) + conf.find_program(['dld'], var='LINK_CC', path_list=getattr(Options.options, 'diabbindir', "")) conf.env.LINK_CC_NAME = 'dld' @conf def find_dar(conf): - ar = conf.find_program(['dar'], var='DAR', path_list=getattr(Options.options, 'diabbindir', "")) - 
conf.env.AR = ar + conf.find_program(['dar'], var='AR', path_list=getattr(Options.options, 'diabbindir', "")) conf.env.AR_NAME = 'dar' conf.env.ARFLAGS = 'rcs' @conf def find_ddump(conf): - prg = conf.find_program(['ddump'], var='DDUMP', path_list=getattr(Options.options, 'diabbindir', "")) + conf.find_program(['ddump'], var='DDUMP', path_list=getattr(Options.options, 'diabbindir', "")) @conf def dcc_common_flags(conf): @@ -34,7 +33,8 @@ def dcc_common_flags(conf): v['CC_TGT_F'] = ['-c', '-o'] # linker - if not v['LINK_CC']: v['LINK_CC'] = v['CC'] + if not v['LINK_CC']: + v['LINK_CC'] = v['CC'] v['CCLNK_SRC_F'] = [] v['CCLNK_TGT_F'] = ['-o'] v['CPPPATH_ST'] = '-I%s' diff --git a/waflib/extras/distnet.py b/waflib/extras/distnet.py index d18c186b6a..8084b156b1 100644 --- a/waflib/extras/distnet.py +++ b/waflib/extras/distnet.py @@ -44,7 +44,7 @@ def safe_urlencode(data): TIMEOUT = 60 REQUIRES = 'requires.txt' -re_com = re.compile('\s*#.*', re.M) +re_com = re.compile(r'\s*#.*', re.M) def total_version_order(num): lst = num.split('.') @@ -101,16 +101,18 @@ def make_tarfile(self, filename, files, **kw): tarinfo.uid = tarinfo.gid = 0 tarinfo.uname = tarinfo.gname = 'root' tarinfo.size = os.stat(x).st_size + if os.environ.get('SOURCE_DATE_EPOCH'): + tarinfo.mtime = int(os.environ.get('SOURCE_DATE_EPOCH')) # TODO - more archive creation options? if kw.get('bare', True): tarinfo.name = os.path.split(x)[1] else: tarinfo.name = endname + x # todo, if tuple, then.. - Logs.debug("adding %r to %s" % (tarinfo.name, filename)) + Logs.debug('distnet: adding %r to %s', tarinfo.name, filename) with open(x, 'rb') as f: tar.addfile(tarinfo, f) - Logs.info('Created %s' % filename) + Logs.info('Created %s', filename) class publish(Context.Context): fun = 'publish' @@ -223,7 +225,7 @@ def compute_dependencies(self, filename=REQUIRES): try: response = urlopen(req, timeout=TIMEOUT) except URLError as e: - Logs.warn('The package server is down! %r' % e) + Logs.warn('The package server is down! 
%r', e) self.constraints = self.local_resolve(text) else: ret = response.read() @@ -243,11 +245,11 @@ def check_errors(self): reasons = c.why() if len(reasons) == 1: - Logs.error('%s but no matching package could be found in this repository' % reasons[0]) + Logs.error('%s but no matching package could be found in this repository', reasons[0]) else: - Logs.error('Conflicts on package %r:' % c.pkgname) + Logs.error('Conflicts on package %r:', c.pkgname) for r in reasons: - Logs.error(' %s' % r) + Logs.error(' %s', r) if errors: self.fatal('The package requirements cannot be satisfied!') @@ -255,7 +257,6 @@ def load_constraints(self, pkgname, pkgver, requires=REQUIRES): try: return self.cache_constraints[(pkgname, pkgver)] except KeyError: - #Logs.error("no key %r" % (pkgname, pkgver)) text = Utils.readf(os.path.join(get_distnet_cache(), pkgname, pkgver, requires)) ret = parse_constraints(text) self.cache_constraints[(pkgname, pkgver)] = ret @@ -329,7 +330,7 @@ def get_results(self): def solution_to_constraints(self, versions, constraints): solution = [] - for p in versions.keys(): + for p in versions: c = constraint() solution.append(c) @@ -403,7 +404,6 @@ def __iter__(self): if x.pkgname == self.myproject: continue yield x - raise StopIteration def execute(self): self.compute_dependencies() diff --git a/waflib/extras/doxygen.py b/waflib/extras/doxygen.py index acd4398dcf..0fda70361f 100644 --- a/waflib/extras/doxygen.py +++ b/waflib/extras/doxygen.py @@ -26,9 +26,9 @@ def build(bld): bld(features="doxygen", doxyfile='Doxyfile', ...) """ -from fnmatch import fnmatchcase -import os, os.path, re, stat -from waflib import Task, Utils, Node, Logs, Errors +import os, os.path, re +from collections import OrderedDict +from waflib import Task, Utils, Node from waflib.TaskGen import feature DOXY_STR = '"${DOXYGEN}" - ' @@ -41,7 +41,13 @@ def build(bld): re_rl = re.compile('\\\\\r*\n', re.MULTILINE) re_nl = re.compile('\r*\n', re.M) def parse_doxy(txt): - tbl = {} + ''' + Parses a doxygen file. + Returns an ordered dictionary. We cannot return a default dictionary, as the + order in which the entries are reported does matter, especially for the + '@INCLUDE' lines. 
+ ''' + tbl = OrderedDict() txt = re_rl.sub('', txt) lines = re_nl.split(txt) for x in lines: @@ -63,6 +69,7 @@ def parse_doxy(txt): class doxygen(Task.Task): vars = ['DOXYGEN', 'DOXYFLAGS'] color = 'BLUE' + ext_in = [ '.py', '.c', '.h', '.java', '.pb.cc' ] def runnable_status(self): ''' @@ -79,6 +86,12 @@ def runnable_status(self): if not getattr(self, 'pars', None): txt = self.inputs[0].read() self.pars = parse_doxy(txt) + + # Override with any parameters passed to the task generator + if getattr(self.generator, 'pars', None): + for k, v in self.generator.pars.items(): + self.pars[k] = v + if self.pars.get('OUTPUT_DIRECTORY'): # Use the path parsed from the Doxyfile as an absolute path output_node = self.inputs[0].parent.get_bld().make_node(self.pars['OUTPUT_DIRECTORY']) @@ -88,11 +101,6 @@ def runnable_status(self): output_node.mkdir() self.pars['OUTPUT_DIRECTORY'] = output_node.abspath() - # Override with any parameters passed to the task generator - if getattr(self.generator, 'pars', None): - for k, v in self.generator.pars.iteritems(): - self.pars[k] = v - self.doxy_inputs = getattr(self, 'doxy_inputs', []) if not self.pars.get('INPUT'): self.doxy_inputs.append(self.inputs[0].parent) @@ -112,13 +120,18 @@ def runnable_status(self): self.output_dir = bld.root.find_dir(self.pars['OUTPUT_DIRECTORY']) self.signature() - return Task.Task.runnable_status(self) + ret = Task.Task.runnable_status(self) + if ret == Task.SKIP_ME: + # in case the files were removed + self.add_install() + return ret def scan(self): exclude_patterns = self.pars.get('EXCLUDE_PATTERNS','').split() + exclude_patterns = [pattern.replace('*/', '**/') for pattern in exclude_patterns] file_patterns = self.pars.get('FILE_PATTERNS','').split() if not file_patterns: - file_patterns = DOXY_FILE_PATTERNS + file_patterns = DOXY_FILE_PATTERNS.split() if self.pars.get('RECURSIVE') == 'YES': file_patterns = ["**/%s" % pattern for pattern in file_patterns] nodes = [] @@ -145,16 +158,20 @@ def run(self): def post_run(self): nodes = self.output_dir.ant_glob('**/*', quiet=True) for x in nodes: - x.sig = Utils.h_file(x.abspath()) + self.generator.bld.node_sigs[x] = self.uid() + self.add_install() + return Task.Task.post_run(self) + + def add_install(self): + nodes = self.output_dir.ant_glob('**/*', quiet=True) self.outputs += nodes if getattr(self.generator, 'install_path', None): if not getattr(self.generator, 'doxy_tar', None): - self.generator.bld.install_files(self.generator.install_path, - self.outputs, + self.generator.add_install_files(install_to=self.generator.install_path, + install_from=self.outputs, postpone=False, cwd=self.output_dir, relative_trick=True) - return Task.Task.post_run(self) class tar(Task.Task): "quick tar creation" @@ -182,19 +199,19 @@ def __str__(self): @feature('doxygen') def process_doxy(self): if not getattr(self, 'doxyfile', None): - self.generator.bld.fatal('no doxyfile??') + self.bld.fatal('no doxyfile variable specified??') node = self.doxyfile if not isinstance(node, Node.Node): node = self.path.find_resource(node) if not node: - raise ValueError('doxygen file not found') + self.bld.fatal('doxygen file %s not found' % self.doxyfile) # the task instance - dsk = self.create_task('doxygen', node) + dsk = self.create_task('doxygen', node, always_run=getattr(self, 'always', False)) if getattr(self, 'doxy_tar', None): - tsk = self.create_task('tar') + tsk = self.create_task('tar', always_run=getattr(self, 'always', False)) tsk.input_tasks = [dsk] tsk.set_outputs(self.path.find_or_declare(self.doxy_tar)) 
if self.doxy_tar.endswith('bz2'): @@ -204,7 +221,7 @@ def process_doxy(self): else: tsk.env['TAROPTS'] = ['cf'] if getattr(self, 'install_path', None): - self.bld.install_files(self.install_path, tsk.outputs) + self.add_install_files(install_to=self.install_path, install_from=tsk.outputs) def configure(conf): ''' diff --git a/waflib/extras/dpapi.py b/waflib/extras/dpapi.py index c988b1c043..b94d482358 100644 --- a/waflib/extras/dpapi.py +++ b/waflib/extras/dpapi.py @@ -35,7 +35,7 @@ def get_data(blob_out): pbData = blob_out.pbData buffer = c_buffer(cbData) memcpy(buffer, pbData, cbData) - LocalFree(pbData); + LocalFree(pbData) return buffer.raw @conf diff --git a/waflib/extras/eclipse.py b/waflib/extras/eclipse.py index c5aee007f9..49ca9686b7 100644 --- a/waflib/extras/eclipse.py +++ b/waflib/extras/eclipse.py @@ -10,11 +10,14 @@ def options(opt): opt.load('eclipse') +To add additional targets beside standard ones (configure, dist, install, check) +the environment ECLIPSE_EXTRA_TARGETS can be set (ie. to ['test', 'lint', 'docs']) + $ waf configure eclipse """ import sys, os -from waflib import Utils, Logs, Context, Options, Build, TaskGen, Scripting +from waflib import Utils, Logs, Context, Build, TaskGen, Scripting, Errors, Node from xml.dom.minidom import Document STANDARD_INCLUDES = [ '/usr/local/include', '/usr/include' ] @@ -23,6 +26,10 @@ def options(opt): cdt_mk = oe_cdt + '.make.core' cdt_core = oe_cdt + '.core' cdt_bld = oe_cdt + '.build.core' +extbuilder_dir = '.externalToolBuilders' +extbuilder_name = 'Waf_Builder.launch' +settings_dir = '.settings' +settings_name = 'language.settings.xml' class eclipse(Build.BuildContext): cmd = 'eclipse' @@ -40,6 +47,10 @@ def execute(self): appname = getattr(Context.g_module, Context.APPNAME, os.path.basename(self.srcnode.abspath())) self.create_cproject(appname, pythonpath=self.env['ECLIPSE_PYTHON_PATH']) + # Helper to dump the XML document content to XML with UTF-8 encoding + def write_conf_to_xml(self, filename, document): + self.srcnode.make_node(filename).write(document.toprettyxml(encoding='UTF-8'), flags='wb') + def create_cproject(self, appname, workspace_includes=[], pythonpath=[]): """ Create the Eclipse CDT .project and .cproject files @@ -49,10 +60,26 @@ def create_cproject(self, appname, workspace_includes=[], pythonpath=[]): "Unresolved Inclusion" errors in the Eclipse editor @param pythonpath Optional project specific python paths """ + hasc = hasjava = haspython = False source_dirs = [] cpppath = self.env['CPPPATH'] + javasrcpath = [] + javalibpath = [] + includes = STANDARD_INCLUDES if sys.platform != 'win32': - cpppath += STANDARD_INCLUDES + cc = self.env.CC or self.env.CXX + if cc: + cmd = cc + ['-xc++', '-E', '-Wp,-v', '-'] + try: + gccout = self.cmd_and_log(cmd, output=Context.STDERR, quiet=Context.BOTH, input='\n'.encode()).splitlines() + except Errors.WafError: + pass + else: + includes = [] + for ipath in gccout: + if ipath.startswith(' /'): + includes.append(ipath[1:]) + cpppath += includes Logs.warn('Generating Eclipse CDT project files') for g in self.groups: @@ -61,41 +88,137 @@ def create_cproject(self, appname, workspace_includes=[], pythonpath=[]): continue tg.post() + + # Add local Python modules paths to configuration so object resolving will work in IDE + # This may also contain generated files (ie. 
pyqt5 or protoc) that get picked from build + if 'py' in tg.features: + pypath = tg.path.relpath() + py_installfrom = getattr(tg, 'install_from', None) + if isinstance(py_installfrom, Node.Node): + pypath = py_installfrom.path_from(self.root.make_node(self.top_dir)) + if pypath not in pythonpath: + pythonpath.append(pypath) + haspython = True + + # Add Java source directories so object resolving works in IDE + # This may also contain generated files (ie. protoc) that get picked from build + if 'javac' in tg.features: + java_src = tg.path.relpath() + java_srcdir = getattr(tg.javac_task, 'srcdir', None) + if java_srcdir: + if isinstance(java_srcdir, Node.Node): + java_srcdir = [java_srcdir] + for x in Utils.to_list(java_srcdir): + x = x.path_from(self.root.make_node(self.top_dir)) + if x not in javasrcpath: + javasrcpath.append(x) + else: + if java_src not in javasrcpath: + javasrcpath.append(java_src) + hasjava = True + + # Check if there are external dependencies and add them as external jar so they will be resolved by Eclipse + usedlibs=getattr(tg, 'use', []) + for x in Utils.to_list(usedlibs): + for cl in Utils.to_list(tg.env['CLASSPATH_'+x]): + if cl not in javalibpath: + javalibpath.append(cl) + if not getattr(tg, 'link_task', None): continue - l = Utils.to_list(getattr(tg, "includes", '')) - sources = Utils.to_list(getattr(tg, 'source', '')) features = Utils.to_list(getattr(tg, 'features', '')) is_cc = 'c' in features or 'cxx' in features - bldpath = tg.path.bldpath() - - base = os.path.normpath(os.path.join(self.bldnode.name, tg.path.srcpath())) - - if is_cc: - sources_dirs = set([src.parent for src in tg.to_nodes(sources)]) - incnodes = tg.to_incnodes(tg.to_list(getattr(tg, 'includes', [])) + tg.env['INCLUDES']) for p in incnodes: path = p.path_from(self.srcnode) - workspace_includes.append(path) + + if (path.startswith("/")): + if path not in cpppath: + cpppath.append(path) + else: + if path not in workspace_includes: + workspace_includes.append(path) if is_cc and path not in source_dirs: source_dirs.append(path) - project = self.impl_create_project(sys.executable, appname) - self.srcnode.make_node('.project').write(project.toprettyxml()) - - waf = os.path.abspath(sys.argv[0]) - project = self.impl_create_cproject(sys.executable, waf, appname, workspace_includes, cpppath, source_dirs) - self.srcnode.make_node('.cproject').write(project.toprettyxml()) - - project = self.impl_create_pydevproject(appname, sys.path, pythonpath) - self.srcnode.make_node('.pydevproject').write(project.toprettyxml()) - - def impl_create_project(self, executable, appname): + hasc = True + + waf_executable = os.path.abspath(sys.argv[0]) + project = self.impl_create_project(sys.executable, appname, hasc, hasjava, haspython, waf_executable) + self.write_conf_to_xml('.project', project) + + if hasc: + project = self.impl_create_cproject(sys.executable, waf_executable, appname, workspace_includes, cpppath, source_dirs) + self.write_conf_to_xml('.cproject', project) + + if haspython: + project = self.impl_create_pydevproject(sys.path, pythonpath) + self.write_conf_to_xml('.pydevproject', project) + + if hasjava: + project = self.impl_create_javaproject(javasrcpath, javalibpath) + self.write_conf_to_xml('.classpath', project) + + # Create editor language settings to have correct standards applied in IDE, as per project configuration + try: + os.mkdir(settings_dir) + except OSError: + pass # Ignore if dir already exists + + lang_settings = Document() + project = lang_settings.createElement('project') + + # 
Language configurations for C and C++ via cdt + if hasc: + configuration = self.add(lang_settings, project, 'configuration', + {'id' : 'org.eclipse.cdt.core.default.config.1', 'name': 'Default'}) + + extension = self.add(lang_settings, configuration, 'extension', {'point': 'org.eclipse.cdt.core.LanguageSettingsProvider'}) + + provider = self.add(lang_settings, extension, 'provider', + { 'copy-of': 'extension', + 'id': 'org.eclipse.cdt.ui.UserLanguageSettingsProvider'}) + + provider = self.add(lang_settings, extension, 'provider-reference', + { 'id': 'org.eclipse.cdt.core.ReferencedProjectsLanguageSettingsProvider', + 'ref': 'shared-provider'}) + + provider = self.add(lang_settings, extension, 'provider-reference', + { 'id': 'org.eclipse.cdt.managedbuilder.core.MBSLanguageSettingsProvider', + 'ref': 'shared-provider'}) + + # C and C++ are kept as separated providers so appropriate flags are used also in mixed projects + if self.env.CC: + provider = self.add(lang_settings, extension, 'provider', + { 'class': 'org.eclipse.cdt.managedbuilder.language.settings.providers.GCCBuiltinSpecsDetector', + 'console': 'false', + 'id': 'org.eclipse.cdt.managedbuilder.language.settings.providers.GCCBuiltinSpecsDetector.1', + 'keep-relative-paths' : 'false', + 'name': 'CDT GCC Built-in Compiler Settings', + 'parameter': '%s %s ${FLAGS} -E -P -v -dD "${INPUTS}"'%(self.env.CC[0],' '.join(self.env['CFLAGS'])), + 'prefer-non-shared': 'true' }) + + self.add(lang_settings, provider, 'language-scope', { 'id': 'org.eclipse.cdt.core.gcc'}) + + if self.env.CXX: + provider = self.add(lang_settings, extension, 'provider', + { 'class': 'org.eclipse.cdt.managedbuilder.language.settings.providers.GCCBuiltinSpecsDetector', + 'console': 'false', + 'id': 'org.eclipse.cdt.managedbuilder.language.settings.providers.GCCBuiltinSpecsDetector.2', + 'keep-relative-paths' : 'false', + 'name': 'CDT GCC Built-in Compiler Settings', + 'parameter': '%s %s ${FLAGS} -E -P -v -dD "${INPUTS}"'%(self.env.CXX[0],' '.join(self.env['CXXFLAGS'])), + 'prefer-non-shared': 'true' }) + self.add(lang_settings, provider, 'language-scope', { 'id': 'org.eclipse.cdt.core.g++'}) + + lang_settings.appendChild(project) + self.write_conf_to_xml('%s%s%s'%(settings_dir, os.path.sep, settings_name), lang_settings) + + def impl_create_project(self, executable, appname, hasc, hasjava, haspython, waf_executable): doc = Document() projectDescription = doc.createElement('projectDescription') self.add(doc, projectDescription, 'name', appname) @@ -103,35 +226,71 @@ def impl_create_project(self, executable, appname): self.add(doc, projectDescription, 'projects') buildSpec = self.add(doc, projectDescription, 'buildSpec') buildCommand = self.add(doc, buildSpec, 'buildCommand') - self.add(doc, buildCommand, 'name', oe_cdt + '.managedbuilder.core.genmakebuilder') self.add(doc, buildCommand, 'triggers', 'clean,full,incremental,') arguments = self.add(doc, buildCommand, 'arguments') - # the default make-style targets are overwritten by the .cproject values - dictionaries = { - cdt_mk + '.contents': cdt_mk + '.activeConfigSettings', - cdt_mk + '.enableAutoBuild': 'false', - cdt_mk + '.enableCleanBuild': 'true', - cdt_mk + '.enableFullBuild': 'true', - } + dictionaries = {} + + # If CDT is present, instruct this one to call waf as it is more flexible (separate build/clean ...) 
+ if hasc: + self.add(doc, buildCommand, 'name', oe_cdt + '.managedbuilder.core.genmakebuilder') + # the default make-style targets are overwritten by the .cproject values + dictionaries = { + cdt_mk + '.contents': cdt_mk + '.activeConfigSettings', + cdt_mk + '.enableAutoBuild': 'false', + cdt_mk + '.enableCleanBuild': 'true', + cdt_mk + '.enableFullBuild': 'true', + } + else: + # Otherwise for Java/Python an external builder tool is created that will call waf build + self.add(doc, buildCommand, 'name', 'org.eclipse.ui.externaltools.ExternalToolBuilder') + dictionaries = { + 'LaunchConfigHandle': '/%s/%s'%(extbuilder_dir, extbuilder_name), + } + # The definition is in a separate directory XML file + try: + os.mkdir(extbuilder_dir) + except OSError: + pass # Ignore error if already exists + + # Populate here the external builder XML calling waf + builder = Document() + launchConfiguration = doc.createElement('launchConfiguration') + launchConfiguration.setAttribute('type', 'org.eclipse.ui.externaltools.ProgramBuilderLaunchConfigurationType') + self.add(doc, launchConfiguration, 'booleanAttribute', {'key': 'org.eclipse.debug.ui.ATTR_LAUNCH_IN_BACKGROUND', 'value': 'false'}) + self.add(doc, launchConfiguration, 'booleanAttribute', {'key': 'org.eclipse.ui.externaltools.ATTR_TRIGGERS_CONFIGURED', 'value': 'true'}) + self.add(doc, launchConfiguration, 'stringAttribute', {'key': 'org.eclipse.ui.externaltools.ATTR_LOCATION', 'value': waf_executable}) + self.add(doc, launchConfiguration, 'stringAttribute', {'key': 'org.eclipse.ui.externaltools.ATTR_RUN_BUILD_KINDS', 'value': 'full,incremental,'}) + self.add(doc, launchConfiguration, 'stringAttribute', {'key': 'org.eclipse.ui.externaltools.ATTR_TOOL_ARGUMENTS', 'value': 'build'}) + self.add(doc, launchConfiguration, 'stringAttribute', {'key': 'org.eclipse.ui.externaltools.ATTR_WORKING_DIRECTORY', 'value': '${project_loc}'}) + builder.appendChild(launchConfiguration) + # And write the XML to the file references before + self.write_conf_to_xml('%s%s%s'%(extbuilder_dir, os.path.sep, extbuilder_name), builder) + + for k, v in dictionaries.items(): self.addDictionary(doc, arguments, k, v) natures = self.add(doc, projectDescription, 'natures') - nature_list = """ - core.ccnature - managedbuilder.core.ScannerConfigNature - managedbuilder.core.managedBuildNature - core.cnature - """.split() - for n in nature_list: - self.add(doc, natures, 'nature', oe_cdt + '.' + n) - self.add(doc, natures, 'nature', 'org.python.pydev.pythonNature') + if hasc: + nature_list = """ + core.ccnature + managedbuilder.core.ScannerConfigNature + managedbuilder.core.managedBuildNature + core.cnature + """.split() + for n in nature_list: + self.add(doc, natures, 'nature', oe_cdt + '.' 
+ n) + + if haspython: + self.add(doc, natures, 'nature', 'org.python.pydev.pythonNature') + if hasjava: + self.add(doc, natures, 'nature', 'org.eclipse.jdt.core.javanature') doc.appendChild(projectDescription) return doc - def impl_create_cproject(self, executable, waf, appname, workspace_includes, cpppath, source_dirs=[]): + def impl_create_cproject(self, executable, waf_executable, appname, workspace_includes, cpppath, source_dirs=[]): doc = Document() doc.appendChild(doc.createProcessingInstruction('fileVersion', '4.0.0')) cconf_id = cdt_core + '.default.config.1' @@ -156,11 +315,9 @@ def impl_create_cproject(self, executable, waf, appname, workspace_includes, cpp GASErrorParser GLDErrorParser """.split() - ext = self.add(doc, extensions, 'extension', - {'id': cdt_core + '.ELF', 'point':cdt_core + '.BinaryParser'}) + self.add(doc, extensions, 'extension', {'id': cdt_core + '.ELF', 'point':cdt_core + '.BinaryParser'}) for e in extension_list: - ext = self.add(doc, extensions, 'extension', - {'id': cdt_core + '.' + e, 'point':cdt_core + '.ErrorParser'}) + self.add(doc, extensions, 'extension', {'id': cdt_core + '.' + e, 'point':cdt_core + '.ErrorParser'}) storageModule = self.add(doc, cconf, 'storageModule', {'moduleId': 'cdtBuildSystem', 'version': '4.0.0'}) @@ -178,33 +335,32 @@ def impl_create_cproject(self, executable, waf, appname, workspace_includes, cpp 'resourceTypeBasedDiscovery': 'false', 'superClass': cdt_bld + '.prefbase.toolchain'}) - targetPlatform = self.add(doc, toolChain, 'targetPlatform', - { 'binaryParser': 'org.eclipse.cdt.core.ELF', - 'id': cdt_bld + '.prefbase.toolchain.1', 'name': ''}) - - waf_build = '"%s" %s'%(waf, eclipse.fun) - waf_clean = '"%s" clean'%(waf) - builder = self.add(doc, toolChain, 'builder', - {'autoBuildTarget': waf_build, - 'command': executable, - 'enableAutoBuild': 'false', - 'cleanBuildTarget': waf_clean, - 'enableIncrementalBuild': 'true', - 'id': cdt_bld + '.settings.default.builder.1', - 'incrementalBuildTarget': waf_build, - 'managedBuildOn': 'false', - 'name': 'Gnu Make Builder', - 'superClass': cdt_bld + '.settings.default.builder'}) - + self.add(doc, toolChain, 'targetPlatform', {'binaryParser': 'org.eclipse.cdt.core.ELF', 'id': cdt_bld + '.prefbase.toolchain.1', 'name': ''}) + + waf_build = '"%s" %s'%(waf_executable, eclipse.fun) + waf_clean = '"%s" clean'%(waf_executable) + self.add(doc, toolChain, 'builder', + {'autoBuildTarget': waf_build, + 'command': executable, + 'enableAutoBuild': 'false', + 'cleanBuildTarget': waf_clean, + 'enableIncrementalBuild': 'true', + 'id': cdt_bld + '.settings.default.builder.1', + 'incrementalBuildTarget': waf_build, + 'managedBuildOn': 'false', + 'name': 'Gnu Make Builder', + 'superClass': cdt_bld + '.settings.default.builder'}) + + tool_index = 1; for tool_name in ("Assembly", "GNU C++", "GNU C"): tool = self.add(doc, toolChain, 'tool', - {'id': cdt_bld + '.settings.holder.1', + {'id': cdt_bld + '.settings.holder.' + str(tool_index), 'name': tool_name, 'superClass': cdt_bld + '.settings.holder'}) if cpppath or workspace_includes: incpaths = cdt_bld + '.settings.holder.incpaths' option = self.add(doc, tool, 'option', - {'id': incpaths+'.1', + {'id': incpaths + '.' 
+ str(tool_index), 'name': 'Include Paths', 'superClass': incpaths, 'valueType': 'includePath'}) @@ -217,10 +373,12 @@ def impl_create_cproject(self, executable, waf, appname, workspace_includes, cpp {'builtIn': 'false', 'value': '"%s"'%(i)}) if tool_name == "GNU C++" or tool_name == "GNU C": - self.add(doc,tool,'inputType',{ 'id':'org.eclipse.cdt.build.core.settings.holder.inType.1', \ - 'languageId':'org.eclipse.cdt.core.gcc','languageName':tool_name, \ + self.add(doc,tool,'inputType',{ 'id':'org.eclipse.cdt.build.core.settings.holder.inType.' + str(tool_index), \ + 'languageId':'org.eclipse.cdt.core.gcc' if tool_name == "GNU C" else 'org.eclipse.cdt.core.g++','languageName':tool_name, \ 'sourceContentType':'org.eclipse.cdt.core.cSource,org.eclipse.cdt.core.cHeader', \ 'superClass':'org.eclipse.cdt.build.core.settings.holder.inType' }) + tool_index += 1 + if source_dirs: sourceEntries = self.add(doc, config, 'sourceEntries') for i in source_dirs: @@ -240,23 +398,30 @@ def impl_create_cproject(self, executable, waf, appname, workspace_includes, cpp buildTargets = self.add(doc, storageModule, 'buildTargets') def addTargetWrap(name, runAll): return self.addTarget(doc, buildTargets, executable, name, - '"%s" %s'%(waf, name), runAll) + '"%s" %s'%(waf_executable, name), runAll) addTargetWrap('configure', True) addTargetWrap('dist', False) addTargetWrap('install', False) addTargetWrap('check', False) + for addTgt in self.env.ECLIPSE_EXTRA_TARGETS or []: + addTargetWrap(addTgt, False) storageModule = self.add(doc, cproject, 'storageModule', {'moduleId': 'cdtBuildSystem', 'version': '4.0.0'}) - project = self.add(doc, storageModule, 'project', - {'id': '%s.null.1'%appname, 'name': appname}) + self.add(doc, storageModule, 'project', {'id': '%s.null.1'%appname, 'name': appname}) + + storageModule = self.add(doc, cproject, 'storageModule', + {'moduleId': 'org.eclipse.cdt.core.LanguageSettingsProviders'}) + + storageModule = self.add(doc, cproject, 'storageModule', + {'moduleId': 'scannerConfiguration'}) doc.appendChild(cproject) return doc - def impl_create_pydevproject(self, appname, system_path, user_path): + def impl_create_pydevproject(self, system_path, user_path): # create a pydevproject file doc = Document() doc.appendChild(doc.createProcessingInstruction('eclipse-pydev', 'version="1.0"')) @@ -278,11 +443,30 @@ def impl_create_pydevproject(self, appname, system_path, user_path): prop = self.add(doc, pydevproject, 'pydev_pathproperty', {'name':'org.python.pydev.PROJECT_SOURCE_PATH'}) for i in user_path: - self.add(doc, prop, 'path', '/'+appname+'/'+i) + self.add(doc, prop, 'path', '/${PROJECT_DIR_NAME}/'+i) doc.appendChild(pydevproject) return doc + def impl_create_javaproject(self, javasrcpath, javalibpath): + # create a .classpath file for java usage + doc = Document() + javaproject = doc.createElement('classpath') + if javasrcpath: + for i in javasrcpath: + self.add(doc, javaproject, 'classpathentry', + {'kind': 'src', 'path': i}) + + if javalibpath: + for i in javalibpath: + self.add(doc, javaproject, 'classpathentry', + {'kind': 'lib', 'path': i}) + + self.add(doc, javaproject, 'classpathentry', {'kind': 'con', 'path': 'org.eclipse.jdt.launching.JRE_CONTAINER'}) + self.add(doc, javaproject, 'classpathentry', {'kind': 'output', 'path': self.bldnode.name }) + doc.appendChild(javaproject) + return doc + def addDictionary(self, doc, parent, k, v): dictionary = self.add(doc, parent, 'dictionary') self.add(doc, dictionary, 'key', k) diff --git a/waflib/extras/erlang.py b/waflib/extras/erlang.py 
index b6349fe0ca..0b93d9a4f4 100644 --- a/waflib/extras/erlang.py +++ b/waflib/extras/erlang.py @@ -1,19 +1,110 @@ #!/usr/bin/env python # encoding: utf-8 # Thomas Nagy, 2010 (ita) +# Przemyslaw Rzepecki, 2016 """ Erlang support """ -from waflib import TaskGen +import re +from waflib import Task, TaskGen +from waflib.TaskGen import feature, after_method, before_method +# to load the method "to_incnodes" below +from waflib.Tools import ccroot -TaskGen.declare_chain(name = 'erlc', - rule = '${ERLC} ${ERLC_FLAGS} ${SRC[0].abspath()} -o ${TGT[0].name}', - ext_in = '.erl', - ext_out = '.beam') +# Those flags are required by the Erlang VM to execute/evaluate code in +# non-interactive mode. It is used in this tool to create Erlang modules +# documentation and run unit tests. The user can pass additional arguments to the +# 'erl' command with ERL_FLAGS environment variable. +EXEC_NON_INTERACTIVE = ['-noshell', '-noinput', '-eval'] def configure(conf): conf.find_program('erlc', var='ERLC') - conf.env.ERLC_FLAGS = [] + conf.find_program('erl', var='ERL') + conf.add_os_flags('ERLC_FLAGS') + conf.add_os_flags('ERL_FLAGS') + conf.env.ERLC_DEF_PATTERN = '-D%s' + conf.env.ERLC_INC_PATTERN = '-I%s' + +@TaskGen.extension('.erl') +def process_erl_node(self, node): + tsk = self.create_task('erl', node, node.change_ext('.beam')) + tsk.erlc_incnodes = [tsk.outputs[0].parent] + self.to_incnodes(self.includes) + tsk.env.append_value('ERLC_INCPATHS', [x.abspath() for x in tsk.erlc_incnodes]) + tsk.env.append_value('ERLC_DEFINES', self.to_list(getattr(self, 'defines', []))) + tsk.env.append_value('ERLC_FLAGS', self.to_list(getattr(self, 'flags', []))) + tsk.cwd = tsk.outputs[0].parent + +class erl(Task.Task): + color = 'GREEN' + run_str = '${ERLC} ${ERL_FLAGS} ${ERLC_INC_PATTERN:ERLC_INCPATHS} ${ERLC_DEF_PATTERN:ERLC_DEFINES} ${SRC}' + + def scan(task): + node = task.inputs[0] + + deps = [] + scanned = set([]) + nodes_to_scan = [node] + + for n in nodes_to_scan: + if n.abspath() in scanned: + continue + + for i in re.findall(r'-include\("(.*)"\)\.', n.read()): + for d in task.erlc_incnodes: + r = d.find_node(i) + if r: + deps.append(r) + nodes_to_scan.append(r) + break + scanned.add(n.abspath()) + + return (deps, []) + +@TaskGen.extension('.beam') +def process(self, node): + pass + + +class erl_test(Task.Task): + color = 'BLUE' + run_str = '${ERL} ${ERL_FLAGS} ${ERL_TEST_FLAGS}' + +@feature('eunit') +@after_method('process_source') +def add_erl_test_run(self): + test_modules = [t.outputs[0] for t in self.tasks] + test_task = self.create_task('erl_test') + test_task.set_inputs(self.source + test_modules) + test_task.cwd = test_modules[0].parent + + test_task.env.append_value('ERL_FLAGS', self.to_list(getattr(self, 'flags', []))) + + test_list = ", ".join([m.change_ext("").path_from(test_task.cwd)+":test()" for m in test_modules]) + test_flag = 'halt(case lists:all(fun(Elem) -> Elem == ok end, [%s]) of true -> 0; false -> 1 end).' 
% test_list + test_task.env.append_value('ERL_TEST_FLAGS', EXEC_NON_INTERACTIVE) + test_task.env.append_value('ERL_TEST_FLAGS', test_flag) + + +class edoc(Task.Task): + color = 'BLUE' + run_str = "${ERL} ${ERL_FLAGS} ${ERL_DOC_FLAGS}" + def keyword(self): + return 'Generating edoc' + +@feature('edoc') +@before_method('process_source') +def add_edoc_task(self): + # do not process source, it would create double erl->beam task + self.meths.remove('process_source') + e = self.path.find_resource(self.source) + t = e.change_ext('.html') + png = t.parent.make_node('erlang.png') + css = t.parent.make_node('stylesheet.css') + tsk = self.create_task('edoc', e, [t, png, css]) + tsk.cwd = tsk.outputs[0].parent + tsk.env.append_value('ERL_DOC_FLAGS', EXEC_NON_INTERACTIVE) + tsk.env.append_value('ERL_DOC_FLAGS', 'edoc:files(["%s"]), halt(0).' % tsk.inputs[0].abspath()) + # TODO the above can break if a file path contains '"' diff --git a/waflib/extras/fast_partial.py b/waflib/extras/fast_partial.py new file mode 100644 index 0000000000..90a94723bb --- /dev/null +++ b/waflib/extras/fast_partial.py @@ -0,0 +1,531 @@ +#! /usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2017-2018 (ita) + +""" +A system for fast partial rebuilds + +Creating a large amount of task objects up front can take some time. +By making a few assumptions, it is possible to avoid posting creating +task objects for targets that are already up-to-date. + +On a silly benchmark the gain observed for 1M tasks can be 5m->10s +for a single file change. + +Usage:: + + def options(opt): + opt.load('fast_partial') + +Assumptions: +* Start with a clean build (run "waf distclean" after enabling) +* Mostly for C/C++/Fortran targets with link tasks (object-only targets are not handled) + try it in the folder generated by utils/genbench.py +* For full project builds: no --targets and no pruning from subfolders +* The installation phase is ignored +* `use=` dependencies are specified up front even across build groups +* Task generator source files are not obtained from globs + +Implementation details: +* The first layer obtains file timestamps to recalculate file hashes only + when necessary (similar to md5_tstamp); the timestamps are then stored + in a dedicated pickle file +* A second layer associates each task generator to a file set to help + detecting changes. Task generators are to create their tasks only when + the related files have been modified. A specific db file is created + to store such data (5m -> 1m10) +* A third layer binds build context proxies onto task generators, replacing + the default context. 
While loading data for the full build uses more memory + (4GB -> 9GB), partial builds are then much faster (1m10 -> 13s) +* A fourth layer enables a 2-level cache on file signatures to + reduce the size of the main pickle file (13s -> 10s) +""" + +import os +from waflib import Build, Context, Errors, Logs, Task, TaskGen, Utils +from waflib.TaskGen import feature, after_method, taskgen_method +import waflib.Node + +DONE = 0 +DIRTY = 1 +NEEDED = 2 + +SKIPPABLE = ['cshlib', 'cxxshlib', 'cstlib', 'cxxstlib', 'cprogram', 'cxxprogram'] + +TSTAMP_DB = '.wafpickle_tstamp_db_file' + +SAVED_ATTRS = 'root node_sigs task_sigs imp_sigs raw_deps node_deps'.split() + +class bld_proxy(object): + def __init__(self, bld): + object.__setattr__(self, 'bld', bld) + + object.__setattr__(self, 'node_class', type('Nod3', (waflib.Node.Node,), {})) + self.node_class.__module__ = 'waflib.Node' + self.node_class.ctx = self + + object.__setattr__(self, 'root', self.node_class('', None)) + for x in SAVED_ATTRS: + if x != 'root': + object.__setattr__(self, x, {}) + + self.fix_nodes() + + def __setattr__(self, name, value): + bld = object.__getattribute__(self, 'bld') + setattr(bld, name, value) + + def __delattr__(self, name): + bld = object.__getattribute__(self, 'bld') + delattr(bld, name) + + def __getattribute__(self, name): + try: + return object.__getattribute__(self, name) + except AttributeError: + bld = object.__getattribute__(self, 'bld') + return getattr(bld, name) + + def __call__(self, *k, **kw): + return self.bld(*k, **kw) + + def fix_nodes(self): + for x in ('srcnode', 'path', 'bldnode'): + node = self.root.find_dir(getattr(self.bld, x).abspath()) + object.__setattr__(self, x, node) + + def set_key(self, store_key): + object.__setattr__(self, 'store_key', store_key) + + def fix_tg_path(self, *tgs): + # changing Node objects on task generators is possible + # yet, all Node objects must belong to the same parent + for tg in tgs: + tg.path = self.root.make_node(tg.path.abspath()) + + def restore(self): + dbfn = os.path.join(self.variant_dir, Context.DBFILE + self.store_key) + Logs.debug('rev_use: reading %s', dbfn) + try: + data = Utils.readf(dbfn, 'rb') + except (EnvironmentError, EOFError): + # handle missing file/empty file + Logs.debug('rev_use: Could not load the build cache %s (missing)', dbfn) + else: + try: + waflib.Node.pickle_lock.acquire() + waflib.Node.Nod3 = self.node_class + try: + data = Build.cPickle.loads(data) + except Exception as e: + Logs.debug('rev_use: Could not pickle the build cache %s: %r', dbfn, e) + else: + for x in SAVED_ATTRS: + object.__setattr__(self, x, data.get(x, {})) + finally: + waflib.Node.pickle_lock.release() + self.fix_nodes() + + def store(self): + data = {} + for x in Build.SAVED_ATTRS: + data[x] = getattr(self, x) + db = os.path.join(self.variant_dir, Context.DBFILE + self.store_key) + + with waflib.Node.pickle_lock: + waflib.Node.Nod3 = self.node_class + try: + x = Build.cPickle.dumps(data, Build.PROTOCOL) + except Build.cPickle.PicklingError: + root = data['root'] + for node_deps in data['node_deps'].values(): + for idx, node in enumerate(node_deps): + # there may be more cross-context Node objects to fix, + # but this should be the main source + node_deps[idx] = root.find_node(node.abspath()) + x = Build.cPickle.dumps(data, Build.PROTOCOL) + + Logs.debug('rev_use: storing %s', db) + Utils.writef(db + '.tmp', x, m='wb') + try: + st = os.stat(db) + os.remove(db) + if not Utils.is_win32: + os.chown(db + '.tmp', st.st_uid, st.st_gid) + except (AttributeError, 
OSError): + pass + os.rename(db + '.tmp', db) + +class bld(Build.BuildContext): + def __init__(self, **kw): + super(bld, self).__init__(**kw) + self.hashes_md5_tstamp = {} + + def __call__(self, *k, **kw): + # this is one way of doing it, one could use a task generator method too + bld = kw['bld'] = bld_proxy(self) + ret = TaskGen.task_gen(*k, **kw) + self.task_gen_cache_names = {} + self.add_to_group(ret, group=kw.get('group')) + ret.bld = bld + bld.set_key(ret.path.abspath().replace(os.sep, '') + str(ret.idx)) + return ret + + def is_dirty(self): + return True + + def store_tstamps(self): + # Called after a build is finished + # For each task generator, record all files involved in task objects + # optimization: done only if there was something built + do_store = False + try: + f_deps = self.f_deps + except AttributeError: + f_deps = self.f_deps = {} + self.f_tstamps = {} + + allfiles = set() + for g in self.groups: + for tg in g: + try: + staleness = tg.staleness + except AttributeError: + staleness = DIRTY + + if staleness != DIRTY: + # DONE case: there was nothing built + # NEEDED case: the tg was brought in because of 'use' propagation + # but nothing really changed for them, there may be incomplete + # tasks (object files) and in this case it is best to let the next build + # figure out if an input/output file changed + continue + + do_cache = False + for tsk in tg.tasks: + if tsk.hasrun == Task.SUCCESS: + do_cache = True + pass + elif tsk.hasrun == Task.SKIPPED: + pass + else: + # one failed task, clear the cache for this tg + try: + del f_deps[(tg.path.abspath(), tg.idx)] + except KeyError: + pass + else: + # just store the new state because there is a change + do_store = True + + # skip the rest because there is no valid cache possible + break + else: + if not do_cache: + # all skipped, but is there anything in cache? + try: + f_deps[(tg.path.abspath(), tg.idx)] + except KeyError: + # probably cleared because a wscript file changed + # store it + do_cache = True + + if do_cache: + + # there was a rebuild, store the data structure too + tg.bld.store() + + # all tasks skipped but no cache + # or a successful task build + do_store = True + st = set() + for tsk in tg.tasks: + st.update(tsk.inputs) + st.update(self.node_deps.get(tsk.uid(), [])) + + # TODO do last/when loading the tgs? 
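+					# Snapshot of everything this task generator depends on: the wscript and
+					# wscript_build files defining it plus all task inputs and scanned
+					# dependencies; their timestamps are recorded in f_deps/f_tstamps and
+					# compared by is_stale() on the next run to decide whether to post the tg.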
+ lst = [] + for k in ('wscript', 'wscript_build'): + n = tg.path.find_node(k) + if n: + n.get_bld_sig() + lst.append(n.abspath()) + + lst.extend(sorted(x.abspath() for x in st)) + allfiles.update(lst) + f_deps[(tg.path.abspath(), tg.idx)] = lst + + for x in allfiles: + # f_tstamps has everything, while md5_tstamp can be relatively empty on partial builds + self.f_tstamps[x] = self.hashes_md5_tstamp[x][0] + + if do_store: + dbfn = os.path.join(self.variant_dir, TSTAMP_DB) + Logs.debug('rev_use: storing %s', dbfn) + dbfn_tmp = dbfn + '.tmp' + x = Build.cPickle.dumps([self.f_tstamps, f_deps], Build.PROTOCOL) + Utils.writef(dbfn_tmp, x, m='wb') + os.rename(dbfn_tmp, dbfn) + Logs.debug('rev_use: stored %s', dbfn) + + def store(self): + self.store_tstamps() + if self.producer.dirty: + Build.BuildContext.store(self) + + def compute_needed_tgs(self): + # assume the 'use' keys are not modified during the build phase + + dbfn = os.path.join(self.variant_dir, TSTAMP_DB) + Logs.debug('rev_use: Loading %s', dbfn) + try: + data = Utils.readf(dbfn, 'rb') + except (EnvironmentError, EOFError): + Logs.debug('rev_use: Could not load the build cache %s (missing)', dbfn) + self.f_deps = {} + self.f_tstamps = {} + else: + try: + self.f_tstamps, self.f_deps = Build.cPickle.loads(data) + except Exception as e: + Logs.debug('rev_use: Could not pickle the build cache %s: %r', dbfn, e) + self.f_deps = {} + self.f_tstamps = {} + else: + Logs.debug('rev_use: Loaded %s', dbfn) + + + # 1. obtain task generators that contain rebuilds + # 2. obtain the 'use' graph and its dual + stales = set() + reverse_use_map = Utils.defaultdict(list) + use_map = Utils.defaultdict(list) + + for g in self.groups: + for tg in g: + if tg.is_stale(): + stales.add(tg) + + try: + lst = tg.use = Utils.to_list(tg.use) + except AttributeError: + pass + else: + for x in lst: + try: + xtg = self.get_tgen_by_name(x) + except Errors.WafError: + pass + else: + use_map[tg].append(xtg) + reverse_use_map[xtg].append(tg) + + Logs.debug('rev_use: found %r stale tgs', len(stales)) + + # 3. dfs to post downstream tg as stale + visited = set() + def mark_down(tg): + if tg in visited: + return + visited.add(tg) + Logs.debug('rev_use: marking down %r as stale', tg.name) + tg.staleness = DIRTY + for x in reverse_use_map[tg]: + mark_down(x) + for tg in stales: + mark_down(tg) + + # 4. 
dfs to find ancestors tg to mark as needed + self.needed_tgs = needed_tgs = set() + def mark_needed(tg): + if tg in needed_tgs: + return + needed_tgs.add(tg) + if tg.staleness == DONE: + Logs.debug('rev_use: marking up %r as needed', tg.name) + tg.staleness = NEEDED + for x in use_map[tg]: + mark_needed(x) + for xx in visited: + mark_needed(xx) + + # so we have the whole tg trees to post in the set "needed" + # load their build trees + for tg in needed_tgs: + tg.bld.restore() + tg.bld.fix_tg_path(tg) + + # the stale ones should be fully build, while the needed ones + # may skip a few tasks, see create_compiled_task and apply_link_after below + Logs.debug('rev_use: amount of needed task gens: %r', len(needed_tgs)) + + def post_group(self): + # assumption: we can ignore the folder/subfolders cuts + def tgpost(tg): + try: + f = tg.post + except AttributeError: + pass + else: + f() + + if not self.targets or self.targets == '*': + for tg in self.groups[self.current_group]: + # this can cut quite a lot of tg objects + if tg in self.needed_tgs: + tgpost(tg) + else: + # default implementation + return Build.BuildContext.post_group() + + def get_build_iterator(self): + if not self.targets or self.targets == '*': + self.compute_needed_tgs() + return Build.BuildContext.get_build_iterator(self) + +@taskgen_method +def is_stale(self): + # assume no globs + self.staleness = DIRTY + + # 1. the case of always stale targets + if getattr(self, 'always_stale', False): + return True + + # 2. check if the db file exists + db = os.path.join(self.bld.variant_dir, Context.DBFILE) + try: + dbstat = os.stat(db).st_mtime + except OSError: + Logs.debug('rev_use: must post %r because this is a clean build') + return True + + # 3.a check if the configuration exists + cache_node = self.bld.bldnode.find_node('c4che/build.config.py') + if not cache_node: + return True + + # 3.b check if the configuration changed + if os.stat(cache_node.abspath()).st_mtime > dbstat: + Logs.debug('rev_use: must post %r because the configuration has changed', self.name) + return True + + # 3.c any tstamp data? + try: + f_deps = self.bld.f_deps + except AttributeError: + Logs.debug('rev_use: must post %r because there is no f_deps', self.name) + return True + + # 4. check if this is the first build (no cache) + try: + lst = f_deps[(self.path.abspath(), self.idx)] + except KeyError: + Logs.debug('rev_use: must post %r because there it has no cached data', self.name) + return True + + try: + cache = self.bld.cache_tstamp_rev_use + except AttributeError: + cache = self.bld.cache_tstamp_rev_use = {} + + # 5. 
check the timestamp of each dependency files listed is unchanged + f_tstamps = self.bld.f_tstamps + for x in lst: + try: + old_ts = f_tstamps[x] + except KeyError: + Logs.debug('rev_use: must post %r because %r is not in cache', self.name, x) + return True + + try: + try: + ts = cache[x] + except KeyError: + ts = cache[x] = os.stat(x).st_mtime + except OSError: + del f_deps[(self.path.abspath(), self.idx)] + Logs.debug('rev_use: must post %r because %r does not exist anymore', self.name, x) + return True + else: + if ts != old_ts: + Logs.debug('rev_use: must post %r because the timestamp on %r changed %r %r', self.name, x, old_ts, ts) + return True + + self.staleness = DONE + return False + +@taskgen_method +def create_compiled_task(self, name, node): + # skip the creation of object files + # assumption: object-only targets are not skippable + if self.staleness == NEEDED: + # only libraries/programs can skip object files + for x in SKIPPABLE: + if x in self.features: + return None + + out = '%s.%d.o' % (node.name, self.idx) + task = self.create_task(name, node, node.parent.find_or_declare(out)) + try: + self.compiled_tasks.append(task) + except AttributeError: + self.compiled_tasks = [task] + return task + +@feature(*SKIPPABLE) +@after_method('apply_link') +def apply_link_after(self): + # cprogram/cxxprogram might be unnecessary + if self.staleness != NEEDED: + return + for tsk in self.tasks: + tsk.hasrun = Task.SKIPPED + +def path_from(self, node): + # handle nodes of distinct types + if node.ctx is not self.ctx: + node = self.ctx.root.make_node(node.abspath()) + return self.default_path_from(node) +waflib.Node.Node.default_path_from = waflib.Node.Node.path_from +waflib.Node.Node.path_from = path_from + +def h_file(self): + # similar to md5_tstamp.py, but with 2-layer cache + # global_cache for the build context common for all task generators + # local_cache for the build context proxy (one by task generator) + # + # the global cache is not persistent + # the local cache is persistent and meant for partial builds + # + # assume all calls are made from a single thread + # + filename = self.abspath() + st = os.stat(filename) + + global_cache = self.ctx.bld.hashes_md5_tstamp + local_cache = self.ctx.hashes_md5_tstamp + + if filename in global_cache: + # value already calculated in this build + cval = global_cache[filename] + + # the value in global cache is assumed to be calculated once + # reverifying it could cause task generators + # to get distinct tstamp values, thus missing rebuilds + local_cache[filename] = cval + return cval[1] + + if filename in local_cache: + cval = local_cache[filename] + if cval[0] == st.st_mtime: + # correct value from a previous build + # put it in the global cache + global_cache[filename] = cval + return cval[1] + + ret = Utils.h_file(filename) + local_cache[filename] = global_cache[filename] = (st.st_mtime, ret) + return ret +waflib.Node.Node.h_file = h_file + diff --git a/waflib/extras/fc_bgxlf.py b/waflib/extras/fc_bgxlf.py index 88090c0a46..cca18101ea 100644 --- a/waflib/extras/fc_bgxlf.py +++ b/waflib/extras/fc_bgxlf.py @@ -2,7 +2,6 @@ # encoding: utf-8 # harald at klimachs.de -import re from waflib.Tools import fc, fc_config, fc_scan from waflib.Configure import conf diff --git a/waflib/extras/fc_cray.py b/waflib/extras/fc_cray.py index eba1886e85..da733fade3 100644 --- a/waflib/extras/fc_cray.py +++ b/waflib/extras/fc_cray.py @@ -3,7 +3,6 @@ # harald at klimachs.de import re -from waflib import Utils from waflib.Tools import fc, fc_config, fc_scan from 
waflib.Configure import conf @@ -21,7 +20,7 @@ def find_crayftn(conf): @conf def crayftn_flags(conf): v = conf.env - v['_FCMODOUTFLAGS'] = ['-em', '-J.'] # enable module files and put them in the current directoy + v['_FCMODOUTFLAGS'] = ['-em', '-J.'] # enable module files and put them in the current directory v['FCFLAGS_DEBUG'] = ['-m1'] # more verbose compiler warnings v['FCFLAGS_fcshlib'] = ['-h pic'] v['LINKFLAGS_fcshlib'] = ['-h shared'] @@ -34,8 +33,10 @@ def get_crayftn_version(conf, fc): version_re = re.compile(r"Cray Fortran\s*:\s*Version\s*(?P\d*)\.(?P\d*)", re.I).search cmd = fc + ['-V'] out,err = fc_config.getoutput(conf, cmd, stdin=False) - if out: match = version_re(out) - else: match = version_re(err) + if out: + match = version_re(out) + else: + match = version_re(err) if not match: conf.fatal('Could not determine the Cray Fortran compiler version.') k = match.groupdict() diff --git a/waflib/extras/fc_fujitsu.py b/waflib/extras/fc_fujitsu.py new file mode 100644 index 0000000000..cae676c207 --- /dev/null +++ b/waflib/extras/fc_fujitsu.py @@ -0,0 +1,52 @@ +#! /usr/bin/env python +# encoding: utf-8 +# Detection of the Fujitsu Fortran compiler for ARM64FX + +import re +from waflib.Tools import fc,fc_config,fc_scan +from waflib.Configure import conf +from waflib.Tools.compiler_fc import fc_compiler +fc_compiler['linux'].append('fc_fujitsu') + +@conf +def find_fujitsu(conf): + fc=conf.find_program(['frtpx'],var='FC') + conf.get_fujitsu_version(fc) + conf.env.FC_NAME='FUJITSU' + conf.env.FC_MOD_CAPITALIZATION='lower' + +@conf +def fujitsu_flags(conf): + v=conf.env + v['_FCMODOUTFLAGS']=[] + v['FCFLAGS_DEBUG']=[] + v['FCFLAGS_fcshlib']=[] + v['LINKFLAGS_fcshlib']=[] + v['FCSTLIB_MARKER']='' + v['FCSHLIB_MARKER']='' + +@conf +def get_fujitsu_version(conf,fc): + version_re=re.compile(r"frtpx\s*\(FRT\)\s*(?P\d+)\.(?P\d+)\.",re.I).search + cmd=fc+['--version'] + out,err=fc_config.getoutput(conf,cmd,stdin=False) + if out: + match=version_re(out) + else: + match=version_re(err) + if not match: + return(False) + conf.fatal('Could not determine the Fujitsu FRT Fortran compiler version.') + else: + k=match.groupdict() + conf.env['FC_VERSION']=(k['major'],k['minor']) + +def configure(conf): + conf.find_fujitsu() + conf.find_program('ar',var='AR') + conf.add_os_flags('ARFLAGS') + if not conf.env.ARFLAGS: + conf.env.ARFLAGS=['rcs'] + conf.fc_flags() + conf.fc_add_flags() + conf.fujitsu_flags() diff --git a/waflib/extras/fc_nag.py b/waflib/extras/fc_nag.py index 78ebbfdd98..edcb218bc3 100644 --- a/waflib/extras/fc_nag.py +++ b/waflib/extras/fc_nag.py @@ -22,7 +22,9 @@ def find_nag(conf): @conf def nag_flags(conf): v = conf.env - v['FCFLAGS_DEBUG'] = ['-C=all'] + v.FCFLAGS_DEBUG = ['-C=all'] + v.FCLNK_TGT_F = ['-o', ''] + v.FC_TGT_F = ['-c', '-o', ''] @conf def nag_modifier_platform(conf): @@ -36,7 +38,7 @@ def get_nag_version(conf, fc): """Get the NAG compiler version""" version_re = re.compile(r"^NAG Fortran Compiler *Release *(?P\d*)\.(?P\d*)", re.M).search - cmd = fc + ['-v'] + cmd = fc + ['-V'] out, err = fc_config.getoutput(conf,cmd,stdin=False) if out: diff --git a/waflib/extras/fc_nec.py b/waflib/extras/fc_nec.py index b5bcc6d64b..67c8680898 100644 --- a/waflib/extras/fc_nec.py +++ b/waflib/extras/fc_nec.py @@ -3,7 +3,6 @@ # harald at klimachs.de import re -from waflib import Utils from waflib.Tools import fc, fc_config, fc_scan from waflib.Configure import conf @@ -21,7 +20,7 @@ def find_sxfc(conf): @conf def sxfc_flags(conf): v = conf.env - v['_FCMODOUTFLAGS'] = [] # enable module 
files and put them in the current directoy + v['_FCMODOUTFLAGS'] = [] # enable module files and put them in the current directory v['FCFLAGS_DEBUG'] = [] # more verbose compiler warnings v['FCFLAGS_fcshlib'] = [] v['LINKFLAGS_fcshlib'] = [] @@ -34,12 +33,16 @@ def get_sxfc_version(conf, fc): version_re = re.compile(r"FORTRAN90/SX\s*Version\s*(?P\d*)\.(?P\d*)", re.I).search cmd = fc + ['-V'] out,err = fc_config.getoutput(conf, cmd, stdin=False) - if out: match = version_re(out) - else: match = version_re(err) + if out: + match = version_re(out) + else: + match = version_re(err) if not match: version_re=re.compile(r"NEC Fortran 2003 Compiler for\s*(?P\S*)\s*\(c\)\s*(?P\d*)",re.I).search - if out: match = version_re(out) - else: match = version_re(err) + if out: + match = version_re(out) + else: + match = version_re(err) if not match: conf.fatal('Could not determine the NEC Fortran compiler version.') k = match.groupdict() diff --git a/waflib/extras/fc_nfort.py b/waflib/extras/fc_nfort.py new file mode 100644 index 0000000000..c25886b8e7 --- /dev/null +++ b/waflib/extras/fc_nfort.py @@ -0,0 +1,52 @@ +#! /usr/bin/env python +# encoding: utf-8 +# Detection of the NEC Fortran compiler for Aurora Tsubasa + +import re +from waflib.Tools import fc,fc_config,fc_scan +from waflib.Configure import conf +from waflib.Tools.compiler_fc import fc_compiler +fc_compiler['linux'].append('fc_nfort') + +@conf +def find_nfort(conf): + fc=conf.find_program(['nfort'],var='FC') + conf.get_nfort_version(fc) + conf.env.FC_NAME='NFORT' + conf.env.FC_MOD_CAPITALIZATION='lower' + +@conf +def nfort_flags(conf): + v=conf.env + v['_FCMODOUTFLAGS']=[] + v['FCFLAGS_DEBUG']=[] + v['FCFLAGS_fcshlib']=[] + v['LINKFLAGS_fcshlib']=[] + v['FCSTLIB_MARKER']='' + v['FCSHLIB_MARKER']='' + +@conf +def get_nfort_version(conf,fc): + version_re=re.compile(r"nfort\s*\(NFORT\)\s*(?P\d+)\.(?P\d+)\.",re.I).search + cmd=fc+['--version'] + out,err=fc_config.getoutput(conf,cmd,stdin=False) + if out: + match=version_re(out) + else: + match=version_re(err) + if not match: + return(False) + conf.fatal('Could not determine the NEC NFORT Fortran compiler version.') + else: + k=match.groupdict() + conf.env['FC_VERSION']=(k['major'],k['minor']) + +def configure(conf): + conf.find_nfort() + conf.find_program('nar',var='AR') + conf.add_os_flags('ARFLAGS') + if not conf.env.ARFLAGS: + conf.env.ARFLAGS=['rcs'] + conf.fc_flags() + conf.fc_add_flags() + conf.nfort_flags() diff --git a/waflib/extras/fc_open64.py b/waflib/extras/fc_open64.py index 301742f9d1..413719f43e 100644 --- a/waflib/extras/fc_open64.py +++ b/waflib/extras/fc_open64.py @@ -39,8 +39,10 @@ def get_open64_version(conf, fc): cmd = fc + ['-version'] out, err = fc_config.getoutput(conf,cmd,stdin=False) - if out: match = version_re(out) - else: match = version_re(err) + if out: + match = version_re(out) + else: + match = version_re(err) if not match: conf.fatal('Could not determine the Open64 version.') k = match.groupdict() diff --git a/waflib/extras/fc_pgfortran.py b/waflib/extras/fc_pgfortran.py index f478d03ebe..afb2817ba6 100644 --- a/waflib/extras/fc_pgfortran.py +++ b/waflib/extras/fc_pgfortran.py @@ -3,7 +3,6 @@ # harald at klimachs.de import re -from waflib import Utils from waflib.Tools import fc, fc_config, fc_scan from waflib.Configure import conf @@ -31,8 +30,10 @@ def get_pgfortran_version(conf,fc): version_re = re.compile(r"The Portland Group", re.I).search cmd = fc + ['-V'] out,err = fc_config.getoutput(conf, cmd, stdin=False) - if out: match = version_re(out) - else: match = 
version_re(err) + if out: + match = version_re(out) + else: + match = version_re(err) if not match: conf.fatal('Could not verify PGI signature') cmd = fc + ['-help=variable'] @@ -46,10 +47,12 @@ def get_pgfortran_version(conf,fc): lst = line.partition('=') if lst[1] == '=': key = lst[0].rstrip() - if key == '': key = prevk + if key == '': + key = prevk val = lst[2].rstrip() k[key] = val - else: prevk = line.partition(' ')[0] + else: + prevk = line.partition(' ')[0] def isD(var): return var in k def isT(var): diff --git a/waflib/extras/fc_solstudio.py b/waflib/extras/fc_solstudio.py index 2f1a196b3c..53766df8fc 100644 --- a/waflib/extras/fc_solstudio.py +++ b/waflib/extras/fc_solstudio.py @@ -43,8 +43,10 @@ def get_solstudio_version(conf, fc): cmd = fc + ['-V'] out, err = fc_config.getoutput(conf,cmd,stdin=False) - if out: match = version_re(out) - else: match = version_re(err) + if out: + match = version_re(out) + else: + match = version_re(err) if not match: conf.fatal('Could not determine the Sun Studio Fortran version.') k = match.groupdict() diff --git a/waflib/extras/file_to_object.py b/waflib/extras/file_to_object.py index 57d9d62c38..13d2aef37d 100644 --- a/waflib/extras/file_to_object.py +++ b/waflib/extras/file_to_object.py @@ -26,24 +26,20 @@ Known issues: -- Currently only handles elf files with GNU ld. - - Destination is named like source, with extension renamed to .o eg. some.file -> some.o """ -import os, binascii - -from waflib import Task, Utils, TaskGen, Errors - +import os, sys +from waflib import Task, TaskGen, Errors def filename_c_escape(x): return x.replace("\\", "\\\\") class file_to_object_s(Task.Task): color = 'CYAN' - dep_vars = ('DEST_CPU', 'DEST_BINFMT') + vars = ['DEST_CPU', 'DEST_BINFMT'] def run(self): name = [] @@ -99,31 +95,27 @@ def run(self): name = "_binary_" + "".join(name) - data = [] - data = self.inputs[0].read() - data = binascii.hexlify(data) - data = [ ("0x%s" % (data[i:i+2])) for i in range(0, len(data), 2) ] - data = ",\n ".join(data) - - with open(self.outputs[0].abspath(), 'w') as f: - f.write(\ -""" -char const %(name)s[] = { - %(data)s -}; -unsigned long %(name)s_size = %(size)dL; -char const * %(name)s_start = %(name)s; -char const * %(name)s_end = &%(name)s[%(size)d]; -""" % locals()) - with open(self.outputs[0].abspath(), 'w') as f: - f.write(\ + def char_to_num(ch): + if sys.version_info[0] < 3: + return ord(ch) + return ch + + data = self.inputs[0].read('rb') + lines, line = [], [] + for idx_byte, byte in enumerate(data): + line.append(byte) + if len(line) > 15 or idx_byte == size-1: + lines.append(", ".join(("0x%02x" % char_to_num(x)) for x in line)) + line = [] + data = ",\n ".join(lines) + + self.outputs[0].write(\ """ unsigned long %(name)s_size = %(size)dL; char const %(name)s_start[] = { %(data)s }; -char const %(name)s_end[] = { -}; +char const %(name)s_end[] = {}; """ % locals()) @TaskGen.feature('file_to_object') @@ -135,16 +127,16 @@ def tg_file_to_object(self): for src in sources: if bld.env.F2O_METHOD == ["asm"]: tgt = src.parent.find_or_declare(src.name + '.f2o.s') - task = self.create_task('file_to_object_s', - src, tgt, cwd=src.parent.abspath()) + tsk = self.create_task('file_to_object_s', src, tgt) + tsk.cwd = src.parent.abspath() # verify else: tgt = src.parent.find_or_declare(src.name + '.f2o.c') - task = self.create_task('file_to_object_c', - src, tgt, cwd=src.parent.abspath()) + tsk = self.create_task('file_to_object_c', src, tgt) + tsk.cwd = src.parent.abspath() # verify targets.append(tgt) self.source = targets def 
configure(conf): conf.load('gas') - conf.env.F2O_METHOD = ["asm"] + conf.env.F2O_METHOD = ["c"] diff --git a/waflib/extras/fluid.py b/waflib/extras/fluid.py index 075a0c4f48..4814a35b94 100644 --- a/waflib/extras/fluid.py +++ b/waflib/extras/fluid.py @@ -3,7 +3,7 @@ # Grygoriy Fuchedzhy 2009 """ -Compile fluid files (fltk graphic library). Use the 'fluid' feature in conjuction with the 'cxx' feature. +Compile fluid files (fltk graphic library). Use the 'fluid' feature in conjunction with the 'cxx' feature. """ from waflib import Task @@ -15,7 +15,7 @@ class fluid(Task.Task): run_str = '${FLUID} -c -o ${TGT[0].abspath()} -h ${TGT[1].abspath()} ${SRC}' @extension('.fl') -def fluid(self, node): +def process_fluid(self, node): """add the .fl to the source list; the cxx file generated will be compiled when possible""" cpp = node.change_ext('.cpp') hpp = node.change_ext('.hpp') diff --git a/waflib/extras/fsc.py b/waflib/extras/fsc.py index b3a836b966..c67e70be29 100644 --- a/waflib/extras/fsc.py +++ b/waflib/extras/fsc.py @@ -8,10 +8,9 @@ FSC="mono /path/to/fsc.exe" waf configure build """ -from waflib import Utils, Task, Options, Logs, Errors +from waflib import Utils, Task from waflib.TaskGen import before_method, after_method, feature from waflib.Tools import ccroot, cs -from waflib.Configure import conf ccroot.USELIB_VARS['fsc'] = set(['CSFLAGS', 'ASSEMBLIES', 'RESOURCES']) @@ -36,7 +35,7 @@ def apply_fsc(self): if inst_to: # note: we are making a copy, so the files added to cs_task.outputs won't be installed automatically mod = getattr(self, 'chmod', bintype=='exe' and Utils.O755 or Utils.O644) - self.install_task = self.bld.install_files(inst_to, self.cs_task.outputs[:], env=self.env, chmod=mod) + self.install_task = self.add_install_files(install_to=inst_to, install_from=self.cs_task.outputs[:], chmod=mod) feature('fs')(cs.use_cs) after_method('apply_fsc')(cs.use_cs) diff --git a/waflib/extras/gccdeps.py b/waflib/extras/gccdeps.py index 43c9602e02..17e6e7dabc 100644 --- a/waflib/extras/gccdeps.py +++ b/waflib/extras/gccdeps.py @@ -5,15 +5,19 @@ """ Execute the tasks with gcc -MD, read the dependencies from the .d file and prepare the dependency calculation for the next run. +This affects the cxx class, so make sure to load Qt5 after this tool. -Usage: +Usage:: + + def options(opt): + opt.load('compiler_cxx') def configure(conf): - conf.load('gccdeps') + conf.load('compiler_cxx gccdeps') """ import os, re, threading from waflib import Task, Logs, Utils, Errors -from waflib.Tools import c_preproc +from waflib.Tools import asm, c, c_preproc, cxx from waflib.TaskGen import before_method, feature lock = threading.Lock() @@ -23,18 +27,9 @@ def configure(conf): gccdeps_flags = ['-MMD'] # Third-party tools are allowed to add extra names in here with append() -supported_compilers = ['gcc', 'icc', 'clang'] +supported_compilers = ['gas', 'gcc', 'icc', 'clang'] -def scan(self): - if not self.__class__.__name__ in self.env.ENABLE_GCCDEPS: - if not self.env.GCCDEPS: - self.generator.bld.fatal('Load gccdeps in configure!') - return self.no_gccdeps_scan() - nodes = self.generator.bld.node_deps.get(self.uid(), []) - names = [] - return (nodes, names) - -re_o = re.compile("\.o$") +re_o = re.compile(r"\.o$") re_splitter = re.compile(r'(? 
%s" % self.outputs) - #print ('++> %s' % self.outputs[1]) - bld_dir = self.outputs[1] - bld_dir.mkdir() - obj_dir = bld_dir.make_node('_obj') - obj_dir.mkdir() - - bld_srcs = [] - for s in source: - # FIXME: it seems gomake/cgo stumbles on filenames like a/b/c.go - # -> for the time being replace '/' with '_'... - #b = bld_dir.make_node(s.path_from(src_dir)) - b = bld_dir.make_node(s.path_from(src_dir).replace(os.sep,'_')) - b.parent.mkdir() - #print ('++> %s' % (s.path_from(src_dir),)) - try: - try:os.remove(b.abspath()) - except Exception:pass - os.symlink(s.abspath(), b.abspath()) - except Exception: - # if no support for symlinks, copy the file from src - b.write(s.read()) - bld_srcs.append(b) - #print("--|> [%s]" % b.abspath()) - b.sig = Utils.h_file(b.abspath()) - pass - #self.set_inputs(bld_srcs) - #self.generator.bld.raw_deps[self.uid()] = [self.signature()] + bld_srcs - makefile_node = bld_dir.make_node("Makefile") - makefile_tmpl = '''\ -# Copyright 2009 The Go Authors. All rights reserved. -# Use of this source code is governed by a BSD-style -# license that can be found in the LICENSE file. --- - -include $(GOROOT)/src/Make.inc - -TARG=%(target)s - -GCIMPORTS= %(gcimports)s - -CGOFILES=\\ -\t%(source)s - -CGO_CFLAGS= %(cgo_cflags)s - -CGO_LDFLAGS= %(cgo_ldflags)s - -include $(GOROOT)/src/Make.pkg - -%%: install %%.go - $(GC) $*.go - $(LD) -o $@ $*.$O - -''' % { -'gcimports': ' '.join(l for l in self.env['GOCFLAGS']), -'cgo_cflags' : ' '.join(l for l in self.env['GOCFLAGS']), -'cgo_ldflags': ' '.join(l for l in self.env['GOLFLAGS']), -'target': target.path_from(obj_dir), -'source': ' '.join([b.path_from(bld_dir) for b in bld_srcs]) -} - makefile_node.write(makefile_tmpl) - #print ("::makefile: %s"%makefile_node.abspath()) - cmd = Utils.subst_vars('gomake ${GOMAKE_FLAGS}', self.env).strip() - o = self.outputs[0].change_ext('.gomake.log') - fout_node = bld_dir.find_or_declare(o.name) - fout = open(fout_node.abspath(), 'w') - rc = self.generator.bld.exec_command( - cmd, - stdout=fout, - stderr=fout, - cwd=bld_dir.abspath(), - ) - if rc != 0: - import waflib.Logs as msg - msg.error('** error running [%s] (cgo-%s)' % (cmd, target)) - msg.error(fout_node.read()) - return rc - self.generator.bld.read_stlib( - target, - paths=[obj_dir.abspath(),], - ) - tgt = self.outputs[0] - if tgt.parent != obj_dir: - install_dir = os.path.join('${LIBDIR}', - tgt.parent.path_from(obj_dir)) - else: - install_dir = '${LIBDIR}' - #print('===> %s (%s)' % (tgt.abspath(), install_dir)) - self.generator.bld.install_files( - install_dir, - tgt.abspath(), - relative_trick=False, - postpone=False, - ) - return rc - -@extension('.go') -def compile_go(self, node): - #print('*'*80, self.name) - if not ('cgopackage' in self.features): - return self.create_compiled_task('go', node) - #print ('compile_go-cgo...') - bld_dir = node.parent.get_bld() - obj_dir = bld_dir.make_node('_obj') - target = obj_dir.make_node(node.change_ext('.a').name) - return self.create_task('cgopackage', node, node.change_ext('.a')) - -@feature('gopackage', 'goprogram', 'cgopackage') -@before_method('process_source') -def go_compiler_is_foobar(self): - if self.env.GONAME == 'gcc': - return - self.source = self.to_nodes(self.source) - src = [] - go = [] - for node in self.source: - if node.name.endswith('.go'): - go.append(node) - else: - src.append(node) - self.source = src - if not ('cgopackage' in self.features): - #print('--> [%s]... 
(%s)' % (go[0], getattr(self, 'target', 'N/A'))) - tsk = self.create_compiled_task('go', go[0]) - tsk.inputs.extend(go[1:]) - else: - #print ('+++ [%s] +++' % self.target) - bld_dir = self.path.get_bld().make_node('cgopackage--%s' % self.target.replace(os.sep,'_')) - obj_dir = bld_dir.make_node('_obj') - target = obj_dir.make_node(self.target+'.a') - tsk = self.create_task('cgopackage', go, [target, bld_dir]) - self.link_task = tsk - -@feature('gopackage', 'goprogram', 'cgopackage') -@after_method('process_source', 'apply_incpaths',) -def go_local_libs(self): - names = self.to_list(getattr(self, 'use', [])) - #print ('== go-local-libs == [%s] == use: %s' % (self.name, names)) - for name in names: - tg = self.bld.get_tgen_by_name(name) - if not tg: - raise Utils.WafError('no target of name %r necessary for %r in go uselib local' % (name, self)) - tg.post() - #print ("-- tg[%s]: %s" % (self.name,name)) - lnk_task = getattr(tg, 'link_task', None) - if lnk_task: - for tsk in self.tasks: - if isinstance(tsk, (go, gopackage, cgopackage)): - tsk.set_run_after(lnk_task) - tsk.dep_nodes.extend(lnk_task.outputs) - path = lnk_task.outputs[0].parent.abspath() - if isinstance(lnk_task, (go, gopackage)): - # handle hierarchical packages - path = lnk_task.generator.path.get_bld().abspath() - elif isinstance(lnk_task, (cgopackage,)): - # handle hierarchical cgopackages - cgo_obj_dir = lnk_task.outputs[1].find_or_declare('_obj') - path = cgo_obj_dir.abspath() - # recursively add parent GOCFLAGS... - self.env.append_unique('GOCFLAGS', - getattr(lnk_task.env, 'GOCFLAGS',[])) - # ditto for GOLFLAGS... - self.env.append_unique('GOLFLAGS', - getattr(lnk_task.env, 'GOLFLAGS',[])) - self.env.append_unique('GOCFLAGS', ['-I%s' % path]) - self.env.append_unique('GOLFLAGS', ['-L%s' % path]) - for n in getattr(tg, 'includes_nodes', []): - self.env.append_unique('GOCFLAGS', ['-I%s' % n.abspath()]) - pass - pass - -def configure(conf): - - def set_def(var, val): - if not conf.env[var]: - conf.env[var] = val - - goarch = os.getenv('GOARCH') - if goarch == '386': - set_def('GO_PLATFORM', 'i386') - elif goarch == 'amd64': - set_def('GO_PLATFORM', 'x86_64') - elif goarch == 'arm': - set_def('GO_PLATFORM', 'arm') - else: - set_def('GO_PLATFORM', platform.machine()) - - if conf.env.GO_PLATFORM == 'x86_64': - set_def('GO_COMPILER', '6g') - set_def('GO_LINKER', '6l') - elif conf.env.GO_PLATFORM in ('i386', 'i486', 'i586', 'i686'): - set_def('GO_COMPILER', '8g') - set_def('GO_LINKER', '8l') - elif conf.env.GO_PLATFORM == 'arm': - set_def('GO_COMPILER', '5g') - set_def('GO_LINKER', '5l') - set_def('GO_EXTENSION', '.5') - - if not (conf.env.GO_COMPILER or conf.env.GO_LINKER): - raise conf.fatal('Unsupported platform ' + platform.machine()) - - set_def('GO_PACK', 'gopack') - set_def('gopackage_PATTERN', '%s.a') - set_def('CPPPATH_ST', '-I%s') - - set_def('GOMAKE_FLAGS', ['--quiet']) - conf.find_program(conf.env.GO_COMPILER, var='GOC') - conf.find_program(conf.env.GO_LINKER, var='GOL') - conf.find_program(conf.env.GO_PACK, var='GOP') - - conf.find_program('cgo', var='CGO') - -TaskGen.feature('go')(process_use) -TaskGen.feature('go')(propagate_uselib_vars) - diff --git a/waflib/extras/halide.py b/waflib/extras/halide.py index 41a6b8e0b0..6078e38bde 100644 --- a/waflib/extras/halide.py +++ b/waflib/extras/halide.py @@ -89,7 +89,8 @@ def change_ext(src, ext): # Return a node with a new extension, in an appropriate folder name = src.name xpos = src.name.rfind('.') - if xpos == -1: xpos = len(src.name) + if xpos == -1: + xpos = 
len(src.name) newname = name[:xpos] + ext if src.is_child_of(bld.bldnode): node = src.get_src().parent.find_or_declare(newname) diff --git a/waflib/extras/haxe.py b/waflib/extras/haxe.py new file mode 100644 index 0000000000..4ff3745794 --- /dev/null +++ b/waflib/extras/haxe.py @@ -0,0 +1,154 @@ +import re + +from waflib import Utils, Task, Errors, Logs +from waflib.Configure import conf +from waflib.TaskGen import extension, taskgen_method + +HAXE_COMPILERS = { + 'JS': {'tgt': '--js', 'ext_out': ['.js']}, + 'LUA': {'tgt': '--lua', 'ext_out': ['.lua']}, + 'SWF': {'tgt': '--swf', 'ext_out': ['.swf']}, + 'NEKO': {'tgt': '--neko', 'ext_out': ['.n']}, + 'PHP': {'tgt': '--php', 'ext_out': ['.php']}, + 'CPP': {'tgt': '--cpp', 'ext_out': ['.h', '.cpp']}, + 'CPPIA': {'tgt': '--cppia', 'ext_out': ['.cppia']}, + 'CS': {'tgt': '--cs', 'ext_out': ['.cs']}, + 'JAVA': {'tgt': '--java', 'ext_out': ['.java']}, + 'JVM': {'tgt': '--jvm', 'ext_out': ['.jar']}, + 'PYTHON': {'tgt': '--python', 'ext_out': ['.py']}, + 'HL': {'tgt': '--hl', 'ext_out': ['.hl']}, + 'HLC': {'tgt': '--hl', 'ext_out': ['.h', '.c']}, +} + +@conf +def check_haxe_pkg(self, **kw): + self.find_program('haxelib') + libs = kw.get('libs') + if not libs or not (type(libs) == str or (type(libs) == list and all(isinstance(s, str) for s in libs))): + self.fatal('Specify correct libs value in ensure call') + return + fetch = kw.get('fetch') + if not fetch is None and not type(fetch) == bool: + self.fatal('Specify correct fetch value in ensure call') + + libs = [libs] if type(libs) == str else libs + halt = False + for lib in libs: + try: + self.start_msg('Checking for library %s' % lib) + output = self.cmd_and_log(self.env.HAXELIB + ['list', lib]) + except Errors.WafError: + self.end_msg(False) + self.fatal('Can\'t run haxelib list, ensuring halted') + return + + if lib in output: + self.end_msg(lib in output) + else: + if not fetch: + self.end_msg(False) + halt = True + continue + try: + status = self.exec_command(self.env.HAXELIB + ['install', lib]) + if status: + self.end_msg(False) + self.fatal('Can\'t get %s with haxelib, ensuring halted' % lib) + return + else: + self.end_msg('downloaded', color='YELLOW') + except Errors.WafError: + self.end_msg(False) + self.fatal('Can\'t run haxelib install, ensuring halted') + return + postfix = kw.get('uselib_store') or lib.upper() + self.env.append_unique('LIB_' + postfix, lib) + + if halt: + self.fatal('Can\'t find libraries in haxelib list, ensuring halted') + return + +class haxe(Task.Task): + vars = ['HAXE_VERSION', 'HAXE_FLAGS'] + ext_in = ['.hx'] + + def run(self): + cmd = self.env.HAXE + self.env.HAXE_FLAGS_DEFAULT + self.env.HAXE_FLAGS + return self.exec_command(cmd) + +for COMP in HAXE_COMPILERS: + # create runners for each compile target + type("haxe_" + COMP, (haxe,), {'ext_out': HAXE_COMPILERS[COMP]['ext_out']}) + +@taskgen_method +def init_haxe(self): + errmsg = '%s not found, specify correct value' + try: + compiler = HAXE_COMPILERS[self.compiler] + comp_tgt = compiler['tgt'] + comp_mod = '/main.c' if self.compiler == 'HLC' else '' + except (AttributeError, KeyError): + self.bld.fatal(errmsg % 'COMPILER' + ': ' + ', '.join(HAXE_COMPILERS.keys())) + return + + self.env.append_value( + 'HAXE_FLAGS', + [comp_tgt, self.path.get_bld().make_node(self.target + comp_mod).abspath()]) + if hasattr(self, 'use'): + if not (type(self.use) == str or type(self.use) == list): + self.bld.fatal(errmsg % 'USE') + return + self.use = [self.use] if type(self.use) == str else self.use + + for dep in self.use: + 
if self.env['LIB_' + dep]: + for lib in self.env['LIB_' + dep]: + self.env.append_value('HAXE_FLAGS', ['-lib', lib]) + + if hasattr(self, 'res'): + if not type(self.res) == str: + self.bld.fatal(errmsg % 'RES') + return + self.env.append_value('HAXE_FLAGS', ['-D', 'resourcesPath=%s' % self.res]) + +@extension('.hx') +def haxe_hook(self, node): + if len(self.source) > 1: + self.bld.fatal('Use separate task generators for multiple files') + return + + src = node + tgt = self.path.get_bld().find_or_declare(self.target) + + self.init_haxe() + self.create_task('haxe_' + self.compiler, src, tgt) + +@conf +def check_haxe(self, mini=None, maxi=None): + self.start_msg('Checking for haxe version') + try: + curr = re.search( + r'(\d+.?)+', + self.cmd_and_log(self.env.HAXE + ['-version'])).group() + except Errors.WafError: + self.end_msg(False) + self.fatal('Can\'t get haxe version') + return + + if mini and Utils.num2ver(curr) < Utils.num2ver(mini): + self.end_msg('wrong', color='RED') + self.fatal('%s is too old, need >= %s' % (curr, mini)) + return + if maxi and Utils.num2ver(curr) > Utils.num2ver(maxi): + self.end_msg('wrong', color='RED') + self.fatal('%s is too new, need <= %s' % (curr, maxi)) + return + self.end_msg(curr, color='GREEN') + self.env.HAXE_VERSION = curr + +def configure(self): + self.env.append_value( + 'HAXE_FLAGS_DEFAULT', + ['-D', 'no-compilation', '-cp', self.path.abspath()]) + Logs.warn('Default flags: %s' % ' '.join(self.env.HAXE_FLAGS_DEFAULT)) + self.find_program('haxe') diff --git a/waflib/extras/javatest.py b/waflib/extras/javatest.py new file mode 100755 index 0000000000..76d40edf25 --- /dev/null +++ b/waflib/extras/javatest.py @@ -0,0 +1,237 @@ +#! /usr/bin/env python +# encoding: utf-8 +# Federico Pellegrin, 2019 (fedepell) + +""" +Provides Java Unit test support using :py:class:`waflib.Tools.waf_unit_test.utest` +task via the **javatest** feature. + +This gives the possibility to run unit test and have them integrated into the +standard waf unit test environment. It has been tested with TestNG and JUnit +but should be easily expandable to other frameworks given the flexibility of +ut_str provided by the standard waf unit test environment. + +The extra takes care also of managing non-java dependencies (ie. C/C++ libraries +using JNI or Python modules via JEP) and setting up the environment needed to run +them. + +Example usage: + +def options(opt): + opt.load('java waf_unit_test javatest') + +def configure(conf): + conf.load('java javatest') + +def build(bld): + + [ ... mainprog is built here ... ] + + bld(features = 'javac javatest', + srcdir = 'test/', + outdir = 'test', + sourcepath = ['test'], + classpath = [ 'src' ], + basedir = 'test', + use = ['JAVATEST', 'mainprog'], # mainprog is the program being tested in src/ + ut_str = 'java -cp ${CLASSPATH} ${JTRUNNER} ${SRC}', + jtest_source = bld.path.ant_glob('test/*.xml'), + ) + + +At command line the CLASSPATH where to find the testing environment and the +test runner (default TestNG) that will then be seen in the environment as +CLASSPATH_JAVATEST (then used for use) and JTRUNNER and can be used for +dependencies and ut_str generation. 
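+
+The optional ``jtest_source`` attribute (for example TestNG suite XML files, as in the
+example above) is used as the inputs of the generated test task; when it is not given,
+the Java sources found in the javac srcdir are globbed and used instead.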
+ +Example configure for TestNG: + waf configure --jtpath=/tmp/testng-6.12.jar:/tmp/jcommander-1.71.jar --jtrunner=org.testng.TestNG + or as default runner is TestNG: + waf configure --jtpath=/tmp/testng-6.12.jar:/tmp/jcommander-1.71.jar + +Example configure for JUnit: + waf configure --jtpath=/tmp/junit.jar --jtrunner=org.junit.runner.JUnitCore + +The runner class presence on the system is checked for at configuration stage. + +""" + +import os +from waflib import Task, TaskGen, Options, Errors, Utils, Logs +from waflib.Tools import ccroot + +JAR_RE = '**/*' + +def _process_use_rec(self, name): + """ + Recursively process ``use`` for task generator with name ``name``.. + Used by javatest_process_use. + """ + if name in self.javatest_use_not or name in self.javatest_use_seen: + return + try: + tg = self.bld.get_tgen_by_name(name) + except Errors.WafError: + self.javatest_use_not.add(name) + return + + self.javatest_use_seen.append(name) + tg.post() + + for n in self.to_list(getattr(tg, 'use', [])): + _process_use_rec(self, n) + +@TaskGen.feature('javatest') +@TaskGen.after_method('process_source', 'apply_link', 'use_javac_files') +def javatest_process_use(self): + """ + Process the ``use`` attribute which contains a list of task generator names and store + paths that later is used to populate the unit test runtime environment. + """ + self.javatest_use_not = set() + self.javatest_use_seen = [] + self.javatest_libpaths = [] # strings or Nodes + self.javatest_pypaths = [] # strings or Nodes + self.javatest_dep_nodes = [] + + names = self.to_list(getattr(self, 'use', [])) + for name in names: + _process_use_rec(self, name) + + def extend_unique(lst, varlst): + ext = [] + for x in varlst: + if x not in lst: + ext.append(x) + lst.extend(ext) + + # Collect type specific info needed to construct a valid runtime environment + # for the test. + for name in self.javatest_use_seen: + tg = self.bld.get_tgen_by_name(name) + + # Python-Java embedding crosstools such as JEP + if 'py' in tg.features: + # Python dependencies are added to PYTHONPATH + pypath = getattr(tg, 'install_from', tg.path) + + if 'buildcopy' in tg.features: + # Since buildcopy is used we assume that PYTHONPATH in build should be used, + # not source + extend_unique(self.javatest_pypaths, [pypath.get_bld().abspath()]) + + # Add buildcopy output nodes to dependencies + extend_unique(self.javatest_dep_nodes, [o for task in getattr(tg, 'tasks', []) for o in getattr(task, 'outputs', [])]) + else: + # If buildcopy is not used, depend on sources instead + extend_unique(self.javatest_dep_nodes, tg.source) + extend_unique(self.javatest_pypaths, [pypath.abspath()]) + + + if getattr(tg, 'link_task', None): + # For tasks with a link_task (C, C++, D et.c.) include their library paths: + if not isinstance(tg.link_task, ccroot.stlink_task): + extend_unique(self.javatest_dep_nodes, tg.link_task.outputs) + extend_unique(self.javatest_libpaths, tg.link_task.env.LIBPATH) + + if 'pyext' in tg.features: + # If the taskgen is extending Python we also want to add the interpreter libpath. 
+ extend_unique(self.javatest_libpaths, tg.link_task.env.LIBPATH_PYEXT) + else: + # Only add to libpath if the link task is not a Python extension + extend_unique(self.javatest_libpaths, [tg.link_task.outputs[0].parent.abspath()]) + + if 'javac' in tg.features or 'jar' in tg.features: + if hasattr(tg, 'jar_task'): + # For Java JAR tasks depend on generated JAR + extend_unique(self.javatest_dep_nodes, tg.jar_task.outputs) + else: + # For Java non-JAR ones we need to glob generated files (Java output files are not predictable) + if hasattr(tg, 'outdir'): + base_node = tg.outdir + else: + base_node = tg.path.get_bld() + + self.javatest_dep_nodes.extend([dx for dx in base_node.ant_glob(JAR_RE, remove=False, quiet=True)]) + + + +@TaskGen.feature('javatest') +@TaskGen.after_method('apply_java', 'use_javac_files', 'set_classpath', 'javatest_process_use') +def make_javatest(self): + """ + Creates a ``utest`` task with a populated environment for Java Unit test execution + + """ + tsk = self.create_task('utest') + tsk.set_run_after(self.javac_task) + + # Dependencies from recursive use analysis + tsk.dep_nodes.extend(self.javatest_dep_nodes) + + # Put test input files as waf_unit_test relies on that for some prints and log generation + # If jtest_source is there, this is specially useful for passing XML for TestNG + # that contain test specification, use that as inputs, otherwise test sources + if getattr(self, 'jtest_source', None): + tsk.inputs = self.to_nodes(self.jtest_source) + else: + if self.javac_task.srcdir[0].exists(): + tsk.inputs = self.javac_task.srcdir[0].ant_glob('**/*.java', remove=False) + + if getattr(self, 'ut_str', None): + self.ut_run, lst = Task.compile_fun(self.ut_str, shell=getattr(self, 'ut_shell', False)) + tsk.vars = lst + tsk.vars + + if getattr(self, 'ut_cwd', None): + if isinstance(self.ut_cwd, str): + # we want a Node instance + if os.path.isabs(self.ut_cwd): + self.ut_cwd = self.bld.root.make_node(self.ut_cwd) + else: + self.ut_cwd = self.path.make_node(self.ut_cwd) + else: + self.ut_cwd = self.bld.bldnode + + # Get parent CLASSPATH and add output dir of test, we run from wscript dir + # We have to change it from list to the standard java -cp format (: separated) + tsk.env.CLASSPATH = ':'.join(self.env.CLASSPATH) + ':' + self.outdir.abspath() + + if not self.ut_cwd.exists(): + self.ut_cwd.mkdir() + + if not hasattr(self, 'ut_env'): + self.ut_env = dict(os.environ) + def add_paths(var, lst): + # Add list of paths to a variable, lst can contain strings or nodes + lst = [ str(n) for n in lst ] + Logs.debug("ut: %s: Adding paths %s=%s", self, var, lst) + self.ut_env[var] = os.pathsep.join(lst) + os.pathsep + self.ut_env.get(var, '') + + add_paths('PYTHONPATH', self.javatest_pypaths) + + if Utils.is_win32: + add_paths('PATH', self.javatest_libpaths) + elif Utils.unversioned_sys_platform() == 'darwin': + add_paths('DYLD_LIBRARY_PATH', self.javatest_libpaths) + add_paths('LD_LIBRARY_PATH', self.javatest_libpaths) + else: + add_paths('LD_LIBRARY_PATH', self.javatest_libpaths) + +def configure(ctx): + cp = ctx.env.CLASSPATH or '.' 
+ if getattr(Options.options, 'jtpath', None): + ctx.env.CLASSPATH_JAVATEST = getattr(Options.options, 'jtpath').split(':') + cp += ':' + getattr(Options.options, 'jtpath') + + if getattr(Options.options, 'jtrunner', None): + ctx.env.JTRUNNER = getattr(Options.options, 'jtrunner') + + if ctx.check_java_class(ctx.env.JTRUNNER, with_classpath=cp): + ctx.fatal('Could not run test class %r' % ctx.env.JTRUNNER) + +def options(opt): + opt.add_option('--jtpath', action='store', default='', dest='jtpath', + help='Path to jar(s) needed for javatest execution, colon separated, if not in the system CLASSPATH') + opt.add_option('--jtrunner', action='store', default='org.testng.TestNG', dest='jtrunner', + help='Class to run javatest test [default: org.testng.TestNG]') + diff --git a/waflib/Tools/kde4.py b/waflib/extras/kde4.py similarity index 83% rename from waflib/Tools/kde4.py rename to waflib/extras/kde4.py index 36e40aeb0e..aed9bfb557 100644 --- a/waflib/Tools/kde4.py +++ b/waflib/extras/kde4.py @@ -6,9 +6,9 @@ Support for the KDE4 libraries and msgfmt """ -import os, sys, re -from waflib import Options, TaskGen, Task, Utils -from waflib.TaskGen import feature, after_method +import os, re +from waflib import Task, Utils +from waflib.TaskGen import feature @feature('msgfmt') def apply_msgfmt(self): @@ -27,9 +27,9 @@ def build(bld): inst = getattr(self, 'install_path', '${KDE4_LOCALE_INSTALL_DIR}') - self.bld.install_as( - inst + os.sep + langname + os.sep + 'LC_MESSAGES' + os.sep + getattr(self, 'appname', 'set_your_appname') + '.mo', - task.outputs[0], + self.add_install_as( + inst_to = inst + os.sep + langname + os.sep + 'LC_MESSAGES' + os.sep + getattr(self, 'appname', 'set_your_appname') + '.mo', + inst_from = task.outputs[0], chmod = getattr(self, 'chmod', Utils.O644)) class msgfmt(Task.Task): @@ -53,11 +53,14 @@ def build(bld): kdeconfig = self.find_program('kde4-config') prefix = self.cmd_and_log(kdeconfig + ['--prefix']).strip() fname = '%s/share/apps/cmake/modules/KDELibsDependencies.cmake' % prefix - try: os.stat(fname) + try: + os.stat(fname) except OSError: fname = '%s/share/kde4/apps/cmake/modules/KDELibsDependencies.cmake' % prefix - try: os.stat(fname) - except OSError: self.fatal('could not open %s' % fname) + try: + os.stat(fname) + except OSError: + self.fatal('could not open %s' % fname) try: txt = Utils.readf(fname) @@ -68,7 +71,7 @@ def build(bld): fu = re.compile('#(.*)\n') txt = fu.sub('', txt) - setregexp = re.compile('([sS][eE][tT]\s*\()\s*([^\s]+)\s+\"([^"]+)\"\)') + setregexp = re.compile(r'([sS][eE][tT]\s*\()\s*([^\s]+)\s+\"([^"]+)\"\)') found = setregexp.findall(txt) for (_, key, val) in found: diff --git a/waflib/extras/local_rpath.py b/waflib/extras/local_rpath.py index b2507e17a1..e3923d9b9d 100644 --- a/waflib/extras/local_rpath.py +++ b/waflib/extras/local_rpath.py @@ -2,18 +2,20 @@ # encoding: utf-8 # Thomas Nagy, 2011 (ita) +import copy from waflib.TaskGen import after_method, feature @after_method('propagate_uselib_vars') @feature('cprogram', 'cshlib', 'cxxprogram', 'cxxshlib', 'fcprogram', 'fcshlib') def add_rpath_stuff(self): - all = self.to_list(getattr(self, 'use', [])) + all = copy.copy(self.to_list(getattr(self, 'use', []))) while all: name = all.pop() try: tg = self.bld.get_tgen_by_name(name) except: continue - self.env.append_value('RPATH', tg.link_task.outputs[0].parent.abspath()) - all.extend(self.to_list(getattr(tg, 'use', []))) + if hasattr(tg, 'link_task'): + self.env.append_value('RPATH', tg.link_task.outputs[0].parent.abspath()) + 
all.extend(self.to_list(getattr(tg, 'use', []))) diff --git a/waflib/extras/lru_cache.py b/waflib/extras/lru_cache.py deleted file mode 100644 index 2bc964426b..0000000000 --- a/waflib/extras/lru_cache.py +++ /dev/null @@ -1,98 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# Thomas Nagy 2011 - -import os, shutil, re -from waflib import Options, Build, Logs - -""" -Apply a least recently used policy to the Waf cache. - -For performance reasons, it is called after the build is complete. - -We assume that the the folders are written atomically - -Do export WAFCACHE=/tmp/foo_xyz where xyz represents the cache size in bytes -If missing, the default cache size will be set to 10GB -""" - -re_num = re.compile('[a-zA-Z_-]+(\d+)') - -CACHESIZE = 10*1024*1024*1024 # in bytes -CLEANRATIO = 0.8 -DIRSIZE = 4096 - -def compile(self): - if Options.cache_global and not Options.options.nocache: - try: - os.makedirs(Options.cache_global) - except: - pass - - try: - self.raw_compile() - finally: - if Options.cache_global and not Options.options.nocache: - self.sweep() - -def sweep(self): - global CACHESIZE - CACHEDIR = Options.cache_global - - # get the cache max size from the WAFCACHE filename - re_num = re.compile('[a-zA-Z_]+(\d+)') - val = re_num.sub('\\1', os.path.basename(Options.cache_global)) - try: - CACHESIZE = int(val) - except: - pass - - # map folder names to timestamps - flist = {} - for x in os.listdir(CACHEDIR): - j = os.path.join(CACHEDIR, x) - if os.path.isdir(j) and len(x) == 64: # dir names are md5 hexdigests - flist[x] = [os.stat(j).st_mtime, 0] - - for (x, v) in flist.items(): - cnt = DIRSIZE # each entry takes 4kB - d = os.path.join(CACHEDIR, x) - for k in os.listdir(d): - cnt += os.stat(os.path.join(d, k)).st_size - flist[x][1] = cnt - - total = sum([x[1] for x in flist.values()]) - Logs.debug('lru: Cache size is %r' % total) - - if total >= CACHESIZE: - Logs.debug('lru: Trimming the cache since %r > %r' % (total, CACHESIZE)) - - # make a list to sort the folders by timestamp - lst = [(p, v[0], v[1]) for (p, v) in flist.items()] - lst.sort(key=lambda x: x[1]) # sort by timestamp - lst.reverse() - - while total >= CACHESIZE * CLEANRATIO: - (k, t, s) = lst.pop() - p = os.path.join(CACHEDIR, k) - v = p + '.del' - try: - os.rename(p, v) - except: - # someone already did it - pass - else: - try: - shutil.rmtree(v) - except: - # this should not happen, but who knows? 
- Logs.warn('If you ever see this message, report it (%r)' % v) - total -= s - del flist[k] - - Logs.debug('lru: Total at the end %r' % total) - -Build.BuildContext.raw_compile = Build.BuildContext.compile -Build.BuildContext.compile = compile -Build.BuildContext.sweep = sweep - diff --git a/waflib/extras/make.py b/waflib/extras/make.py index 57e841d9c6..933d9cacb2 100644 --- a/waflib/extras/make.py +++ b/waflib/extras/make.py @@ -15,7 +15,7 @@ """ import re -from waflib import Options, Task, Logs +from waflib import Options, Task from waflib.Build import BuildContext class MakeContext(BuildContext): @@ -48,7 +48,7 @@ def get_build_iterator(self): for pat in self.files.split(','): matcher = self.get_matcher(pat) for tg in g: - if isinstance(tg, Task.TaskBase): + if isinstance(tg, Task.Task): lst = [tg] else: lst = tg.tasks @@ -56,7 +56,7 @@ def get_build_iterator(self): all_tasks.append(tsk) do_exec = False - for node in getattr(tsk, 'inputs', []): + for node in tsk.inputs: try: uses[node].append(tsk) except: @@ -66,7 +66,7 @@ def get_build_iterator(self): do_exec = True break - for node in getattr(tsk, 'outputs', []): + for node in tsk.outputs: try: provides[node].append(tsk) except: @@ -86,14 +86,14 @@ def get_build_iterator(self): result = all_tasks else: # this is like a big filter... - result = set([]) - seen = set([]) + result = set() + seen = set() cur = set(tasks) while cur: result |= cur - tosee = set([]) + tosee = set() for tsk in cur: - for node in getattr(tsk, 'inputs', []): + for node in tsk.inputs: if node in seen: continue seen.add(node) @@ -129,9 +129,9 @@ def get_matcher(self, pat): pattern = re.compile(pat) def match(node, output): - if output == True and not out: + if output and not out: return False - if output == False and not inn: + if not output and not inn: return False if anode: diff --git a/waflib/extras/md5_tstamp.py b/waflib/extras/md5_tstamp.py deleted file mode 100644 index af6f1c8a2d..0000000000 --- a/waflib/extras/md5_tstamp.py +++ /dev/null @@ -1,70 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -""" -This module assumes that only one build context is running at a given time, which -is not the case if you want to execute configuration tests in parallel. 
- -Store some values on the buildcontext mapping file paths to -stat values and md5 values (timestamp + md5) -this way the md5 hashes are computed only when timestamp change (can be faster) -There is usually little or no gain from enabling this, but it can be used to enable -the second level cache with timestamps (WAFCACHE) - -You may have to run distclean or to remove the build directory before enabling/disabling -this hashing scheme -""" - -import os, stat -try: import cPickle -except: import pickle as cPickle -from waflib import Utils, Build, Context - -STRONGEST = True - -try: - Build.BuildContext.store_real -except AttributeError: - - Context.DBFILE += '_md5tstamp' - - Build.hashes_md5_tstamp = {} - Build.SAVED_ATTRS.append('hashes_md5_tstamp') - def store(self): - # save the hash cache as part of the default pickle file - self.hashes_md5_tstamp = Build.hashes_md5_tstamp - self.store_real() - Build.BuildContext.store_real = Build.BuildContext.store - Build.BuildContext.store = store - - def restore(self): - # we need a module variable for h_file below - self.restore_real() - try: - Build.hashes_md5_tstamp = self.hashes_md5_tstamp or {} - except Exception as e: - Build.hashes_md5_tstamp = {} - Build.BuildContext.restore_real = Build.BuildContext.restore - Build.BuildContext.restore = restore - - def h_file(filename): - st = os.stat(filename) - if stat.S_ISDIR(st[stat.ST_MODE]): raise IOError('not a file') - - if filename in Build.hashes_md5_tstamp: - if Build.hashes_md5_tstamp[filename][0] == str(st.st_mtime): - return Build.hashes_md5_tstamp[filename][1] - if STRONGEST: - ret = Utils.h_file_no_md5(filename) - Build.hashes_md5_tstamp[filename] = (str(st.st_mtime), ret) - return ret - else: - m = Utils.md5() - m.update(str(st.st_mtime)) - m.update(str(st.st_size)) - m.update(filename) - Build.hashes_md5_tstamp[filename] = (str(st.st_mtime), m.digest()) - return m.digest() - Utils.h_file_no_md5 = Utils.h_file - Utils.h_file = h_file - diff --git a/waflib/extras/mem_reducer.py b/waflib/extras/mem_reducer.py deleted file mode 100644 index 437126687f..0000000000 --- a/waflib/extras/mem_reducer.py +++ /dev/null @@ -1,111 +0,0 @@ -#! /usr/bin/env python -# encoding: UTF-8 - -""" -This tool can help to reduce the memory usage in very large builds featuring many tasks with after/before attributes. -It may also improve the overall build time by decreasing the amount of iterations over tasks. - -Usage: -def options(opt): - opt.load('mem_reducer') -""" - -import itertools -from waflib import Utils, Task, Runner - -class SetOfTasks(object): - """Wraps a set and a task which has a list of other sets. - The interface is meant to mimic the interface of set. Add missing functions as needed. 
- """ - def __init__(self, owner): - self._set = owner.run_after - self._owner = owner - - def __iter__(self): - for g in self._owner.run_after_groups: - #print len(g) - for task in g: - yield task - for task in self._set: - yield task - - def add(self, obj): - self._set.add(obj) - - def update(self, obj): - self._set.update(obj) - -def set_precedence_constraints(tasks): - cstr_groups = Utils.defaultdict(list) - for x in tasks: - x.run_after = SetOfTasks(x) - x.run_after_groups = [] - x.waiting_sets = [] - - h = x.hash_constraints() - cstr_groups[h].append(x) - - # create sets which can be reused for all tasks - for k in cstr_groups.keys(): - cstr_groups[k] = set(cstr_groups[k]) - - # this list should be short - for key1, key2 in itertools.combinations(cstr_groups.keys(), 2): - group1 = cstr_groups[key1] - group2 = cstr_groups[key2] - # get the first entry of the set - t1 = next(iter(group1)) - t2 = next(iter(group2)) - - # add the constraints based on the comparisons - if Task.is_before(t1, t2): - for x in group2: - x.run_after_groups.append(group1) - for k in group1: - k.waiting_sets.append(group1) - elif Task.is_before(t2, t1): - for x in group1: - x.run_after_groups.append(group2) - for k in group2: - k.waiting_sets.append(group2) - -Task.set_precedence_constraints = set_precedence_constraints - -def get_out(self): - tsk = self.out.get() - if not self.stop: - self.add_more_tasks(tsk) - self.count -= 1 - self.dirty = True - - # shrinking sets - try: - ws = tsk.waiting_sets - except AttributeError: - pass - else: - for k in ws: - try: - k.remove(tsk) - except KeyError: - pass - - return tsk -Runner.Parallel.get_out = get_out - -def skip(self, tsk): - tsk.hasrun = Task.SKIPPED - - # shrinking sets - try: - ws = tsk.waiting_sets - except AttributeError: - pass - else: - for k in ws: - try: - k.remove(tsk) - except KeyError: - pass -Runner.Parallel.skip = skip - diff --git a/waflib/extras/midl.py b/waflib/extras/midl.py index 78a1c8ae96..43e6cf911f 100644 --- a/waflib/extras/midl.py +++ b/waflib/extras/midl.py @@ -55,7 +55,7 @@ def idl_file(self): c = node.change_ext('_i.c') p = node.change_ext('_p.c') d = node.parent.find_or_declare('dlldata.c') - tsk = self.create_task('midl', node, [t, h, c, p, d]) + self.create_task('midl', node, [t, h, c, p, d]) self.source = src_nodes @@ -67,19 +67,3 @@ class midl(Task.Task): run_str = '${MIDL} ${MIDLFLAGS} ${CPPPATH_ST:INCLUDES} /tlb ${TGT[0].bldpath()} /header ${TGT[1].bldpath()} /iid ${TGT[2].bldpath()} /proxy ${TGT[3].bldpath()} /dlldata ${TGT[4].bldpath()} ${SRC}' before = ['winrc'] - def exec_command(self, *k, **kw): - if self.env['PATH']: - env = self.env.env or dict(os.environ) - env.update(PATH = ';'.join(self.env['PATH'])) - kw['env'] = env - - bld = self.generator.bld - - try: - if not kw.get('cwd', None): - kw['cwd'] = bld.cwd - except AttributeError: - bld.cwd = kw['cwd'] = bld.variant_dir - - return bld.exec_command(k[0], **kw) - diff --git a/waflib/extras/misc.py b/waflib/extras/misc.py deleted file mode 100644 index 61f98a58bf..0000000000 --- a/waflib/extras/misc.py +++ /dev/null @@ -1,413 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2006-2010 (ita) - -""" -This tool is totally deprecated - -Try using: - .pc.in files for .pc files - the feature intltool_in - see demos/intltool - make-like rules -""" - -import shutil, re, os -from waflib import TaskGen, Node, Task, Utils, Build, Errors -from waflib.TaskGen import feature, after_method, before_method -from waflib.Logs import debug - -def copy_attrs(orig, dest, names, 
only_if_set=False): - """ - copy class attributes from an object to another - """ - for a in Utils.to_list(names): - u = getattr(orig, a, ()) - if u or not only_if_set: - setattr(dest, a, u) - -def copy_func(tsk): - "Make a file copy. This might be used to make other kinds of file processing (even calling a compiler is possible)" - env = tsk.env - infile = tsk.inputs[0].abspath() - outfile = tsk.outputs[0].abspath() - try: - shutil.copy2(infile, outfile) - except EnvironmentError: - return 1 - else: - if tsk.chmod: os.chmod(outfile, tsk.chmod) - return 0 - -def action_process_file_func(tsk): - "Ask the function attached to the task to process it" - if not tsk.fun: raise Errors.WafError('task must have a function attached to it for copy_func to work!') - return tsk.fun(tsk) - -@feature('cmd') -def apply_cmd(self): - "call a command everytime" - if not self.fun: raise Errors.WafError('cmdobj needs a function!') - tsk = Task.TaskBase() - tsk.fun = self.fun - tsk.env = self.env - self.tasks.append(tsk) - tsk.install_path = self.install_path - -@feature('copy') -@before_method('process_source') -def apply_copy(self): - Utils.def_attrs(self, fun=copy_func) - self.default_install_path = 0 - - lst = self.to_list(self.source) - self.meths.remove('process_source') - - for filename in lst: - node = self.path.find_resource(filename) - if not node: raise Errors.WafError('cannot find input file %s for processing' % filename) - - target = self.target - if not target or len(lst)>1: target = node.name - - # TODO the file path may be incorrect - newnode = self.path.find_or_declare(target) - - tsk = self.create_task('copy', node, newnode) - tsk.fun = self.fun - tsk.chmod = getattr(self, 'chmod', Utils.O644) - - if not tsk.env: - tsk.debug() - raise Errors.WafError('task without an environment') - -def subst_func(tsk): - "Substitutes variables in a .in file" - - m4_re = re.compile('@(\w+)@', re.M) - - code = tsk.inputs[0].read() #Utils.readf(infile) - - # replace all % by %% to prevent errors by % signs in the input file while string formatting - code = code.replace('%', '%%') - - s = m4_re.sub(r'%(\1)s', code) - - env = tsk.env - di = getattr(tsk, 'dict', {}) or getattr(tsk.generator, 'dict', {}) - if not di: - names = m4_re.findall(code) - for i in names: - di[i] = env.get_flat(i) or env.get_flat(i.upper()) - - tsk.outputs[0].write(s % di) - -@feature('subst') -@before_method('process_source') -def apply_subst(self): - Utils.def_attrs(self, fun=subst_func) - lst = self.to_list(self.source) - self.meths.remove('process_source') - - self.dict = getattr(self, 'dict', {}) - - for filename in lst: - node = self.path.find_resource(filename) - if not node: raise Errors.WafError('cannot find input file %s for processing' % filename) - - if self.target: - newnode = self.path.find_or_declare(self.target) - else: - newnode = node.change_ext('') - - try: - self.dict = self.dict.get_merged_dict() - except AttributeError: - pass - - if self.dict and not self.env['DICT_HASH']: - self.env = self.env.derive() - keys = list(self.dict.keys()) - keys.sort() - lst = [self.dict[x] for x in keys] - self.env['DICT_HASH'] = str(Utils.h_list(lst)) - - tsk = self.create_task('copy', node, newnode) - tsk.fun = self.fun - tsk.dict = self.dict - tsk.dep_vars = ['DICT_HASH'] - tsk.chmod = getattr(self, 'chmod', Utils.O644) - - if not tsk.env: - tsk.debug() - raise Errors.WafError('task without an environment') - -#################### -## command-output #### -#################### - -class cmd_arg(object): - """command-output arguments for 
representing files or folders""" - def __init__(self, name, template='%s'): - self.name = name - self.template = template - self.node = None - -class input_file(cmd_arg): - def find_node(self, base_path): - assert isinstance(base_path, Node.Node) - self.node = base_path.find_resource(self.name) - if self.node is None: - raise Errors.WafError("Input file %s not found in " % (self.name, base_path)) - - def get_path(self, env, absolute): - if absolute: - return self.template % self.node.abspath() - else: - return self.template % self.node.srcpath() - -class output_file(cmd_arg): - def find_node(self, base_path): - assert isinstance(base_path, Node.Node) - self.node = base_path.find_or_declare(self.name) - if self.node is None: - raise Errors.WafError("Output file %s not found in " % (self.name, base_path)) - - def get_path(self, env, absolute): - if absolute: - return self.template % self.node.abspath() - else: - return self.template % self.node.bldpath() - -class cmd_dir_arg(cmd_arg): - def find_node(self, base_path): - assert isinstance(base_path, Node.Node) - self.node = base_path.find_dir(self.name) - if self.node is None: - raise Errors.WafError("Directory %s not found in " % (self.name, base_path)) - -class input_dir(cmd_dir_arg): - def get_path(self, dummy_env, dummy_absolute): - return self.template % self.node.abspath() - -class output_dir(cmd_dir_arg): - def get_path(self, env, dummy_absolute): - return self.template % self.node.abspath() - - -class command_output(Task.Task): - color = "BLUE" - def __init__(self, env, command, command_node, command_args, stdin, stdout, cwd, os_env, stderr): - Task.Task.__init__(self, env=env) - assert isinstance(command, (str, Node.Node)) - self.command = command - self.command_args = command_args - self.stdin = stdin - self.stdout = stdout - self.cwd = cwd - self.os_env = os_env - self.stderr = stderr - - if command_node is not None: self.dep_nodes = [command_node] - self.dep_vars = [] # additional environment variables to look - - def run(self): - task = self - #assert len(task.inputs) > 0 - - def input_path(node, template): - if task.cwd is None: - return template % node.bldpath() - else: - return template % node.abspath() - def output_path(node, template): - fun = node.abspath - if task.cwd is None: fun = node.bldpath - return template % fun() - - if isinstance(task.command, Node.Node): - argv = [input_path(task.command, '%s')] - else: - argv = [task.command] - - for arg in task.command_args: - if isinstance(arg, str): - argv.append(arg) - else: - assert isinstance(arg, cmd_arg) - argv.append(arg.get_path(task.env, (task.cwd is not None))) - - if task.stdin: - stdin = open(input_path(task.stdin, '%s')) - else: - stdin = None - - if task.stdout: - stdout = open(output_path(task.stdout, '%s'), "w") - else: - stdout = None - - if task.stderr: - stderr = open(output_path(task.stderr, '%s'), "w") - else: - stderr = None - - if task.cwd is None: - cwd = ('None (actually %r)' % os.getcwd()) - else: - cwd = repr(task.cwd) - debug("command-output: cwd=%s, stdin=%r, stdout=%r, argv=%r" % - (cwd, stdin, stdout, argv)) - - if task.os_env is None: - os_env = os.environ - else: - os_env = task.os_env - command = Utils.subprocess.Popen(argv, stdin=stdin, stdout=stdout, stderr=stderr, cwd=task.cwd, env=os_env) - return command.wait() - -@feature('command-output') -def init_cmd_output(self): - Utils.def_attrs(self, - stdin = None, - stdout = None, - stderr = None, - # the command to execute - command = None, - - # whether it is an external command; otherwise it 
is assumed - # to be an executable binary or script that lives in the - # source or build tree. - command_is_external = False, - - # extra parameters (argv) to pass to the command (excluding - # the command itself) - argv = [], - - # dependencies to other objects -> this is probably not what you want (ita) - # values must be 'task_gen' instances (not names!) - dependencies = [], - - # dependencies on env variable contents - dep_vars = [], - - # input files that are implicit, i.e. they are not - # stdin, nor are they mentioned explicitly in argv - hidden_inputs = [], - - # output files that are implicit, i.e. they are not - # stdout, nor are they mentioned explicitly in argv - hidden_outputs = [], - - # change the subprocess to this cwd (must use obj.input_dir() or output_dir() here) - cwd = None, - - # OS environment variables to pass to the subprocess - # if None, use the default environment variables unchanged - os_env = None) - -@feature('command-output') -@after_method('init_cmd_output') -def apply_cmd_output(self): - if self.command is None: - raise Errors.WafError("command-output missing command") - if self.command_is_external: - cmd = self.command - cmd_node = None - else: - cmd_node = self.path.find_resource(self.command) - assert cmd_node is not None, ('''Could not find command '%s' in source tree. -Hint: if this is an external command, -use command_is_external=True''') % (self.command,) - cmd = cmd_node - - if self.cwd is None: - cwd = None - - args = [] - inputs = [] - outputs = [] - - for arg in self.argv: - if isinstance(arg, cmd_arg): - arg.find_node(self.path) - if isinstance(arg, input_file): - inputs.append(arg.node) - if isinstance(arg, output_file): - outputs.append(arg.node) - - if self.stdout is None: - stdout = None - else: - assert isinstance(self.stdout, str) - stdout = self.path.find_or_declare(self.stdout) - if stdout is None: - raise Errors.WafError("File %s not found" % (self.stdout,)) - outputs.append(stdout) - - if self.stderr is None: - stderr = None - else: - assert isinstance(self.stderr, str) - stderr = self.path.find_or_declare(self.stderr) - if stderr is None: - raise Errors.WafError("File %s not found" % (self.stderr,)) - outputs.append(stderr) - - if self.stdin is None: - stdin = None - else: - assert isinstance(self.stdin, str) - stdin = self.path.find_resource(self.stdin) - if stdin is None: - raise Errors.WafError("File %s not found" % (self.stdin,)) - inputs.append(stdin) - - for hidden_input in self.to_list(self.hidden_inputs): - node = self.path.find_resource(hidden_input) - if node is None: - raise Errors.WafError("File %s not found in dir %s" % (hidden_input, self.path)) - inputs.append(node) - - for hidden_output in self.to_list(self.hidden_outputs): - node = self.path.find_or_declare(hidden_output) - if node is None: - raise Errors.WafError("File %s not found in dir %s" % (hidden_output, self.path)) - outputs.append(node) - - if not (inputs or getattr(self, 'no_inputs', None)): - raise Errors.WafError('command-output objects must have at least one input file or give self.no_inputs') - if not (outputs or getattr(self, 'no_outputs', None)): - raise Errors.WafError('command-output objects must have at least one output file or give self.no_outputs') - - cwd = self.bld.variant_dir - task = command_output(self.env, cmd, cmd_node, self.argv, stdin, stdout, cwd, self.os_env, stderr) - task.generator = self - copy_attrs(self, task, 'before after ext_in ext_out', only_if_set=True) - self.tasks.append(task) - - task.inputs = inputs - task.outputs = 
outputs - task.dep_vars = self.to_list(self.dep_vars) - - for dep in self.dependencies: - assert dep is not self - dep.post() - for dep_task in dep.tasks: - task.set_run_after(dep_task) - - if not task.inputs: - # the case for svnversion, always run, and update the output nodes - task.runnable_status = type(Task.TaskBase.run)(runnable_status, task, task.__class__) # always run - task.post_run = type(Task.TaskBase.run)(post_run, task, task.__class__) - - # TODO the case with no outputs? - -def post_run(self): - for x in self.outputs: - x.sig = Utils.h_file(x.abspath()) - -def runnable_status(self): - return self.RUN_ME - -Task.task_factory('copy', vars=[], func=action_process_file_func) - diff --git a/waflib/extras/msvc_pdb.py b/waflib/extras/msvc_pdb.py new file mode 100644 index 0000000000..077656b4f7 --- /dev/null +++ b/waflib/extras/msvc_pdb.py @@ -0,0 +1,46 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Rafaël Kooi 2019 + +from waflib import TaskGen + +@TaskGen.feature('c', 'cxx', 'fc') +@TaskGen.after_method('propagate_uselib_vars') +def add_pdb_per_object(self): + """For msvc/fortran, specify a unique compile pdb per object, to work + around LNK4099. Flags are updated with a unique /Fd flag based on the + task output name. This is separate from the link pdb. + """ + if not hasattr(self, 'compiled_tasks'): + return + + link_task = getattr(self, 'link_task', None) + + for task in self.compiled_tasks: + if task.inputs and task.inputs[0].name.lower().endswith('.rc'): + continue + + add_pdb = False + for flagname in ('CFLAGS', 'CXXFLAGS', 'FCFLAGS'): + # several languages may be used at once + for flag in task.env[flagname]: + if flag[1:].lower() == 'zi': + add_pdb = True + break + + if add_pdb: + node = task.outputs[0].change_ext('.pdb') + pdb_flag = '/Fd:' + node.abspath() + + for flagname in ('CFLAGS', 'CXXFLAGS', 'FCFLAGS'): + buf = [pdb_flag] + for flag in task.env[flagname]: + if flag[1:3] == 'Fd' or flag[1:].lower() == 'fs' or flag[1:].lower() == 'mp': + continue + buf.append(flag) + task.env[flagname] = buf + + if link_task and not node in link_task.dep_nodes: + link_task.dep_nodes.append(node) + if not node in task.outputs: + task.outputs.append(node) diff --git a/waflib/extras/msvcdeps.py b/waflib/extras/msvcdeps.py index 98b06776d0..263596a24f 100644 --- a/waflib/extras/msvcdeps.py +++ b/waflib/extras/msvcdeps.py @@ -14,19 +14,24 @@ The technique of gutting scan() and pushing the dependency calculation down to post_run() is cribbed from gccdeps.py. + +This affects the cxx class, so make sure to load Qt5 after this tool. 
+ +Usage:: + + def options(opt): + opt.load('compiler_cxx') + def configure(conf): + conf.load('compiler_cxx msvcdeps') ''' -import os -import sys -import tempfile -import threading +import os, sys, tempfile, threading from waflib import Context, Errors, Logs, Task, Utils from waflib.Tools import c_preproc, c, cxx, msvc from waflib.TaskGen import feature, before_method lock = threading.Lock() -nodes = {} # Cache the path -> Node lookup PREPROCESSOR_FLAG = '/showIncludes' INCLUDE_PATTERN = 'Note: including file:' @@ -37,226 +42,255 @@ @feature('c', 'cxx') @before_method('process_source') def apply_msvcdeps_flags(taskgen): - if taskgen.env.CC_NAME not in supported_compilers: - return + if taskgen.env.CC_NAME not in supported_compilers: + return + + for flag in ('CFLAGS', 'CXXFLAGS'): + if taskgen.env.get_flat(flag).find(PREPROCESSOR_FLAG) < 0: + taskgen.env.append_value(flag, PREPROCESSOR_FLAG) + + +def get_correct_path_case(base_path, path): + ''' + Return a case-corrected version of ``path`` by searching the filesystem for + ``path``, relative to ``base_path``, using the case returned by the filesystem. + ''' + components = Utils.split_path(path) + + corrected_path = '' + if os.path.isabs(path): + corrected_path = components.pop(0).upper() + os.sep - for flag in ('CFLAGS', 'CXXFLAGS'): - if taskgen.env.get_flat(flag).find(PREPROCESSOR_FLAG) < 0: - taskgen.env.append_value(flag, PREPROCESSOR_FLAG) + for part in components: + part = part.lower() + search_path = os.path.join(base_path, corrected_path) + if part == '..': + corrected_path = os.path.join(corrected_path, part) + search_path = os.path.normpath(search_path) + continue + + for item in sorted(os.listdir(search_path)): + if item.lower() == part: + corrected_path = os.path.join(corrected_path, item) + break + else: + raise ValueError("Can't find %r in %r" % (part, search_path)) + + return corrected_path - # Figure out what casing conventions the user's shell used when - # launching Waf - (drive, _) = os.path.splitdrive(taskgen.bld.srcnode.abspath()) - taskgen.msvcdeps_drive_lowercase = drive == drive.lower() def path_to_node(base_node, path, cached_nodes): - # Take the base node and the path and return a node - # Results are cached because searching the node tree is expensive - # The following code is executed by threads, it is not safe, so a lock is needed... - if getattr(path, '__hash__'): - node_lookup_key = (base_node, path) - else: - # Not hashable, assume it is a list and join into a string - node_lookup_key = (base_node, os.path.sep.join(path)) - try: - lock.acquire() - node = cached_nodes[node_lookup_key] - except KeyError: - node = base_node.find_resource(path) - cached_nodes[node_lookup_key] = node - finally: - lock.release() - return node + ''' + Take the base node and the path and return a node + Results are cached because searching the node tree is expensive + The following code is executed by threads, it is not safe, so a lock is needed... + ''' + # normalize the path to remove parent path components (..) 
+ path = os.path.normpath(path) + + # normalize the path case to increase likelihood of a cache hit + node_lookup_key = (base_node, os.path.normcase(path)) + + try: + node = cached_nodes[node_lookup_key] + except KeyError: + # retry with lock on cache miss + with lock: + try: + node = cached_nodes[node_lookup_key] + except KeyError: + path = get_correct_path_case(base_node.abspath(), path) + node = cached_nodes[node_lookup_key] = base_node.find_node(path) + + return node + +def post_run(self): + if self.env.CC_NAME not in supported_compilers: + return super(self.derived_msvcdeps, self).post_run() + + # TODO this is unlikely to work with netcache + if getattr(self, 'cached', None): + return Task.Task.post_run(self) + + resolved_nodes = [] + unresolved_names = [] + bld = self.generator.bld + + # Dynamically bind to the cache + try: + cached_nodes = bld.cached_nodes + except AttributeError: + cached_nodes = bld.cached_nodes = {} + + for path in self.msvcdeps_paths: + node = None + if os.path.isabs(path): + node = path_to_node(bld.root, path, cached_nodes) + else: + # when calling find_resource, make sure the path does not begin with '..' + base_node = bld.bldnode + path = [k for k in Utils.split_path(path) if k and k != '.'] + while path[0] == '..': + path.pop(0) + base_node = base_node.parent + path = os.sep.join(path) + + node = path_to_node(base_node, path, cached_nodes) + + if not node: + raise ValueError('could not find %r for %r' % (path, self)) + else: + if not c_preproc.go_absolute: + if not (node.is_child_of(bld.srcnode) or node.is_child_of(bld.bldnode)): + # System library + Logs.debug('msvcdeps: Ignoring system include %r', node) + continue + + if id(node) == id(self.inputs[0]): + # ignore the source file, it is already in the dependencies + # this way, successful config tests may be retrieved from the cache + continue + + resolved_nodes.append(node) + + Logs.debug('deps: msvcdeps for %s returned %s', self, resolved_nodes) + + bld.node_deps[self.uid()] = resolved_nodes + bld.raw_deps[self.uid()] = unresolved_names + + try: + del self.cache_sig + except AttributeError: + pass + + Task.Task.post_run(self) + +def scan(self): + if self.env.CC_NAME not in supported_compilers: + return super(self.derived_msvcdeps, self).scan() + + resolved_nodes = self.generator.bld.node_deps.get(self.uid(), []) + unresolved_names = [] + return (resolved_nodes, unresolved_names) + +def sig_implicit_deps(self): + if self.env.CC_NAME not in supported_compilers: + return super(self.derived_msvcdeps, self).sig_implicit_deps() + bld = self.generator.bld + + try: + return self.compute_sig_implicit_deps() + except Errors.TaskNotReady: + raise ValueError("Please specify the build order precisely with msvcdeps (c/c++ tasks)") + except EnvironmentError: + # If a file is renamed, assume the dependencies are stale and must be recalculated + for x in bld.node_deps.get(self.uid(), []): + if not x.is_bld() and not x.exists(): + try: + del x.parent.children[x.name] + except KeyError: + pass + + key = self.uid() + bld.node_deps[key] = [] + bld.raw_deps[key] = [] + return Utils.SIG_NIL + +def exec_command(self, cmd, **kw): + if self.env.CC_NAME not in supported_compilers: + return super(self.derived_msvcdeps, self).exec_command(cmd, **kw) + + if not 'cwd' in kw: + kw['cwd'] = self.get_cwd() + + if self.env.PATH: + env = kw['env'] = dict(kw.get('env') or self.env.env or os.environ) + env['PATH'] = self.env.PATH if isinstance(self.env.PATH, str) else os.pathsep.join(self.env.PATH) + + # The Visual Studio IDE adds an 
environment variable that causes + # the MS compiler to send its textual output directly to the + # debugging window rather than normal stdout/stderr. + # + # This is unrecoverably bad for this tool because it will cause + # all the dependency scanning to see an empty stdout stream and + # assume that the file being compiled uses no headers. + # + # See http://blogs.msdn.com/b/freik/archive/2006/04/05/569025.aspx + # + # Attempting to repair the situation by deleting the offending + # envvar at this point in tool execution will not be good enough-- + # its presence poisons the 'waf configure' step earlier. We just + # want to put a sanity check here in order to help developers + # quickly diagnose the issue if an otherwise-good Waf tree + # is then executed inside the MSVS IDE. + assert 'VS_UNICODE_OUTPUT' not in kw['env'] + + cmd, args = self.split_argfile(cmd) + try: + (fd, tmp) = tempfile.mkstemp() + os.write(fd, '\r\n'.join(args).encode()) + os.close(fd) + + self.msvcdeps_paths = [] + kw['env'] = kw.get('env', os.environ.copy()) + kw['cwd'] = kw.get('cwd', os.getcwd()) + kw['quiet'] = Context.STDOUT + kw['output'] = Context.STDOUT + + out = [] + if Logs.verbose: + Logs.debug('argfile: @%r -> %r', tmp, args) + try: + raw_out = self.generator.bld.cmd_and_log(cmd + ['@' + tmp], **kw) + ret = 0 + except Errors.WafError as e: + # Use e.msg if e.stdout is not set + raw_out = getattr(e, 'stdout', e.msg) + + # Return non-zero error code even if we didn't + # get one from the exception object + ret = getattr(e, 'returncode', 1) + + Logs.debug('msvcdeps: Running for: %s' % self.inputs[0]) + for line in raw_out.splitlines(): + if line.startswith(INCLUDE_PATTERN): + # Only strip whitespace after log to preserve + # dependency structure in debug output + inc_path = line[len(INCLUDE_PATTERN):] + Logs.debug('msvcdeps: Regex matched %s', inc_path) + self.msvcdeps_paths.append(inc_path.strip()) + else: + out.append(line) + + # Pipe through the remaining stdout content (not related to /showIncludes) + if self.generator.bld.logger: + self.generator.bld.logger.debug('out: %s' % os.linesep.join(out)) + elif len(out) > 1: + # msvc will output the input file name by default, which is not useful + # in the single-file case as waf will already print task. For multi-file + # inputs or other messages, allow the full message to be forwarded. + Logs.info(os.linesep.join(out), extra={'stream':sys.stdout, 'c1': ''}) + + return ret + finally: + try: + os.remove(tmp) + except OSError: + # anti-virus and indexers can keep files open -_- + pass + -''' -Register a task subclass that has hooks for running our custom -dependency calculations rather than the C/C++ stock c_preproc -method. 
-''' def wrap_compiled_task(classname): - derived_class = type(classname, (Task.classes[classname],), {}) - - def post_run(self): - if self.env.CC_NAME not in supported_compilers: - return super(derived_class, self).post_run() - - if getattr(self, 'cached', None): - return Task.Task.post_run(self) - - bld = self.generator.bld - unresolved_names = [] - resolved_nodes = [] - - lowercase = self.generator.msvcdeps_drive_lowercase - correct_case_path = bld.path.abspath() - correct_case_path_len = len(correct_case_path) - correct_case_path_norm = os.path.normcase(correct_case_path) - - # Dynamically bind to the cache - try: - cached_nodes = bld.cached_nodes - except AttributeError: - cached_nodes = bld.cached_nodes = {} - - for path in self.msvcdeps_paths: - node = None - if os.path.isabs(path): - # Force drive letter to match conventions of main source tree - drive, tail = os.path.splitdrive(path) - - if os.path.normcase(path[:correct_case_path_len]) == correct_case_path_norm: - # Path is in the sandbox, force it to be correct. MSVC sometimes returns a lowercase path. - path = correct_case_path + path[correct_case_path_len:] - else: - # Check the drive letter - if lowercase and (drive != drive.lower()): - path = drive.lower() + tail - elif (not lowercase) and (drive != drive.upper()): - path = drive.upper() + tail - node = path_to_node(bld.root, path, cached_nodes) - else: - base_node = bld.bldnode - # when calling find_resource, make sure the path does not begin by '..' - path = [k for k in Utils.split_path(path) if k and k != '.'] - while path[0] == '..': - path = path[1:] - base_node = base_node.parent - - node = path_to_node(base_node, path, cached_nodes) - - if not node: - raise ValueError('could not find %r for %r' % (path, self)) - else: - if not c_preproc.go_absolute: - if not (node.is_child_of(bld.srcnode) or node.is_child_of(bld.bldnode)): - # System library - Logs.debug('msvcdeps: Ignoring system include %r' % node) - continue - - if id(node) == id(self.inputs[0]): - # Self-dependency - continue - - resolved_nodes.append(node) - - bld.node_deps[self.uid()] = resolved_nodes - bld.raw_deps[self.uid()] = unresolved_names - - try: - del self.cache_sig - except: - pass - - Task.Task.post_run(self) - - def scan(self): - if self.env.CC_NAME not in supported_compilers: - return super(derived_class, self).scan() - - resolved_nodes = self.generator.bld.node_deps.get(self.uid(), []) - unresolved_names = [] - return (resolved_nodes, unresolved_names) - - def sig_implicit_deps(self): - if self.env.CC_NAME not in supported_compilers: - return super(derived_class, self).sig_implicit_deps() - - try: - return Task.Task.sig_implicit_deps(self) - except Errors.WafError: - return Utils.SIG_NIL - - def exec_response_command(self, cmd, **kw): - # exec_response_command() is only called from inside msvc.py anyway - assert self.env.CC_NAME in supported_compilers - - # Only bother adding '/showIncludes' to compile tasks - if isinstance(self, (c.c, cxx.cxx)): - try: - # The Visual Studio IDE adds an environment variable that causes - # the MS compiler to send its textual output directly to the - # debugging window rather than normal stdout/stderr. - # - # This is unrecoverably bad for this tool because it will cause - # all the dependency scanning to see an empty stdout stream and - # assume that the file being compiled uses no headers. 
- # - # See http://blogs.msdn.com/b/freik/archive/2006/04/05/569025.aspx - # - # Attempting to repair the situation by deleting the offending - # envvar at this point in tool execution will not be good enough-- - # its presence poisons the 'waf configure' step earlier. We just - # want to put a sanity check here in order to help developers - # quickly diagnose the issue if an otherwise-good Waf tree - # is then executed inside the MSVS IDE. - assert 'VS_UNICODE_OUTPUT' not in kw['env'] - - tmp = None - - # This block duplicated from Waflib's msvc.py - if sys.platform.startswith('win') and isinstance(cmd, list) and len(' '.join(cmd)) >= 8192: - program = cmd[0] - cmd = [self.quote_response_command(x) for x in cmd] - (fd, tmp) = tempfile.mkstemp() - os.write(fd, '\r\n'.join(i.replace('\\', '\\\\') for i in cmd[1:]).encode()) - os.close(fd) - cmd = [program, '@' + tmp] - # ... end duplication - - self.msvcdeps_paths = [] - - kw['env'] = kw.get('env', os.environ.copy()) - kw['cwd'] = kw.get('cwd', os.getcwd()) - kw['quiet'] = Context.STDOUT - kw['output'] = Context.STDOUT - - out = [] - - try: - raw_out = self.generator.bld.cmd_and_log(cmd, **kw) - ret = 0 - except Errors.WafError as e: - raw_out = e.stdout - ret = e.returncode - - for line in raw_out.splitlines(): - if line.startswith(INCLUDE_PATTERN): - inc_path = line[len(INCLUDE_PATTERN):].strip() - Logs.debug('msvcdeps: Regex matched %s' % inc_path) - self.msvcdeps_paths.append(inc_path) - else: - out.append(line) - - # Pipe through the remaining stdout content (not related to /showIncludes) - if self.generator.bld.logger: - self.generator.bld.logger.debug('out: %s' % os.linesep.join(out)) - else: - sys.stdout.write(os.linesep.join(out) + os.linesep) - - finally: - if tmp: - try: - os.remove(tmp) - except OSError: - pass - - return ret - else: - # Use base class's version of this method for linker tasks - return super(derived_class, self).exec_response_command(cmd, **kw) - - def can_retrieve_cache(self): - # msvcdeps and netcaching are incompatible, so disable the cache - if self.env.CC_NAME not in supported_compilers: - return super(derived_class, self).can_retrieve_cache() - self.nocache = True # Disable sending the file to the cache - return False - - derived_class.post_run = post_run - derived_class.scan = scan - derived_class.sig_implicit_deps = sig_implicit_deps - derived_class.exec_response_command = exec_response_command - derived_class.can_retrieve_cache = can_retrieve_cache + derived_class = type(classname, (Task.classes[classname],), {}) + derived_class.derived_msvcdeps = derived_class + derived_class.post_run = post_run + derived_class.scan = scan + derived_class.sig_implicit_deps = sig_implicit_deps + derived_class.exec_command = exec_command for k in ('c', 'cxx'): - wrap_compiled_task(k) + if k in Task.classes: + wrap_compiled_task(k) + +def options(opt): + raise ValueError('Do not load msvcdeps options') diff --git a/waflib/extras/msvs.py b/waflib/extras/msvs.py index 31abd7bb22..f987bb57b6 100644 --- a/waflib/extras/msvs.py +++ b/waflib/extras/msvs.py @@ -41,34 +41,42 @@ def options(conf): To generate solution files: $ waf configure msvs -To customize the outputs, provide subclasses in your wscript files: - -from waflib.extras import msvs -class vsnode_target(msvs.vsnode_target): - def get_build_command(self, props): - # likely to be required - return "waf.bat build" - def collect_source(self): - # likely to be required - ... 
-class msvs_bar(msvs.msvs_generator): - def init(self): - msvs.msvs_generator.init(self) - self.vsnode_target = vsnode_target +To customize the outputs, provide subclasses in your wscript files:: + + from waflib.extras import msvs + class vsnode_target(msvs.vsnode_target): + def get_build_command(self, props): + # likely to be required + return "waf.bat build" + def collect_source(self): + # likely to be required + ... + class msvs_bar(msvs.msvs_generator): + def init(self): + msvs.msvs_generator.init(self) + self.vsnode_target = vsnode_target The msvs class re-uses the same build() function for reading the targets (task generators), -you may therefore specify msvs settings on the context object: +you may therefore specify msvs settings on the context object:: -def build(bld): - bld.solution_name = 'foo.sln' - bld.waf_command = 'waf.bat' - bld.projects_dir = bld.srcnode.make_node('.depproj') - bld.projects_dir.mkdir() + def build(bld): + bld.solution_name = 'foo.sln' + bld.waf_command = 'waf.bat' + bld.projects_dir = bld.srcnode.make_node('.depproj') + bld.projects_dir.mkdir() For visual studio 2008, the command is called 'msvs2008', and the classes such as vsnode_target are wrapped by a decorator class 'wrap_2008' to provide special functionality. +To customize platform toolsets, pass additional parameters, for example:: + + class msvs_2013(msvs.msvs_generator): + cmd = 'msvs2013' + numver = '13.00' + vsver = '2013' + platform_toolset_ver = 'v120' + ASSUMPTIONS: * a project can be either a directory or a target, vcxproj files are written only for targets that have source files * each project is a vcxproj file, therefore the project uuid needs only to be a hash of the absolute path @@ -105,7 +113,7 @@ def build(bld): Makefile ${b.outdir} - v110 + ${project.platform_toolset_ver} ${endfor} @@ -150,7 +158,7 @@ def build(bld): ${for x in project.source} - <${project.get_key(x)} Include='${x.abspath()}' /> + <${project.get_key(x)} Include='${x.win32path()}' /> ${endfor} @@ -163,7 +171,7 @@ def build(bld): ${for x in project.source} - <${project.get_key(x)} Include="${x.abspath()}"> + <${project.get_key(x)} Include="${x.win32path()}"> ${project.get_filter_name(x.parent)} ${endfor} @@ -171,7 +179,7 @@ def build(bld): ${for x in project.dirs()} - {${project.make_uuid(x.abspath())}} + {${project.make_uuid(x.win32path())}} ${endfor} @@ -293,7 +301,8 @@ def compile_template(line): extr = [] def repl(match): g = match.group - if g('dollar'): return "$" + if g('dollar'): + return "$" elif g('backslash'): return "\\" elif g('subst'): @@ -318,14 +327,14 @@ def app(txt): app("lst.append(%r)" % params[x]) f = extr[x] - if f.startswith('if') or f.startswith('for'): + if f.startswith(('if', 'for')): app(f + ':') indent += 1 elif f.startswith('py:'): app(f[3:]) - elif f.startswith('endif') or f.startswith('endfor'): + elif f.startswith(('endif', 'endfor')): indent -= 1 - elif f.startswith('else') or f.startswith('elif'): + elif f.startswith(('else', 'elif')): indent -= 1 app(f + ':') indent += 1 @@ -351,20 +360,20 @@ def rm_blank_lines(txt): BOM = '\xef\xbb\xbf' try: - BOM = bytes(BOM, 'iso8859-1') # python 3 + BOM = bytes(BOM, 'latin-1') # python 3 except TypeError: pass def stealth_write(self, data, flags='wb'): try: - x = unicode + unicode except NameError: data = data.encode('utf-8') # python 3 else: data = data.decode(sys.getfilesystemencoding(), 'replace') data = data.encode('utf-8') - if self.name.endswith('.vcproj') or self.name.endswith('.vcxproj'): + if self.name.endswith(('.vcproj', '.vcxproj')): 
data = BOM + data try: @@ -374,9 +383,18 @@ def stealth_write(self, data, flags='wb'): except (IOError, ValueError): self.write(data, flags=flags) else: - Logs.debug('msvs: skipping %s' % self.abspath()) + Logs.debug('msvs: skipping %s', self.win32path()) Node.Node.stealth_write = stealth_write +re_win32 = re.compile(r'^([/\\]cygdrive)?[/\\]([a-z])([^a-z0-9_-].*)', re.I) +def win32path(self): + p = self.abspath() + m = re_win32.match(p) + if m: + return "%s:%s" % (m.group(2).upper(), m.group(3)) + return p +Node.Node.win32path = win32path + re_quote = re.compile("[^a-zA-Z0-9-]") def quote(s): return re_quote.sub("_", s) @@ -452,7 +470,7 @@ def get_waf(self): """ Override in subclasses... """ - return 'cd /d "%s" & %s' % (self.ctx.srcnode.abspath(), getattr(self.ctx, 'waf_command', 'waf.bat')) + return 'cd /d "%s" & %s' % (self.ctx.srcnode.win32path(), getattr(self.ctx, 'waf_command', 'waf.bat')) def ptype(self): """ @@ -498,9 +516,10 @@ def ptype(self): def __init__(self, ctx, node): vsnode.__init__(self, ctx) self.path = node - self.uuid = make_uuid(node.abspath()) + self.uuid = make_uuid(node.win32path()) self.name = node.name - self.title = self.path.abspath() + self.platform_toolset_ver = getattr(ctx, 'platform_toolset_ver', None) + self.title = self.path.win32path() self.source = [] # list of node objects self.build_properties = [] # list of properties (nmake commands, output dir, etc) @@ -519,7 +538,7 @@ def add(x): return lst def write(self): - Logs.debug('msvs: creating %r' % self.path) + Logs.debug('msvs: creating %r', self.path) # first write the project file template1 = compile_template(PROJECT_TEMPLATE) @@ -539,7 +558,7 @@ def get_key(self, node): required for writing the source files """ name = node.name - if name.endswith('.cpp') or name.endswith('.c'): + if name.endswith(('.cpp', '.c')): return 'ClCompile' return 'ClInclude' @@ -564,7 +583,7 @@ def collect_properties(self): self.build_properties = ret def get_build_params(self, props): - opt = '--execsolution=%s' % self.ctx.get_solution_node().abspath() + opt = '--execsolution="%s"' % self.ctx.get_solution_node().win32path() return (self.get_waf(), opt) def get_build_command(self, props): @@ -619,10 +638,10 @@ def __init__(self, ctx, node, name='project_view'): vsnode_alias.__init__(self, ctx, node, name) self.tg = self.ctx() # fake one, cannot remove self.exclude_files = Node.exclude_regs + ''' -waf-1.8.* -waf3-1.8.*/** -.waf-1.8.* -.waf3-1.8.*/** +waf-2* +waf3-2*/** +.waf-2* +.waf3-2*/** **/*.sdf **/*.suo **/*.ncb @@ -662,7 +681,7 @@ def get_build_params(self, props): """ Override the default to add the target name """ - opt = '--execsolution=%s' % self.ctx.get_solution_node().abspath() + opt = '--execsolution="%s"' % self.ctx.get_solution_node().win32path() if getattr(self, 'tg', None): opt += " --targets=%s" % self.tg.name return (self.get_waf(), opt) @@ -681,7 +700,7 @@ def collect_source(self): # remove duplicates self.source.extend(list(set(source_files + include_files))) - self.source.sort(key=lambda x: x.abspath()) + self.source.sort(key=lambda x: x.win32path()) def collect_properties(self): """ @@ -689,7 +708,7 @@ def collect_properties(self): """ super(vsnode_target, self).collect_properties() for x in self.build_properties: - x.outdir = self.path.parent.abspath() + x.outdir = self.path.parent.win32path() x.preprocessor_definitions = '' x.includes_search_path = '' @@ -698,7 +717,7 @@ def collect_properties(self): except AttributeError: pass else: - x.output_file = tsk.outputs[0].abspath() + x.output_file = 
tsk.outputs[0].win32path() x.preprocessor_definitions = ';'.join(tsk.env.DEFINES) x.includes_search_path = ';'.join(self.tg.env.INCPATHS) @@ -706,6 +725,9 @@ class msvs_generator(BuildContext): '''generates a visual studio 2010 solution''' cmd = 'msvs' fun = 'build' + numver = '11.00' # Visual Studio Version Number + vsver = '2010' # Visual Studio Version Year + platform_toolset_ver = 'v110' # Platform Toolset Version Number def init(self): """ @@ -735,8 +757,9 @@ def init(self): if not getattr(self, 'vsnode_project_view', None): self.vsnode_project_view = vsnode_project_view - self.numver = '11.00' - self.vsver = '2010' + self.numver = self.__class__.numver + self.vsver = self.__class__.vsver + self.platform_toolset_ver = self.__class__.platform_toolset_ver def execute(self): """ @@ -764,9 +787,13 @@ def collect_projects(self): self.collect_dirs() default_project = getattr(self, 'default_project', None) def sortfun(x): - if x.name == default_project: + # folders should sort to the top + if getattr(x, 'VS_GUID_SOLUTIONFOLDER', None): return '' - return getattr(x, 'path', None) and x.path.abspath() or x.name + # followed by the default project + elif x.name == default_project: + return ' ' + return getattr(x, 'path', None) and x.path.win32path() or x.name self.all_projects.sort(key=sortfun) def write_files(self): @@ -780,7 +807,7 @@ def write_files(self): # and finally write the solution file node = self.get_solution_node() node.parent.mkdir() - Logs.warn('Creating %r' % node) + Logs.warn('Creating %r', node) template1 = compile_template(SOLUTION_TEMPLATE) sln_str = template1(self) sln_str = rm_blank_lines(sln_str) @@ -858,7 +885,7 @@ def add_aliases(self): p_view.collect_properties() self.all_projects.append(p_view) - n = self.vsnode_vsdir(self, make_uuid(self.srcnode.abspath() + 'build_aliases'), "build_aliases") + n = self.vsnode_vsdir(self, make_uuid(self.srcnode.win32path() + 'build_aliases'), "build_aliases") p_build.parent = p_install.parent = p_view.parent = n self.all_projects.append(n) @@ -879,7 +906,7 @@ def make_parents(proj): # There is not vsnode_vsdir for x. 
# So create a project representing the folder "x" - n = proj.parent = seen[x] = self.vsnode_vsdir(self, make_uuid(x.abspath()), x.name) + n = proj.parent = seen[x] = self.vsnode_vsdir(self, make_uuid(x.win32path()), x.name) n.iter_path = x.parent self.all_projects.append(n) @@ -934,7 +961,7 @@ def add_path(lst): def display(n): buf = [] for x in n.source: - buf.append('\n' % (xml_escape(x.abspath()), self.get_key(x))) + buf.append('\n' % (xml_escape(x.win32path()), self.get_key(x))) for x in n.subfilters: buf.append('' % xml_escape(x.name)) buf.append(display(x)) @@ -959,7 +986,7 @@ def get_key(self, node): return '' def write(self): - Logs.debug('msvs: creating %r' % self.path) + Logs.debug('msvs: creating %r', self.path) template1 = compile_template(self.project_template) proj_str = template1(self) proj_str = rm_blank_lines(proj_str) @@ -971,6 +998,8 @@ class msvs_2008_generator(msvs_generator): '''generates a visual studio 2008 solution''' cmd = 'msvs2008' fun = msvs_generator.fun + numver = '10.00' + vsver = '2008' def init(self): if not getattr(self, 'project_extension', None): @@ -988,8 +1017,6 @@ def init(self): self.vsnode_project_view = wrap_2008(vsnode_project_view) msvs_generator.init(self) - self.numver = '10.00' - self.vsver = '2008' def options(ctx): """ diff --git a/waflib/extras/netcache_client.py b/waflib/extras/netcache_client.py index 6b16e301ae..dc490485ac 100644 --- a/waflib/extras/netcache_client.py +++ b/waflib/extras/netcache_client.py @@ -40,7 +40,7 @@ def build(bld): def put_data(conn, data): if sys.hexversion > 0x3000000: - data = data.encode('iso8859-1') + data = data.encode('latin-1') cnt = 0 while cnt < len(data): sent = conn.send(data[cnt:]) @@ -107,8 +107,8 @@ def read_header(conn): buf.append(data) cnt += len(data) if sys.hexversion > 0x3000000: - ret = ''.encode('iso8859-1').join(buf) - ret = ret.decode('iso8859-1') + ret = ''.encode('latin-1').join(buf) + ret = ret.decode('latin-1') else: ret = ''.join(buf) return ret @@ -140,13 +140,13 @@ def check_cache(conn, ssig): cnt += len(data) if sys.hexversion > 0x3000000: - ret = ''.encode('iso8859-1').join(buf) - ret = ret.decode('iso8859-1') + ret = ''.encode('latin-1').join(buf) + ret = ret.decode('latin-1') else: ret = ''.join(buf) all_sigs_in_cache = (time.time(), ret.splitlines()) - Logs.debug('netcache: server cache has %r entries' % len(all_sigs_in_cache[1])) + Logs.debug('netcache: server cache has %r entries', len(all_sigs_in_cache[1])) if not ssig in all_sigs_in_cache[1]: raise ValueError('no file %s in cache' % ssig) @@ -215,26 +215,25 @@ def can_retrieve_cache(self): recv_file(conn, ssig, cnt, p) cnt += 1 except MissingFile as e: - Logs.debug('netcache: file is not in the cache %r' % e) + Logs.debug('netcache: file is not in the cache %r', e) err = True - except Exception as e: - Logs.debug('netcache: could not get the files %r' % e) + Logs.debug('netcache: could not get the files %r', self.outputs) + if Logs.verbose > 1: + Logs.debug('netcache: exception %r', e) err = True # broken connection? 
remove this one close_connection(conn) conn = None + else: + Logs.debug('netcache: obtained %r from cache', self.outputs) + finally: release_connection(conn) if err: return False - for node in self.outputs: - node.sig = sig - #if self.generator.bld.progress_bar < 1: - # self.generator.bld.to_log('restoring from cache %r\n' % node.abspath()) - self.cached = True return True @@ -263,8 +262,9 @@ def put_files_cache(self): if not conn: conn = get_connection(push=True) sock_send(conn, ssig, cnt, node.abspath()) + Logs.debug('netcache: sent %r', node) except Exception as e: - Logs.debug("netcache: could not push the files %r" % e) + Logs.debug('netcache: could not push the files %r', e) # broken connection? remove this one close_connection(conn) @@ -326,6 +326,8 @@ def make_cached(cls): m1 = cls.run def run(self): + if getattr(self, 'nocache', False): + return m1(self) if self.can_retrieve_cache(): return 0 return m1(self) @@ -333,10 +335,15 @@ def run(self): m2 = cls.post_run def post_run(self): + if getattr(self, 'nocache', False): + return m2(self) bld = self.generator.bld ret = m2(self) if bld.cache_global: self.put_files_cache() + if hasattr(self, 'chmod'): + for node in self.outputs: + os.chmod(node.abspath(), self.chmod) return ret cls.post_run = post_run diff --git a/waflib/extras/objcopy.py b/waflib/extras/objcopy.py index 923a7f2e37..bb7ca6ef22 100644 --- a/waflib/extras/objcopy.py +++ b/waflib/extras/objcopy.py @@ -3,7 +3,7 @@ """ Support for converting linked targets to ihex, srec or binary files using -objcopy. Use the 'objcopy' feature in conjuction with the 'cc' or 'cxx' +objcopy. Use the 'objcopy' feature in conjunction with the 'cc' or 'cxx' feature. The 'objcopy' feature uses the following attributes: objcopy_bfdname Target object format name (eg. ihex, srec, binary). @@ -15,7 +15,7 @@ """ from waflib.Utils import def_attrs -from waflib import Task +from waflib import Task, Options from waflib.TaskGen import feature, after_method class objcopy(Task.Task): @@ -24,7 +24,7 @@ class objcopy(Task.Task): @feature('objcopy') @after_method('apply_link') -def objcopy(self): +def map_objcopy(self): def_attrs(self, objcopy_bfdname = 'ihex', objcopy_target = None, @@ -34,9 +34,7 @@ def objcopy(self): link_output = self.link_task.outputs[0] if not self.objcopy_target: self.objcopy_target = link_output.change_ext('.' 
+ self.objcopy_bfdname).name - task = self.create_task('objcopy', - src=link_output, - tgt=self.path.find_or_declare(self.objcopy_target)) + task = self.create_task('objcopy', src=link_output, tgt=self.path.find_or_declare(self.objcopy_target)) task.env.append_unique('TARGET_BFDNAME', self.objcopy_bfdname) try: @@ -45,10 +43,11 @@ def objcopy(self): pass if self.objcopy_install_path: - self.bld.install_files(self.objcopy_install_path, - task.outputs[0], - env=task.env.derive()) + self.add_install_files(install_to=self.objcopy_install_path, install_from=task.outputs[0]) def configure(ctx): - objcopy = ctx.find_program('objcopy', var='OBJCOPY', mandatory=True) - + program_name = 'objcopy' + prefix = getattr(Options.options, 'cross_prefix', None) + if prefix: + program_name = '{}-{}'.format(prefix, program_name) + ctx.find_program(program_name, var='OBJCOPY', mandatory=True) diff --git a/waflib/extras/ocaml.py b/waflib/extras/ocaml.py index 14db45fc66..7d785c6f54 100644 --- a/waflib/extras/ocaml.py +++ b/waflib/extras/ocaml.py @@ -15,14 +15,17 @@ EXT_MLC = ['.c'] EXT_ML = ['.ml'] -open_re = re.compile('^\s*open\s+([a-zA-Z]+)(;;){0,1}$', re.M) +open_re = re.compile(r'^\s*open\s+([a-zA-Z]+)(;;){0,1}$', re.M) foo = re.compile(r"""(\(\*)|(\*\))|("(\\.|[^"\\])*"|'(\\.|[^'\\])*'|.[^()*"'\\]*)""", re.M) def filter_comments(txt): meh = [0] def repl(m): - if m.group(1): meh[0] += 1 - elif m.group(2): meh[0] -= 1 - elif not meh[0]: return m.group(0) + if m.group(1): + meh[0] += 1 + elif m.group(2): + meh[0] -= 1 + elif not meh[0]: + return m.group() return '' return foo.sub(repl, txt) @@ -42,7 +45,8 @@ def scan(self): nd = None for x in self.incpaths: nd = x.find_resource(name.lower()+'.ml') - if not nd: nd = x.find_resource(name+'.ml') + if not nd: + nd = x.find_resource(name+'.ml') if nd: found_lst.append(nd) break @@ -83,12 +87,14 @@ def init_envs_ml(self): self.native_env = None if self.type in native_lst: self.native_env = self.env.derive() - if self.islibrary: self.native_env['OCALINKFLAGS'] = '-a' + if self.islibrary: + self.native_env['OCALINKFLAGS'] = '-a' self.bytecode_env = None if self.type in bytecode_lst: self.bytecode_env = self.env.derive() - if self.islibrary: self.bytecode_env['OCALINKFLAGS'] = '-a' + if self.islibrary: + self.bytecode_env['OCALINKFLAGS'] = '-a' if self.type == 'c_object': self.native_env.append_unique('OCALINKFLAGS_OPT', '-output-obj') @@ -126,8 +132,10 @@ def apply_vars_ml(self): for vname in varnames: cnt = self.env[vname+'_'+name] if cnt: - if self.bytecode_env: self.bytecode_env.append_value(vname, cnt) - if self.native_env: self.native_env.append_value(vname, cnt) + if self.bytecode_env: + self.bytecode_env.append_value(vname, cnt) + if self.native_env: + self.native_env.append_value(vname, cnt) @feature('ocaml') @after_method('process_source') @@ -143,9 +151,12 @@ def apply_link_ml(self): self.linktasks.append(linktask) if self.native_env: - if self.type == 'c_object': ext = '.o' - elif self.islibrary: ext = '.cmxa' - else: ext = '' + if self.type == 'c_object': + ext = '.o' + elif self.islibrary: + ext = '.cmxa' + else: + ext = '' linktask = self.create_task('ocalinkx') linktask.set_outputs(self.path.find_or_declare(self.target + ext)) @@ -207,8 +218,10 @@ def compile_may_start(self): # the evil part is that we can only compute the dependencies after the # source files can be read (this means actually producing the source files) - if getattr(self, 'bytecode', ''): alltasks = self.generator.bytecode_tasks - else: alltasks = self.generator.native_tasks + if 
getattr(self, 'bytecode', ''): + alltasks = self.generator.bytecode_tasks + else: + alltasks = self.generator.native_tasks self.signature() # ensure that files are scanned - unfortunately tree = self.generator.bld @@ -216,7 +229,8 @@ def compile_may_start(self): lst = tree.node_deps[self.uid()] for depnode in lst: for t in alltasks: - if t == self: continue + if t == self: + continue if depnode in t.inputs: self.set_run_after(t) @@ -270,8 +284,10 @@ def base(self): def link_may_start(self): - if getattr(self, 'bytecode', 0): alltasks = self.generator.bytecode_tasks - else: alltasks = self.generator.native_tasks + if getattr(self, 'bytecode', 0): + alltasks = self.generator.bytecode_tasks + else: + alltasks = self.generator.native_tasks for x in alltasks: if not x.hasrun: @@ -286,7 +302,8 @@ def link_may_start(self): pendant = []+alltasks while pendant: task = pendant.pop(0) - if task in seen: continue + if task in seen: + continue for x in task.run_after: if not x in seen: pendant.append(task) diff --git a/waflib/extras/package.py b/waflib/extras/package.py index cb8345fb17..c06498eba8 100644 --- a/waflib/extras/package.py +++ b/waflib/extras/package.py @@ -64,13 +64,13 @@ def download_archive(self, src, dst): else: tmp = self.root.make_node(dst) tmp.write(web.read()) - Logs.warn('Downloaded %s from %s' % (tmp.abspath(), url)) + Logs.warn('Downloaded %s from %s', tmp.abspath(), url) break else: self.fatal('Could not get the package %s' % src) @conf def load_packages(self): - cache = self.get_package_cache_dir() + self.get_package_cache_dir() # read the dependencies, get the archives, .. diff --git a/waflib/extras/parallel_debug.py b/waflib/extras/parallel_debug.py index 1b99a889f2..4ffec5e53e 100644 --- a/waflib/extras/parallel_debug.py +++ b/waflib/extras/parallel_debug.py @@ -3,22 +3,24 @@ # Thomas Nagy, 2007-2010 (ita) """ -Debugging helper for parallel compilation, outputs -a file named pdebug.svg in the source directory:: +Debugging helper for parallel compilation. + +Copy it to your project and load it with:: def options(opt): - opt.load('parallel_debug') + opt.load('parallel_debug', tooldir='.') def build(bld): ... -""" -import os, time, sys, re -try: from Queue import Queue -except: from queue import Queue -from waflib import Runner, Options, Utils, Task, Logs, Errors +The build will then output a file named pdebug.svg in the source directory. 
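The SVG layout can then be tuned through the options registered at the bottom of
this file; the values below are only illustrative::

	waf clean build -j4 --dtitle="demo build" --dband=22 --dmaxtime=0 --dnotooltip

--dnotooltip suppresses the hover tooltips embedded in the generated SVG, and
--dmaxtime fixes the time scale so that diagrams from different runs can be compared.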
+""" -#import random -#random.seed(100) +import re, sys, threading, time, traceback +try: + from Queue import Queue +except: + from queue import Queue +from waflib import Runner, Options, Task, Logs, Errors SVG_TEMPLATE = """ @@ -38,7 +40,7 @@ def build(bld): if (x) { g.setAttribute('class', g.getAttribute('class') + ' over'); x.setAttribute('class', x.getAttribute('class') + ' over'); - showInfo(e, g.id); + showInfo(e, g.id, e.target.attributes.tooltip.value); } }, false); @@ -52,11 +54,12 @@ def build(bld): } }, false); -function showInfo(evt, txt) { +function showInfo(evt, txt, details) { +${if project.tooltip} tooltip = document.getElementById('tooltip'); var t = document.getElementById('tooltiptext'); - t.firstChild.data = txt; + t.firstChild.data = txt + " " + details; var x = evt.clientX + 9; if (x > 250) { x -= t.getComputedTextLength() + 16; } @@ -66,6 +69,7 @@ def build(bld): var r = document.getElementById('tooltiprect'); r.setAttribute('width', t.getComputedTextLength() + 6); +${endif} } function hideInfo(evt) { @@ -77,8 +81,7 @@ def build(bld): + style="font-size:10;fill:#ffffff;fill-opacity:0.01;fill-rule:evenodd;stroke:#ffffff;"> ${if project.title} ${for rect in cls.rects} - + ${endfor} ${endfor} @@ -103,10 +106,12 @@ def build(bld): ${endfor} +${if project.tooltip} - + +${endif} """ @@ -125,7 +130,8 @@ def compile_template(line): extr = [] def repl(match): g = match.group - if g('dollar'): return "$" + if g('dollar'): + return "$" elif g('backslash'): return "\\" elif g('subst'): @@ -150,14 +156,14 @@ def app(txt): app("lst.append(%r)" % params[x]) f = extr[x] - if f.startswith('if') or f.startswith('for'): + if f.startswith(('if', 'for')): app(f + ':') indent += 1 elif f.startswith('py:'): app(f[3:]) - elif f.startswith('endif') or f.startswith('endfor'): + elif f.startswith(('endif', 'endfor')): indent -= 1 - elif f.startswith('else') or f.startswith('elif'): + elif f.startswith(('else', 'elif')): indent -= 1 app(f + ':') indent += 1 @@ -207,31 +213,23 @@ def map_to_color(name): return color2code['RED'] def process(self): - m = self.master - if m.stop: - m.out.put(self) - return - - self.master.set_running(1, id(Utils.threading.currentThread()), self) - - # remove the task signature immediately before it is executed - # in case of failure the task will be executed again + m = self.generator.bld.producer try: + # TODO another place for this? 
del self.generator.bld.task_sigs[self.uid()] - except: + except KeyError: pass + self.generator.bld.producer.set_running(1, self) + try: - self.generator.bld.returned_tasks.append(self) - self.log_display(self.generator.bld) ret = self.run() except Exception: - self.err_msg = Utils.ex_stack() + self.err_msg = traceback.format_exc() self.hasrun = Task.EXCEPTION # TODO cleanup m.error_handler(self) - m.out.put(self) return if ret: @@ -243,17 +241,17 @@ def process(self): except Errors.WafError: pass except Exception: - self.err_msg = Utils.ex_stack() + self.err_msg = traceback.format_exc() self.hasrun = Task.EXCEPTION else: self.hasrun = Task.SUCCESS if self.hasrun != Task.SUCCESS: m.error_handler(self) - self.master.set_running(-1, id(Utils.threading.currentThread()), self) - m.out.put(self) -Task.TaskBase.process_back = Task.TaskBase.process -Task.TaskBase.process = process + self.generator.bld.producer.set_running(-1, self) + +Task.Task.process_back = Task.Task.process +Task.Task.process = process old_start = Runner.Parallel.start def do_start(self): @@ -268,8 +266,26 @@ def do_start(self): make_picture(self) Runner.Parallel.start = do_start -def set_running(self, by, i, tsk): - self.taskinfo.put( (i, id(tsk), time.time(), tsk.__class__.__name__, self.processed, self.count, by) ) +lock_running = threading.Lock() +def set_running(self, by, tsk): + with lock_running: + try: + cache = self.lock_cache + except AttributeError: + cache = self.lock_cache = {} + + i = 0 + if by > 0: + vals = cache.values() + for i in range(self.numjobs): + if i not in vals: + cache[tsk] = i + break + else: + i = cache[tsk] + del cache[tsk] + + self.taskinfo.put( (i, id(tsk), time.time(), tsk.__class__.__name__, self.processed, self.count, by, ",".join(map(str, tsk.outputs))) ) Runner.Parallel.set_running = set_running def name2class(name): @@ -309,7 +325,7 @@ def make_picture(producer): acc = [] for x in tmp: thread_count += x[6] - acc.append("%d %d %f %r %d %d %d" % (x[0], x[1], x[2] - ini, x[3], x[4], x[5], thread_count)) + acc.append("%d %d %f %r %d %d %d %s" % (x[0], x[1], x[2] - ini, x[3], x[4], x[5], thread_count, x[7])) data_node = producer.bld.path.make_node('pdebug.dat') data_node.write('\n'.join(acc)) @@ -350,7 +366,7 @@ def make_picture(producer): end = line[2] #print id, thread_id, begin, end #acc.append( ( 10*thread_id, 10*(thread_id+1), 10*begin, 10*end ) ) - acc.append( (BAND * begin, BAND*thread_id, BAND*end - BAND*begin, BAND, line[3]) ) + acc.append( (BAND * begin, BAND*thread_id, BAND*end - BAND*begin, BAND, line[3], line[7]) ) break if Options.options.dmaxtime < 0.1: @@ -377,16 +393,18 @@ class tobject(object): model.width = gwidth + 4 model.height = gheight + 4 + model.tooltip = not Options.options.dnotooltip + model.title = Options.options.dtitle model.title_x = gwidth / 2 model.title_y = gheight + - 5 groups = {} - for (x, y, w, h, clsname) in acc: + for (x, y, w, h, clsname, name) in acc: try: - groups[clsname].append((x, y, w, h)) + groups[clsname].append((x, y, w, h, name)) except: - groups[clsname] = [(x, y, w, h)] + groups[clsname] = [(x, y, w, h, name)] # groups of rectangles (else js highlighting is slow) model.groups = [] @@ -395,13 +413,14 @@ class tobject(object): model.groups.append(g) g.classname = name2class(cls) g.rects = [] - for (x, y, w, h) in groups[cls]: + for (x, y, w, h, name) in groups[cls]: r = tobject() g.rects.append(r) r.x = 2 + x * ratio r.y = 2 + y r.width = w * ratio r.height = h + r.name = name r.color = map_to_color(cls) cnt = THREAD_AMOUNT @@ -430,7 +449,7 @@ 
class tobject(object): node = producer.bld.path.make_node('pdebug.svg') node.write(txt) - Logs.warn('Created the diagram %r' % node.abspath()) + Logs.warn('Created the diagram %r', node) def options(opt): opt.add_option('--dtitle', action='store', default='Parallel build representation for %r' % ' '.join(sys.argv), @@ -439,4 +458,5 @@ def options(opt): opt.add_option('--dtime', action='store', type='float', help='recording interval in seconds', default=0.009, dest='dtime') opt.add_option('--dband', action='store', type='int', help='band width', default=22, dest='dband') opt.add_option('--dmaxtime', action='store', type='float', help='maximum time, for drawing fair comparisons', default=0, dest='dmaxtime') + opt.add_option('--dnotooltip', action='store_true', help='disable tooltips', default=False, dest='dnotooltip') diff --git a/waflib/extras/pch.py b/waflib/extras/pch.py index 187b66d8a7..b44c7a2e8f 100644 --- a/waflib/extras/pch.py +++ b/waflib/extras/pch.py @@ -83,7 +83,7 @@ def apply_pch(self): if getattr(self.bld, 'pch_tasks', None) is None: self.bld.pch_tasks = {} - if getattr(self, 'headers', None) is None: + if getattr(self, 'headers', None) is None: return self.headers = self.to_nodes(self.headers) @@ -91,7 +91,7 @@ def apply_pch(self): if getattr(self, 'name', None): try: task = self.bld.pch_tasks[self.name] - self.bld.fatal("Duplicated 'pch' task with name %r" % self.name) + self.bld.fatal("Duplicated 'pch' task with name %r" % "%s.%s" % (self.name, self.idx)) except KeyError: pass @@ -102,7 +102,7 @@ def apply_pch(self): # target should be an absolute path of `out`, but without precompiled header extension task.target = out.abspath()[:-len(out.suffix())] - self.pch_task = task + self.pch_task = task if getattr(self, 'name', None): self.bld.pch_tasks[self.name] = task @@ -129,7 +129,7 @@ def add_pch(self): x.env.append_value('CXXFLAGS', self.env['CXXPCH_F'] + [pch.target]) class gchx(Task.Task): - run_str = '${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${CPPFLAGS} ${CXXPCH_FLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CXXPCH_F:SRC} ${CXX_SRC_F}${SRC[0].abspath()} ${CXX_TGT_F}${TGT[0].abspath()}' + run_str = '${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${CXXPCH_FLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CXXPCH_F:SRC} ${CXX_SRC_F}${SRC[0].abspath()} ${CXX_TGT_F}${TGT[0].abspath()} ${CPPFLAGS}' scan = c_preproc.scan color = 'BLUE' ext_out=['.h'] diff --git a/waflib/extras/pep8.py b/waflib/extras/pep8.py index 5cace1d47f..676beedb25 100644 --- a/waflib/extras/pep8.py +++ b/waflib/extras/pep8.py @@ -9,7 +9,7 @@ or $ pip install pep8 -To add the boost tool to the waf file: +To add the pep8 tool to the waf file: $ ./waf-light --tools=compat15,pep8 or, if you have waf >= 1.6.2 $ ./waf update --files=pep8 @@ -24,7 +24,7 @@ def run_pep8(self, node): ''' import threading -from waflib import TaskGen, Task, Options +from waflib import Task, Options pep8 = __import__('pep8') diff --git a/waflib/extras/pgicc.py b/waflib/extras/pgicc.py index a791c1477e..f8068d53c0 100644 --- a/waflib/extras/pgicc.py +++ b/waflib/extras/pgicc.py @@ -7,6 +7,7 @@ """ import sys, re +from waflib import Errors from waflib.Configure import conf from waflib.Tools.compiler_c import c_compiler c_compiler['linux'].append('pgicc') @@ -21,10 +22,14 @@ def find_pgi_compiler(conf, var, name): v = conf.env cc = None - if v[var]: cc = v[var] - elif var in conf.environ: cc = conf.environ[var] - if not cc: cc = conf.find_program(name, var=var) - if not cc: 
conf.fatal('PGI Compiler (%s) was not found' % name) + if v[var]: + cc = v[var] + elif var in conf.environ: + cc = conf.environ[var] + if not cc: + cc = conf.find_program(name, var=var) + if not cc: + conf.fatal('PGI Compiler (%s) was not found' % name) v[var + '_VERSION'] = conf.get_pgi_version(cc) v[var] = cc @@ -38,11 +43,13 @@ def get_pgi_version(conf, cc): try: out, err = conf.cmd_and_log(cmd, output=0) - except Exception: + except Errors.WafError: conf.fatal('Could not find pgi compiler %r' % cmd) - if out: match = version_re(out) - else: match = version_re(err) + if out: + match = version_re(out) + else: + match = version_re(err) if not match: conf.fatal('Could not verify PGI signature') @@ -50,10 +57,10 @@ def get_pgi_version(conf, cc): cmd = cc + ['-help=variable'] try: out, err = conf.cmd_and_log(cmd, output=0) - except Exception: + except Errors.WafError: conf.fatal('Could not find pgi compiler %r' % cmd) - version = re.findall('^COMPVER\s*=(.*)', out, re.M) + version = re.findall(r'^COMPVER\s*=(.*)', out, re.M) if len(version) != 1: conf.fatal('Could not determine the compiler version') return version[0] diff --git a/waflib/extras/pgicxx.py b/waflib/extras/pgicxx.py index 926f40a0d5..eae121c403 100644 --- a/waflib/extras/pgicxx.py +++ b/waflib/extras/pgicxx.py @@ -6,8 +6,6 @@ Detect the PGI C++ compiler """ -import sys, re -from waflib.Configure import conf from waflib.Tools.compiler_cxx import cxx_compiler cxx_compiler['linux'].append('pgicxx') diff --git a/waflib/extras/prefork.py b/waflib/extras/prefork.py deleted file mode 100755 index 2de1fd9539..0000000000 --- a/waflib/extras/prefork.py +++ /dev/null @@ -1,402 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2015 (ita) - -""" -Execute commands through pre-forked servers. This tool creates as many servers as build threads. -On a benchmark executed on Linux Kubuntu 14, 8 virtual cores and SSD drive:: - - ./genbench.py /tmp/build 200 100 15 5 - waf clean build -j24 - # no prefork: 2m7.179s - # prefork: 0m55.400s - -To use:: - - def options(opt): - # optional, will spawn 40 servers early - opt.load('prefork') - - def build(bld): - bld.load('prefork') - ... - more code - -The servers and the build process are using a shared nonce to prevent undesirable external connections. 
-""" - -import os, re, socket, threading, sys, subprocess, time, atexit, traceback, random, signal -try: - import SocketServer -except ImportError: - import socketserver as SocketServer -try: - from queue import Queue -except ImportError: - from Queue import Queue -try: - import cPickle -except ImportError: - import pickle as cPickle - -SHARED_KEY = None -HEADER_SIZE = 64 - -REQ = 'REQ' -RES = 'RES' -BYE = 'BYE' - -def make_header(params, cookie=''): - header = ','.join(params) - header = header.ljust(HEADER_SIZE - len(cookie)) - assert(len(header) == HEADER_SIZE - len(cookie)) - header = header + cookie - if sys.hexversion > 0x3000000: - header = header.encode('iso8859-1') - return header - -def safe_compare(x, y): - sum = 0 - for (a, b) in zip(x, y): - sum |= ord(a) ^ ord(b) - return sum == 0 - -re_valid_query = re.compile('^[a-zA-Z0-9_, ]+$') -class req(SocketServer.StreamRequestHandler): - def handle(self): - try: - while self.process_command(): - pass - except KeyboardInterrupt: - return - except Exception as e: - print(e) - - def send_response(self, ret, out, err, exc): - if out or err or exc: - data = (out, err, exc) - data = cPickle.dumps(data, -1) - else: - data = '' - - params = [RES, str(ret), str(len(data))] - - # no need for the cookie in the response - self.wfile.write(make_header(params)) - if data: - self.wfile.write(data) - self.wfile.flush() - - def process_command(self): - query = self.rfile.read(HEADER_SIZE) - if not query: - return None - #print(len(query)) - assert(len(query) == HEADER_SIZE) - if sys.hexversion > 0x3000000: - query = query.decode('iso8859-1') - - # magic cookie - key = query[-20:] - if not safe_compare(key, SHARED_KEY): - print('%r %r' % (key, SHARED_KEY)) - self.send_response(-1, '', '', 'Invalid key given!') - return 'meh' - - query = query[:-20] - #print "%r" % query - if not re_valid_query.match(query): - self.send_response(-1, '', '', 'Invalid query %r' % query) - raise ValueError('Invalid query %r' % query) - - query = query.strip().split(',') - - if query[0] == REQ: - self.run_command(query[1:]) - elif query[0] == BYE: - raise ValueError('Exit') - else: - raise ValueError('Invalid query %r' % query) - return 'ok' - - def run_command(self, query): - - size = int(query[0]) - data = self.rfile.read(size) - assert(len(data) == size) - kw = cPickle.loads(data) - - # run command - ret = out = err = exc = None - cmd = kw['cmd'] - del kw['cmd'] - #print(cmd) - - try: - if kw['stdout'] or kw['stderr']: - p = subprocess.Popen(cmd, **kw) - (out, err) = p.communicate() - ret = p.returncode - else: - ret = subprocess.Popen(cmd, **kw).wait() - except KeyboardInterrupt: - raise - except Exception as e: - ret = -1 - exc = str(e) + traceback.format_exc() - - self.send_response(ret, out, err, exc) - -def create_server(conn, cls): - # child processes do not need the key, so we remove it from the OS environment - global SHARED_KEY - SHARED_KEY = os.environ['SHARED_KEY'] - os.environ['SHARED_KEY'] = '' - - ppid = int(os.environ['PREFORKPID']) - def reap(): - if os.sep != '/': - os.waitpid(ppid, 0) - else: - while 1: - try: - os.kill(ppid, 0) - except OSError: - break - else: - time.sleep(1) - os.kill(os.getpid(), signal.SIGKILL) - t = threading.Thread(target=reap) - t.setDaemon(True) - t.start() - - server = SocketServer.TCPServer(conn, req) - print(server.server_address[1]) - sys.stdout.flush() - #server.timeout = 6000 # seconds - server.socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) - try: - server.serve_forever(poll_interval=0.001) - except 
KeyboardInterrupt: - pass - -if __name__ == '__main__': - conn = ("127.0.0.1", 0) - #print("listening - %r %r\n" % conn) - create_server(conn, req) -else: - - from waflib import Logs, Utils, Runner, Errors, Options - - def init_task_pool(self): - # lazy creation, and set a common pool for all task consumers - pool = self.pool = [] - for i in range(self.numjobs): - consumer = Runner.get_pool() - pool.append(consumer) - consumer.idx = i - self.ready = Queue(0) - def setq(consumer): - consumer.ready = self.ready - try: - threading.current_thread().idx = consumer.idx - except Exception as e: - print(e) - for x in pool: - x.ready.put(setq) - return pool - Runner.Parallel.init_task_pool = init_task_pool - - def make_server(bld, idx): - cmd = [sys.executable, os.path.abspath(__file__)] - proc = subprocess.Popen(cmd, stdout=subprocess.PIPE) - return proc - - def make_conn(bld, srv): - port = srv.port - conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - conn.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) - conn.connect(('127.0.0.1', port)) - return conn - - - SERVERS = [] - CONNS = [] - def close_all(): - global SERVERS, CONNS - while CONNS: - conn = CONNS.pop() - try: - conn.close() - except: - pass - while SERVERS: - srv = SERVERS.pop() - try: - srv.kill() - except: - pass - atexit.register(close_all) - - def put_data(conn, data): - cnt = 0 - while cnt < len(data): - sent = conn.send(data[cnt:]) - if sent == 0: - raise RuntimeError('connection ended') - cnt += sent - - def read_data(conn, siz): - cnt = 0 - buf = [] - while cnt < siz: - data = conn.recv(min(siz - cnt, 1024)) - if not data: - raise RuntimeError('connection ended %r %r' % (cnt, siz)) - buf.append(data) - cnt += len(data) - if sys.hexversion > 0x3000000: - ret = ''.encode('iso8859-1').join(buf) - else: - ret = ''.join(buf) - return ret - - def exec_command(self, cmd, **kw): - if 'stdout' in kw: - if kw['stdout'] not in (None, subprocess.PIPE): - return self.exec_command_old(cmd, **kw) - elif 'stderr' in kw: - if kw['stderr'] not in (None, subprocess.PIPE): - return self.exec_command_old(cmd, **kw) - - kw['shell'] = isinstance(cmd, str) - Logs.debug('runner: %r' % cmd) - Logs.debug('runner_env: kw=%s' % kw) - - if self.logger: - self.logger.info(cmd) - - if 'stdout' not in kw: - kw['stdout'] = subprocess.PIPE - if 'stderr' not in kw: - kw['stderr'] = subprocess.PIPE - - if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]): - raise Errors.WafError("Program %s not found!" % cmd[0]) - - idx = threading.current_thread().idx - kw['cmd'] = cmd - - # serialization.. 
- #print("sub %r %r" % (idx, cmd)) - #print("write to %r %r" % (idx, cmd)) - - data = cPickle.dumps(kw, -1) - params = [REQ, str(len(data))] - header = make_header(params, self.SHARED_KEY) - - conn = CONNS[idx] - - put_data(conn, header + data) - #put_data(conn, data) - - #print("running %r %r" % (idx, cmd)) - #print("read from %r %r" % (idx, cmd)) - - data = read_data(conn, HEADER_SIZE) - if sys.hexversion > 0x3000000: - data = data.decode('iso8859-1') - - #print("received %r" % data) - lst = data.split(',') - ret = int(lst[1]) - dlen = int(lst[2]) - - out = err = None - if dlen: - data = read_data(conn, dlen) - (out, err, exc) = cPickle.loads(data) - if exc: - raise Errors.WafError('Execution failure: %s' % exc) - - if out: - if not isinstance(out, str): - out = out.decode(sys.stdout.encoding or 'iso8859-1') - if self.logger: - self.logger.debug('out: %s' % out) - else: - Logs.info(out, extra={'stream':sys.stdout, 'c1': ''}) - if err: - if not isinstance(err, str): - err = err.decode(sys.stdout.encoding or 'iso8859-1') - if self.logger: - self.logger.error('err: %s' % err) - else: - Logs.info(err, extra={'stream':sys.stderr, 'c1': ''}) - - return ret - - def init_key(ctx): - try: - key = ctx.SHARED_KEY = os.environ['SHARED_KEY'] - except KeyError: - key = "".join([chr(random.SystemRandom().randint(40, 126)) for x in range(20)]) - os.environ['SHARED_KEY'] = ctx.SHARED_KEY = key - - os.environ['PREFORKPID'] = str(os.getpid()) - return key - - def init_servers(ctx, maxval): - while len(SERVERS) < maxval: - i = len(SERVERS) - srv = make_server(ctx, i) - SERVERS.append(srv) - while len(CONNS) < maxval: - i = len(CONNS) - srv = SERVERS[i] - - # postpone the connection - srv.port = int(srv.stdout.readline()) - - conn = None - for x in range(30): - try: - conn = make_conn(ctx, srv) - break - except socket.error: - time.sleep(0.01) - if not conn: - raise ValueError('Could not start the server!') - if srv.poll() is not None: - Logs.warn('Looks like it it not our server process - concurrent builds are unsupported at this stage') - raise ValueError('Could not start the server') - CONNS.append(conn) - - def init_smp(self): - if not getattr(Options.options, 'smp', getattr(self, 'smp', None)): - return - if Utils.unversioned_sys_platform() in ('freebsd',): - pid = os.getpid() - cmd = ['cpuset', '-l', '0', '-p', str(pid)] - elif Utils.unversioned_sys_platform() in ('linux',): - pid = os.getpid() - cmd = ['taskset', '-pc', '0', str(pid)] - if cmd: - self.cmd_and_log(cmd, quiet=0) - - def options(opt): - init_key(opt) - init_servers(opt, 40) - opt.add_option('--pin-process', action='store_true', dest='smp', default=False) - - def build(bld): - if bld.cmd == 'clean': - return - - init_key(bld) - init_servers(bld, bld.jobs) - init_smp(bld) - - bld.__class__.exec_command_old = bld.__class__.exec_command - bld.__class__.exec_command = exec_command - diff --git a/waflib/extras/preforkjava.py b/waflib/extras/preforkjava.py deleted file mode 100755 index 7088e9bdba..0000000000 --- a/waflib/extras/preforkjava.py +++ /dev/null @@ -1,241 +0,0 @@ -#! 
/usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2015 (ita) - -# TODO: have the child process terminate if the parent is killed abruptly - -import os, re, socket, threading, sys, subprocess, time, atexit, traceback, random -try: - import SocketServer -except ImportError: - import socketserver as SocketServer -try: - from queue import Queue -except ImportError: - from Queue import Queue - -import json as pickle - -SHARED_KEY = None -HEADER_SIZE = 64 - -REQ = 'REQ' -RES = 'RES' -BYE = 'BYE' - -def make_header(params, cookie=''): - header = ','.join(params) - header = header.ljust(HEADER_SIZE - len(cookie)) - assert(len(header) == HEADER_SIZE - len(cookie)) - header = header + cookie - if sys.hexversion > 0x3000000: - header = header.encode('iso8859-1') - return header - -if 1: - from waflib import Logs, Utils, Runner, Errors, Options - - def init_task_pool(self): - # lazy creation, and set a common pool for all task consumers - pool = self.pool = [] - for i in range(self.numjobs): - consumer = Runner.get_pool() - pool.append(consumer) - consumer.idx = i - self.ready = Queue(0) - def setq(consumer): - consumer.ready = self.ready - try: - threading.current_thread().idx = consumer.idx - except Exception as e: - print(e) - for x in pool: - x.ready.put(setq) - return pool - Runner.Parallel.init_task_pool = init_task_pool - - def make_server(bld, idx): - top = getattr(bld, 'preforkjava_top', os.path.dirname(os.path.abspath('__file__'))) - cp = getattr(bld, 'preforkjava_cp', os.path.join(top, 'minimal-json-0.9.3-SNAPSHOT.jar') + os.pathsep + top) - - for x in cp.split(os.pathsep): - if x and not os.path.exists(x): - Logs.warn('Invalid classpath: %r' % cp) - Logs.warn('Set for example bld.preforkjava_cp to /path/to/minimal-json:/path/to/Prefork.class/') - - cwd = getattr(bld, 'preforkjava_cwd', top) - port = getattr(bld, 'preforkjava_port', 51200) - cmd = getattr(bld, 'preforkjava_cmd', 'java -cp %s%s Prefork %d' % (cp, os.pathsep, port)) - proc = subprocess.Popen(cmd.split(), shell=False, cwd=cwd) - proc.port = port - return proc - - def make_conn(bld, srv): - #port = PORT + idx - port = srv.port - conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - conn.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) - conn.connect(('127.0.0.1', port)) - return conn - - SERVERS = [] - CONNS = [] - def close_all(): - global SERVERS - while SERVERS: - srv = SERVERS.pop() - pid = srv.pid - try: - srv.kill() - except Exception as e: - pass - atexit.register(close_all) - - def put_data(conn, data): - cnt = 0 - while cnt < len(data): - sent = conn.send(data[cnt:]) - if sent == 0: - raise RuntimeError('connection ended') - cnt += sent - - def read_data(conn, siz): - cnt = 0 - buf = [] - while cnt < siz: - data = conn.recv(min(siz - cnt, 1024)) - if not data: - raise RuntimeError('connection ended %r %r' % (cnt, siz)) - buf.append(data) - cnt += len(data) - if sys.hexversion > 0x3000000: - ret = ''.encode('iso8859-1').join(buf) - else: - ret = ''.join(buf) - return ret - - def exec_command(self, cmd, **kw): - if 'stdout' in kw: - if kw['stdout'] not in (None, subprocess.PIPE): - return self.exec_command_old(cmd, **kw) - elif 'stderr' in kw: - if kw['stderr'] not in (None, subprocess.PIPE): - return self.exec_command_old(cmd, **kw) - - kw['shell'] = isinstance(cmd, str) - Logs.debug('runner: %r' % cmd) - Logs.debug('runner_env: kw=%s' % kw) - - if self.logger: - self.logger.info(cmd) - - if 'stdout' not in kw: - kw['stdout'] = subprocess.PIPE - if 'stderr' not in kw: - kw['stderr'] = subprocess.PIPE - - 
if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]): - raise Errors.WafError("Program %s not found!" % cmd[0]) - - idx = threading.current_thread().idx - kw['cmd'] = cmd - - data = pickle.dumps(kw) - params = [REQ, str(len(data))] - header = make_header(params, self.SHARED_KEY) - - conn = CONNS[idx] - - if sys.hexversion > 0x3000000: - data = data.encode('iso8859-1') - put_data(conn, header + data) - - data = read_data(conn, HEADER_SIZE) - if sys.hexversion > 0x3000000: - data = data.decode('iso8859-1') - - #print("received %r" % data) - lst = data.split(',') - ret = int(lst[1]) - dlen = int(lst[2]) - - out = err = None - if dlen: - data = read_data(conn, dlen) - (out, err, exc) = pickle.loads(data) - if exc: - raise Errors.WafError('Execution failure: %s' % exc) - - if out: - if not isinstance(out, str): - out = out.decode(sys.stdout.encoding or 'iso8859-1') - if self.logger: - self.logger.debug('out: %s' % out) - else: - Logs.info(out, extra={'stream':sys.stdout, 'c1': ''}) - if err: - if not isinstance(err, str): - err = err.decode(sys.stdout.encoding or 'iso8859-1') - if self.logger: - self.logger.error('err: %s' % err) - else: - Logs.info(err, extra={'stream':sys.stderr, 'c1': ''}) - - return ret - - def init_key(ctx): - try: - key = ctx.SHARED_KEY = os.environ['SHARED_KEY'] - except KeyError: - key = "".join([chr(random.SystemRandom().randint(40, 126)) for x in range(20)]) - os.environ['SHARED_KEY'] = ctx.SHARED_KEY = key - os.environ['PREFORKPID'] = str(os.getpid()) - return key - - def init_servers(ctx, maxval): - while len(SERVERS) < 1: - i = len(SERVERS) - srv = make_server(ctx, i) - SERVERS.append(srv) - while len(CONNS) < maxval: - i = len(CONNS) - srv = SERVERS[0] - conn = None - for x in range(30): - try: - conn = make_conn(ctx, srv) - break - except socket.error: - time.sleep(0.01) - if not conn: - raise ValueError('Could not start the server!') - CONNS.append(conn) - - def init_smp(self): - if not getattr(Options.options, 'smp', getattr(self, 'smp', None)): - return - if Utils.unversioned_sys_platform() in ('freebsd',): - pid = os.getpid() - cmd = ['cpuset', '-l', '0', '-p', str(pid)] - elif Utils.unversioned_sys_platform() in ('linux',): - pid = os.getpid() - cmd = ['taskset', '-pc', '0', str(pid)] - if cmd: - self.cmd_and_log(cmd, quiet=0) - - def options(opt): - opt.add_option('--pin-process', action='store_true', dest='smp', default=False) - init_key(opt) - init_servers(opt, 40) - - def build(bld): - if bld.cmd == 'clean': - return - - init_key(bld) - init_servers(bld, bld.jobs) - init_smp(bld) - - bld.__class__.exec_command_old = bld.__class__.exec_command - bld.__class__.exec_command = exec_command - diff --git a/waflib/extras/preforkunix.py b/waflib/extras/preforkunix.py deleted file mode 100755 index 94a1287d61..0000000000 --- a/waflib/extras/preforkunix.py +++ /dev/null @@ -1,318 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2015 (ita) - -""" -A version of prefork.py that uses unix sockets. The advantage is that it does not expose -connections to the outside. Yet performance it only works on unix-like systems -and performance can be slightly worse. - -To use:: - - def options(opt): - # recommended, fork new processes before using more memory - opt.load('preforkunix') - - def build(bld): - bld.load('preforkunix') - ... 
- more code -""" - -import os, re, socket, threading, sys, subprocess, atexit, traceback, signal, time -try: - from queue import Queue -except ImportError: - from Queue import Queue -try: - import cPickle -except ImportError: - import pickle as cPickle - -HEADER_SIZE = 20 - -REQ = 'REQ' -RES = 'RES' -BYE = 'BYE' - -def make_header(params, cookie=''): - header = ','.join(params) - header = header.ljust(HEADER_SIZE - len(cookie)) - assert(len(header) == HEADER_SIZE - len(cookie)) - header = header + cookie - if sys.hexversion > 0x3000000: - header = header.encode('iso8859-1') - return header - -re_valid_query = re.compile('^[a-zA-Z0-9_, ]+$') -if 1: - def send_response(conn, ret, out, err, exc): - if out or err or exc: - data = (out, err, exc) - data = cPickle.dumps(data, -1) - else: - data = '' - - params = [RES, str(ret), str(len(data))] - - # no need for the cookie in the response - conn.send(make_header(params)) - if data: - conn.send(data) - - def process_command(conn): - query = conn.recv(HEADER_SIZE) - if not query: - return None - #print(len(query)) - assert(len(query) == HEADER_SIZE) - if sys.hexversion > 0x3000000: - query = query.decode('iso8859-1') - - #print "%r" % query - if not re_valid_query.match(query): - send_response(conn, -1, '', '', 'Invalid query %r' % query) - raise ValueError('Invalid query %r' % query) - - query = query.strip().split(',') - - if query[0] == REQ: - run_command(conn, query[1:]) - elif query[0] == BYE: - raise ValueError('Exit') - else: - raise ValueError('Invalid query %r' % query) - return 'ok' - - def run_command(conn, query): - - size = int(query[0]) - data = conn.recv(size) - assert(len(data) == size) - kw = cPickle.loads(data) - - # run command - ret = out = err = exc = None - cmd = kw['cmd'] - del kw['cmd'] - #print(cmd) - - try: - if kw['stdout'] or kw['stderr']: - p = subprocess.Popen(cmd, **kw) - (out, err) = p.communicate() - ret = p.returncode - else: - ret = subprocess.Popen(cmd, **kw).wait() - except KeyboardInterrupt: - raise - except Exception as e: - ret = -1 - exc = str(e) + traceback.format_exc() - - send_response(conn, ret, out, err, exc) - -if 1: - - from waflib import Logs, Utils, Runner, Errors, Options - - def init_task_pool(self): - # lazy creation, and set a common pool for all task consumers - pool = self.pool = [] - for i in range(self.numjobs): - consumer = Runner.get_pool() - pool.append(consumer) - consumer.idx = i - self.ready = Queue(0) - def setq(consumer): - consumer.ready = self.ready - try: - threading.current_thread().idx = consumer.idx - except Exception as e: - print(e) - for x in pool: - x.ready.put(setq) - return pool - Runner.Parallel.init_task_pool = init_task_pool - - def make_conn(bld): - child_socket, parent_socket = socket.socketpair(socket.AF_UNIX) - ppid = os.getpid() - pid = os.fork() - if pid == 0: - parent_socket.close() - - # if the parent crashes, try to exit cleanly - def reap(): - while 1: - try: - os.kill(ppid, 0) - except OSError: - break - else: - time.sleep(1) - os.kill(os.getpid(), signal.SIGKILL) - t = threading.Thread(target=reap) - t.setDaemon(True) - t.start() - - # write to child_socket only - try: - while process_command(child_socket): - pass - except KeyboardInterrupt: - sys.exit(2) - else: - child_socket.close() - return (pid, parent_socket) - - SERVERS = [] - CONNS = [] - def close_all(): - global SERVERS, CONS - while CONNS: - conn = CONNS.pop() - try: - conn.close() - except: - pass - while SERVERS: - pid = SERVERS.pop() - try: - os.kill(pid, 9) - except: - pass - 
atexit.register(close_all) - - def put_data(conn, data): - cnt = 0 - while cnt < len(data): - sent = conn.send(data[cnt:]) - if sent == 0: - raise RuntimeError('connection ended') - cnt += sent - - def read_data(conn, siz): - cnt = 0 - buf = [] - while cnt < siz: - data = conn.recv(min(siz - cnt, 1024)) - if not data: - raise RuntimeError('connection ended %r %r' % (cnt, siz)) - buf.append(data) - cnt += len(data) - if sys.hexversion > 0x3000000: - ret = ''.encode('iso8859-1').join(buf) - else: - ret = ''.join(buf) - return ret - - def exec_command(self, cmd, **kw): - if 'stdout' in kw: - if kw['stdout'] not in (None, subprocess.PIPE): - return self.exec_command_old(cmd, **kw) - elif 'stderr' in kw: - if kw['stderr'] not in (None, subprocess.PIPE): - return self.exec_command_old(cmd, **kw) - - kw['shell'] = isinstance(cmd, str) - Logs.debug('runner: %r' % cmd) - Logs.debug('runner_env: kw=%s' % kw) - - if self.logger: - self.logger.info(cmd) - - if 'stdout' not in kw: - kw['stdout'] = subprocess.PIPE - if 'stderr' not in kw: - kw['stderr'] = subprocess.PIPE - - if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]): - raise Errors.WafError("Program %s not found!" % cmd[0]) - - idx = threading.current_thread().idx - kw['cmd'] = cmd - - # serialization.. - #print("sub %r %r" % (idx, cmd)) - #print("write to %r %r" % (idx, cmd)) - - data = cPickle.dumps(kw, -1) - params = [REQ, str(len(data))] - header = make_header(params) - - conn = CONNS[idx] - - put_data(conn, header + data) - - #print("running %r %r" % (idx, cmd)) - #print("read from %r %r" % (idx, cmd)) - - data = read_data(conn, HEADER_SIZE) - if sys.hexversion > 0x3000000: - data = data.decode('iso8859-1') - - #print("received %r" % data) - lst = data.split(',') - ret = int(lst[1]) - dlen = int(lst[2]) - - out = err = None - if dlen: - data = read_data(conn, dlen) - (out, err, exc) = cPickle.loads(data) - if exc: - raise Errors.WafError('Execution failure: %s' % exc) - - if out: - if not isinstance(out, str): - out = out.decode(sys.stdout.encoding or 'iso8859-1') - if self.logger: - self.logger.debug('out: %s' % out) - else: - Logs.info(out, extra={'stream':sys.stdout, 'c1': ''}) - if err: - if not isinstance(err, str): - err = err.decode(sys.stdout.encoding or 'iso8859-1') - if self.logger: - self.logger.error('err: %s' % err) - else: - Logs.info(err, extra={'stream':sys.stderr, 'c1': ''}) - - return ret - - def init_smp(self): - if not getattr(Options.options, 'smp', getattr(self, 'smp', None)): - return - if Utils.unversioned_sys_platform() in ('freebsd',): - pid = os.getpid() - cmd = ['cpuset', '-l', '0', '-p', str(pid)] - elif Utils.unversioned_sys_platform() in ('linux',): - pid = os.getpid() - cmd = ['taskset', '-pc', '0', str(pid)] - if cmd: - self.cmd_and_log(cmd, quiet=0) - - def options(opt): - # memory consumption might be at the lowest point while processing options - opt.add_option('--pin-process', action='store_true', dest='smp', default=False) - if Utils.is_win32 or os.sep != '/': - return - while len(CONNS) < 30: - (pid, conn) = make_conn(opt) - SERVERS.append(pid) - CONNS.append(conn) - - def build(bld): - if Utils.is_win32 or os.sep != '/': - return - if bld.cmd == 'clean': - return - while len(CONNS) < bld.jobs: - (pid, conn) = make_conn(bld) - SERVERS.append(pid) - CONNS.append(conn) - init_smp(bld) - bld.__class__.exec_command_old = bld.__class__.exec_command - bld.__class__.exec_command = exec_command - diff --git a/waflib/extras/print_commands.py b/waflib/extras/print_commands.py deleted file mode 
100644 index 3005c3ee73..0000000000 --- a/waflib/extras/print_commands.py +++ /dev/null @@ -1,46 +0,0 @@ -#! /usr/bin/env python - -""" -Illustrate how to override a class method to do something - -In this case, print the commands being executed as strings -(the commands are usually lists, so this can be misleading) -""" - -import sys -from waflib import Context, Utils, Logs - -def exec_command(self, cmd, **kw): - subprocess = Utils.subprocess - kw['shell'] = isinstance(cmd, str) - - txt = cmd - if isinstance(cmd, list): - txt = ' '.join(cmd) - - print(txt) - Logs.debug('runner_env: kw=%s' % kw) - - try: - if self.logger: - # warning: may deadlock with a lot of output (subprocess limitation) - - self.logger.info(cmd) - - kw['stdout'] = kw['stderr'] = subprocess.PIPE - p = subprocess.Popen(cmd, **kw) - (out, err) = p.communicate() - if out: - self.logger.debug('out: %s' % out.decode(sys.stdout.encoding or 'iso8859-1')) - if err: - self.logger.error('err: %s' % err.decode(sys.stdout.encoding or 'iso8859-1')) - return p.returncode - else: - p = subprocess.Popen(cmd, **kw) - return p.wait() - except OSError: - return -1 - -Context.Context.exec_command = exec_command - - diff --git a/waflib/extras/proc.py b/waflib/extras/proc.py index f97adefbf1..764abecfc3 100644 --- a/waflib/extras/proc.py +++ b/waflib/extras/proc.py @@ -23,10 +23,8 @@ def configure(cnf): def proc(tsk): env = tsk.env gen = tsk.generator - bld = gen.bld inc_nodes = gen.to_incnodes(Utils.to_list(getattr(gen,'includes',[])) + env['INCLUDES']) - # FIXME the if-else construct will not work in python 2 cmd = ( [env.PROC] + ['SQLCHECK=SEMANTICS'] + diff --git a/waflib/extras/protoc.py b/waflib/extras/protoc.py index f8fd070f86..4a519cc6a0 100644 --- a/waflib/extras/protoc.py +++ b/waflib/extras/protoc.py @@ -3,14 +3,15 @@ # Philipp Bender, 2012 # Matt Clarkson, 2012 -import re +import re, os from waflib.Task import Task -from waflib.TaskGen import extension +from waflib.TaskGen import extension +from waflib import Errors, Context, Logs """ A simple tool to integrate protocol buffers into your build system. -Example:: +Example for C++: def configure(conf): conf.load('compiler_cxx cxx protoc') @@ -18,9 +19,62 @@ def configure(conf): def build(bld): bld( features = 'cxx cxxprogram' - source = 'main.cpp file1.proto proto/file2.proto', - include = '. proto', - target = 'executable') + source = 'main.cpp file1.proto proto/file2.proto', + includes = '. 
proto', + target = 'executable') + +Example for Python: + + def configure(conf): + conf.load('python protoc') + + def build(bld): + bld( + features = 'py' + source = 'main.py file1.proto proto/file2.proto', + protoc_includes = 'proto') + +Example for both Python and C++ at same time: + + def configure(conf): + conf.load('cxx python protoc') + + def build(bld): + bld( + features = 'cxx py' + source = 'file1.proto proto/file2.proto', + protoc_includes = 'proto') # or includes + + +Example for Java: + + def options(opt): + opt.load('java') + + def configure(conf): + conf.load('python java protoc') + # Here you have to point to your protobuf-java JAR and have it in classpath + conf.env.CLASSPATH_PROTOBUF = ['protobuf-java-2.5.0.jar'] + + def build(bld): + bld( + features = 'javac protoc', + name = 'pbjava', + srcdir = 'inc/ src', # directories used by javac + source = ['inc/message_inc.proto', 'inc/message.proto'], + # source is used by protoc for .proto files + use = 'PROTOBUF', + protoc_includes = ['inc']) # for protoc to search dependencies + + +Protoc includes passed via protoc_includes are either relative to the taskgen +or to the project and are searched in this order. + +Include directories external to the waf project can also be passed to the +extra by using protoc_extincludes + + protoc_extincludes = ['/usr/include/pblib'] + Notes when using this tool: @@ -35,10 +89,9 @@ def build(bld): """ class protoc(Task): - # protoc expects the input proto file to be an absolute path. - run_str = '${PROTOC} ${PROTOC_FLAGS} ${PROTOC_ST:INCPATHS} ${SRC[0].abspath()}' + run_str = '${PROTOC} ${PROTOC_FL:PROTOC_FLAGS} ${PROTOC_ST:INCPATHS} ${PROTOC_ST:PROTOC_INCPATHS} ${PROTOC_ST:PROTOC_EXTINCPATHS} ${SRC[0].bldpath()}' color = 'BLUE' - ext_out = ['.h', 'pb.cc'] + ext_out = ['.h', 'pb.cc', '.py', '.java'] def scan(self): """ Scan .proto dependencies @@ -48,10 +101,27 @@ def scan(self): nodes = [] names = [] seen = [] + search_nodes = [] + + if not node: + return (nodes, names) - if not node: return (nodes, names) + if 'cxx' in self.generator.features: + search_nodes = self.generator.includes_nodes + + if 'py' in self.generator.features or 'javac' in self.generator.features: + for incpath in getattr(self.generator, 'protoc_includes', []): + incpath_node = self.generator.path.find_node(incpath) + if incpath_node: + search_nodes.append(incpath_node) + else: + # Check if relative to top-level for extra tg dependencies + incpath_node = self.generator.bld.path.find_node(incpath) + if incpath_node: + search_nodes.append(incpath_node) + else: + raise Errors.WafError('protoc: include path %r does not exist' % incpath) - search_paths = [self.generator.path.find_node(x) for x in self.generator.includes] def parse_node(node): if node in seen: @@ -62,8 +132,8 @@ def parse_node(node): m = re.search(r'^import\s+"(.*)";.*(//)?.*', line) if m: dep = m.groups()[0] - for incpath in search_paths: - found = incpath.find_resource(dep) + for incnode in search_nodes: + found = incnode.find_resource(dep) if found: nodes.append(found) parse_node(found) @@ -71,25 +141,84 @@ def parse_node(node): names.append(dep) parse_node(node) + # Add also dependencies path to INCPATHS so protoc will find the included file + for deppath in nodes: + self.env.append_unique('INCPATHS', deppath.parent.bldpath()) return (nodes, names) @extension('.proto') def process_protoc(self, node): - cpp_node = node.change_ext('.pb.cc') - hpp_node = node.change_ext('.pb.h') - self.create_task('protoc', node, [cpp_node, hpp_node]) - 
self.source.append(cpp_node) - - if 'cxx' in self.features and not self.env.PROTOC_FLAGS: - #self.env.PROTOC_FLAGS = '--cpp_out=%s' % node.parent.get_bld().abspath() # <- this does not work - self.env.PROTOC_FLAGS = '--cpp_out=.' + incdirs = [] + out_nodes = [] + protoc_flags = [] + + # ensure PROTOC_FLAGS is a list; a copy is used below anyway + self.env.PROTOC_FLAGS = self.to_list(self.env.PROTOC_FLAGS) + + if 'cxx' in self.features: + cpp_node = node.change_ext('.pb.cc') + hpp_node = node.change_ext('.pb.h') + self.source.append(cpp_node) + out_nodes.append(cpp_node) + out_nodes.append(hpp_node) + protoc_flags.append('--cpp_out=%s' % node.parent.get_bld().bldpath()) + + if 'py' in self.features: + py_node = node.change_ext('_pb2.py') + self.source.append(py_node) + out_nodes.append(py_node) + protoc_flags.append('--python_out=%s' % node.parent.get_bld().bldpath()) + + if 'javac' in self.features: + # Make javac get also pick java code generated in build + if not node.parent.get_bld() in self.javac_task.srcdir: + self.javac_task.srcdir.append(node.parent.get_bld()) + + protoc_flags.append('--java_out=%s' % node.parent.get_bld().bldpath()) + node.parent.get_bld().mkdir() + + tsk = self.create_task('protoc', node, out_nodes) + tsk.env.append_value('PROTOC_FLAGS', protoc_flags) + + if 'javac' in self.features: + self.javac_task.set_run_after(tsk) + + # Instruct protoc where to search for .proto included files. + # For C++ standard include files dirs are used, + # but this doesn't apply to Python for example + for incpath in getattr(self, 'protoc_includes', []): + incpath_node = self.path.find_node(incpath) + if incpath_node: + incdirs.append(incpath_node.bldpath()) + else: + # Check if relative to top-level for extra tg dependencies + incpath_node = self.bld.path.find_node(incpath) + if incpath_node: + incdirs.append(incpath_node.bldpath()) + else: + raise Errors.WafError('protoc: include path %r does not exist' % incpath) + + tsk.env.PROTOC_INCPATHS = incdirs + + # Include paths external to the waf project (ie. shared pb repositories) + tsk.env.PROTOC_EXTINCPATHS = getattr(self, 'protoc_extincludes', []) + + # PR2115: protoc generates output of .proto files in nested + # directories by canonicalizing paths. To avoid this we have to pass + # as first include the full directory file of the .proto file + tsk.env.prepend_value('INCPATHS', node.parent.bldpath()) use = getattr(self, 'use', '') if not 'PROTOBUF' in use: self.use = self.to_list(use) + ['PROTOBUF'] def configure(conf): - conf.check_cfg(package="protobuf", uselib_store="PROTOBUF", args=['--cflags', '--libs']) + conf.check_cfg(package='protobuf', uselib_store='PROTOBUF', args=['--cflags', '--libs']) conf.find_program('protoc', var='PROTOC') + conf.start_msg('Checking for protoc version') + protocver = conf.cmd_and_log(conf.env.PROTOC + ['--version'], output=Context.BOTH) + protocver = ''.join(protocver).strip()[protocver[0].rfind(' ')+1:] + conf.end_msg(protocver) + conf.env.PROTOC_MAJOR = protocver[:protocver.find('.')] conf.env.PROTOC_ST = '-I%s' - + conf.env.PROTOC_FL = '%s' diff --git a/waflib/extras/pyqt5.py b/waflib/extras/pyqt5.py new file mode 100644 index 0000000000..0c083a1247 --- /dev/null +++ b/waflib/extras/pyqt5.py @@ -0,0 +1,246 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Federico Pellegrin, 2016-2022 (fedepell) adapted for Python + +""" +This tool helps with finding Python Qt5 tools and libraries, +and provides translation from QT5 files to Python code. 
+ +The following snippet illustrates the tool usage:: + + def options(opt): + opt.load('py pyqt5') + + def configure(conf): + conf.load('py pyqt5') + + def build(bld): + bld( + features = 'py pyqt5', + source = 'main.py textures.qrc aboutDialog.ui', + ) + +Here, the UI description and resource files will be processed +to generate code. + +Usage +===== + +Load the "pyqt5" tool. + +Add into the sources list also the qrc resources files or ui5 +definition files and they will be translated into python code +with the system tools (PyQt5, PySide2, PyQt4 are searched in this +order) and then compiled +""" + +try: + from xml.sax import make_parser + from xml.sax.handler import ContentHandler +except ImportError: + has_xml = False + ContentHandler = object +else: + has_xml = True + +import os +from waflib.Tools import python +from waflib import Task, Options +from waflib.TaskGen import feature, extension +from waflib.Configure import conf +from waflib import Logs + +EXT_RCC = ['.qrc'] +""" +File extension for the resource (.qrc) files +""" + +EXT_UI = ['.ui'] +""" +File extension for the user interface (.ui) files +""" + + +class XMLHandler(ContentHandler): + """ + Parses ``.qrc`` files + """ + def __init__(self): + self.buf = [] + self.files = [] + def startElement(self, name, attrs): + if name == 'file': + self.buf = [] + def endElement(self, name): + if name == 'file': + self.files.append(str(''.join(self.buf))) + def characters(self, cars): + self.buf.append(cars) + +@extension(*EXT_RCC) +def create_pyrcc_task(self, node): + "Creates rcc and py task for ``.qrc`` files" + rcnode = node.change_ext('.py') + self.create_task('pyrcc', node, rcnode) + if getattr(self, 'install_from', None): + self.install_from = self.install_from.get_bld() + else: + self.install_from = self.path.get_bld() + self.install_path = getattr(self, 'install_path', '${PYTHONDIR}') + self.process_py(rcnode) + +@extension(*EXT_UI) +def create_pyuic_task(self, node): + "Create uic tasks and py for user interface ``.ui`` definition files" + uinode = node.change_ext('.py') + self.create_task('ui5py', node, uinode) + if getattr(self, 'install_from', None): + self.install_from = self.install_from.get_bld() + else: + self.install_from = self.path.get_bld() + self.install_path = getattr(self, 'install_path', '${PYTHONDIR}') + self.process_py(uinode) + +@extension('.ts') +def add_pylang(self, node): + """Adds all the .ts file into ``self.lang``""" + self.lang = self.to_list(getattr(self, 'lang', [])) + [node] + +@feature('pyqt5') +def apply_pyqt5(self): + """ + The additional parameters are: + + :param lang: list of translation files (\\*.ts) to process + :type lang: list of :py:class:`waflib.Node.Node` or string without the .ts extension + :param langname: if given, transform the \\*.ts files into a .qrc files to include in the binary file + :type langname: :py:class:`waflib.Node.Node` or string without the .qrc extension + """ + if getattr(self, 'lang', None): + qmtasks = [] + for x in self.to_list(self.lang): + if isinstance(x, str): + x = self.path.find_resource(x + '.ts') + qmtasks.append(self.create_task('ts2qm', x, x.change_ext('.qm'))) + + + if getattr(self, 'langname', None): + qmnodes = [k.outputs[0] for k in qmtasks] + rcnode = self.langname + if isinstance(rcnode, str): + rcnode = self.path.find_or_declare(rcnode + '.qrc') + t = self.create_task('qm2rcc', qmnodes, rcnode) + create_pyrcc_task(self, t.outputs[0]) + +class pyrcc(Task.Task): + """ + Processes ``.qrc`` files + """ + color = 'BLUE' + run_str = '${QT_PYRCC} 
${QT_PYRCC_FLAGS} ${SRC} -o ${TGT}' + ext_out = ['.py'] + + def rcname(self): + return os.path.splitext(self.inputs[0].name)[0] + + def scan(self): + """Parse the *.qrc* files""" + if not has_xml: + Logs.error('No xml.sax support was found, rcc dependencies will be incomplete!') + return ([], []) + + parser = make_parser() + curHandler = XMLHandler() + parser.setContentHandler(curHandler) + fi = open(self.inputs[0].abspath(), 'r') + try: + parser.parse(fi) + finally: + fi.close() + + nodes = [] + names = [] + root = self.inputs[0].parent + for x in curHandler.files: + nd = root.find_resource(x) + if nd: + nodes.append(nd) + else: + names.append(x) + return (nodes, names) + + +class ui5py(Task.Task): + """ + Processes ``.ui`` files for python + """ + color = 'BLUE' + run_str = '${QT_PYUIC} ${QT_PYUIC_FLAGS} ${SRC} -o ${TGT}' + ext_out = ['.py'] + +class ts2qm(Task.Task): + """ + Generates ``.qm`` files from ``.ts`` files + """ + color = 'BLUE' + run_str = '${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}' + +class qm2rcc(Task.Task): + """ + Generates ``.qrc`` files from ``.qm`` files + """ + color = 'BLUE' + after = 'ts2qm' + def run(self): + """Create a qrc file including the inputs""" + txt = '\n'.join(['%s' % k.path_from(self.outputs[0].parent) for k in self.inputs]) + code = '\n\n%s\n\n' % txt + self.outputs[0].write(code) + +def configure(self): + self.find_pyqt5_binaries() + + # warn about this during the configuration too + if not has_xml: + Logs.error('No xml.sax support was found, rcc dependencies will be incomplete!') + +@conf +def find_pyqt5_binaries(self): + """ + Detects PyQt5 or PySide2 programs such as pyuic5/pyside2-uic, pyrcc5/pyside2-rcc + """ + env = self.env + + if getattr(Options.options, 'want_pyqt5', True): + self.find_program(['pyuic5'], var='QT_PYUIC') + self.find_program(['pyrcc5'], var='QT_PYRCC') + self.find_program(['pylupdate5'], var='QT_PYLUPDATE') + elif getattr(Options.options, 'want_pyside2', True): + self.find_program(['pyside2-uic','uic-qt5'], var='QT_PYUIC') + self.find_program(['pyside2-rcc','rcc-qt5'], var='QT_PYRCC') + self.find_program(['pyside2-lupdate','lupdate-qt5'], var='QT_PYLUPDATE') + elif getattr(Options.options, 'want_pyqt4', True): + self.find_program(['pyuic4'], var='QT_PYUIC') + self.find_program(['pyrcc4'], var='QT_PYRCC') + self.find_program(['pylupdate4'], var='QT_PYLUPDATE') + else: + self.find_program(['pyuic5','pyside2-uic','pyuic4','uic-qt5'], var='QT_PYUIC') + self.find_program(['pyrcc5','pyside2-rcc','pyrcc4','rcc-qt5'], var='QT_PYRCC') + self.find_program(['pylupdate5', 'pyside2-lupdate','pylupdate4','lupdate-qt5'], var='QT_PYLUPDATE') + + if not env.QT_PYUIC: + self.fatal('cannot find the uic compiler for python for qt5') + + if not env.QT_PYRCC: + self.fatal('cannot find the rcc compiler for python for qt5') + + self.find_program(['lrelease-qt5', 'lrelease'], var='QT_LRELEASE') + +def options(opt): + """ + Command-line options + """ + pyqt5opt=opt.add_option_group("Python QT5 Options") + pyqt5opt.add_option('--pyqt5-pyqt5', action='store_true', default=False, dest='want_pyqt5', help='use PyQt5 bindings as python QT5 bindings (default PyQt5 is searched first, PySide2 after, PyQt4 last)') + pyqt5opt.add_option('--pyqt5-pyside2', action='store_true', default=False, dest='want_pyside2', help='use PySide2 bindings as python QT5 bindings (default PyQt5 is searched first, PySide2 after, PyQt4 last)') + pyqt5opt.add_option('--pyqt5-pyqt4', action='store_true', default=False, dest='want_pyqt4', help='use PyQt4 bindings as python 
QT5 bindings (default PyQt5 is searched first, PySide2 after, PyQt4 last)') diff --git a/waflib/extras/pytest.py b/waflib/extras/pytest.py new file mode 100644 index 0000000000..fc9ad1c23e --- /dev/null +++ b/waflib/extras/pytest.py @@ -0,0 +1,240 @@ +#! /usr/bin/env python +# encoding: utf-8 +# Calle Rosenquist, 2016-2018 (xbreak) + +""" +Provides Python unit test support using :py:class:`waflib.Tools.waf_unit_test.utest` +task via the **pytest** feature. + +To use pytest the following is needed: + +1. Load `pytest` and the dependency `waf_unit_test` tools. +2. Create a task generator with feature `pytest` (not `test`) and customize behaviour with + the following attributes: + + - `pytest_source`: Test input files. + - `ut_str`: Test runner command, e.g. ``${PYTHON} -B -m unittest discover`` or + if nose is used: ``${NOSETESTS} --no-byte-compile ${SRC}``. + - `ut_shell`: Determines if ``ut_str`` is executed in a shell. Default: False. + - `ut_cwd`: Working directory for test runner. Defaults to directory of + first ``pytest_source`` file. + + Additionally the following `pytest` specific attributes are used in dependent taskgens: + + - `pytest_path`: Node or string list of additional Python paths. + - `pytest_libpath`: Node or string list of additional library paths. + +The `use` dependencies are used for both update calculation and to populate +the following environment variables for the `pytest` test runner: + +1. `PYTHONPATH` (`sys.path`) of any dependent taskgen that has the feature `py`: + + - `install_from` attribute is used to determine where the root of the Python sources + are located. If `install_from` is not specified the default is to use the taskgen path + as the root. + + - `pytest_path` attribute is used to manually specify additional Python paths. + +2. Dynamic linker search path variable (e.g. `LD_LIBRARY_PATH`) of any dependent taskgen with + non-static link_task. + + - `pytest_libpath` attribute is used to manually specify additional linker paths. + +3. Java class search path (CLASSPATH) of any Java/Javalike dependency + +Note: `pytest` cannot automatically determine the correct `PYTHONPATH` for `pyext` taskgens + because the extension might be part of a Python package or used standalone: + + - When used as part of another `py` package, the `PYTHONPATH` is provided by + that taskgen so no additional action is required. + + - When used as a standalone module, the user needs to specify the `PYTHONPATH` explicitly + via the `pytest_path` attribute on the `pyext` taskgen. + + For details c.f. the pytest playground examples. + + +For example:: + + # A standalone Python C extension that demonstrates unit test environment population + # of PYTHONPATH and LD_LIBRARY_PATH/PATH/DYLD_LIBRARY_PATH. + # + # Note: `pytest_path` is provided here because pytest cannot automatically determine + # if the extension is part of another Python package or is used standalone. + bld(name = 'foo_ext', + features = 'c cshlib pyext', + source = 'src/foo_ext.c', + target = 'foo_ext', + pytest_path = [ bld.path.get_bld() ]) + + # Python package under test that also depend on the Python module `foo_ext` + # + # Note: `install_from` is added automatically to `PYTHONPATH`. + bld(name = 'foo', + features = 'py', + use = 'foo_ext', + source = bld.path.ant_glob('src/foo/*.py'), + install_from = 'src') + + # Unit test example using the built in module unittest and let that discover + # any test cases. 
+ bld(name = 'foo_test', + features = 'pytest', + use = 'foo', + pytest_source = bld.path.ant_glob('test/*.py'), + ut_str = '${PYTHON} -B -m unittest discover') + +""" + +import os +from waflib import Task, TaskGen, Errors, Utils, Logs +from waflib.Tools import ccroot + +def _process_use_rec(self, name): + """ + Recursively process ``use`` for task generator with name ``name``.. + Used by pytest_process_use. + """ + if name in self.pytest_use_not or name in self.pytest_use_seen: + return + try: + tg = self.bld.get_tgen_by_name(name) + except Errors.WafError: + self.pytest_use_not.add(name) + return + + self.pytest_use_seen.append(name) + tg.post() + + for n in self.to_list(getattr(tg, 'use', [])): + _process_use_rec(self, n) + + +@TaskGen.feature('pytest') +@TaskGen.after_method('process_source', 'apply_link') +def pytest_process_use(self): + """ + Process the ``use`` attribute which contains a list of task generator names and store + paths that later is used to populate the unit test runtime environment. + """ + self.pytest_use_not = set() + self.pytest_use_seen = [] + self.pytest_paths = [] # strings or Nodes + self.pytest_libpaths = [] # strings or Nodes + self.pytest_javapaths = [] # strings or Nodes + self.pytest_dep_nodes = [] + + names = self.to_list(getattr(self, 'use', [])) + for name in names: + _process_use_rec(self, name) + + def extend_unique(lst, varlst): + ext = [] + for x in varlst: + if x not in lst: + ext.append(x) + lst.extend(ext) + + # Collect type specific info needed to construct a valid runtime environment + # for the test. + for name in self.pytest_use_seen: + tg = self.bld.get_tgen_by_name(name) + + extend_unique(self.pytest_paths, Utils.to_list(getattr(tg, 'pytest_path', []))) + extend_unique(self.pytest_libpaths, Utils.to_list(getattr(tg, 'pytest_libpath', []))) + + if 'py' in tg.features: + # Python dependencies are added to PYTHONPATH + pypath = getattr(tg, 'install_from', tg.path) + + if 'buildcopy' in tg.features: + # Since buildcopy is used we assume that PYTHONPATH in build should be used, + # not source + extend_unique(self.pytest_paths, [pypath.get_bld().abspath()]) + + # Add buildcopy output nodes to dependencies + extend_unique(self.pytest_dep_nodes, [o for task in getattr(tg, 'tasks', []) \ + for o in getattr(task, 'outputs', [])]) + else: + # If buildcopy is not used, depend on sources instead + extend_unique(self.pytest_dep_nodes, tg.source) + extend_unique(self.pytest_paths, [pypath.abspath()]) + + if 'javac' in tg.features: + # If a JAR is generated point to that, otherwise to directory + if getattr(tg, 'jar_task', None): + extend_unique(self.pytest_javapaths, [tg.jar_task.outputs[0].abspath()]) + else: + extend_unique(self.pytest_javapaths, [tg.path.get_bld()]) + + # And add respective dependencies if present + if tg.use_lst: + extend_unique(self.pytest_javapaths, tg.use_lst) + + if getattr(tg, 'link_task', None): + # For tasks with a link_task (C, C++, D et.c.) include their library paths: + if not isinstance(tg.link_task, ccroot.stlink_task): + extend_unique(self.pytest_dep_nodes, tg.link_task.outputs) + extend_unique(self.pytest_libpaths, tg.link_task.env.LIBPATH) + + if 'pyext' in tg.features: + # If the taskgen is extending Python we also want to add the interpreter libpath. 
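+                              # (LIBPATH_PYEXT is normally set at configure time by the python tool,
+                              # e.g. check_python_headers, and typically points at the interpreter's
+                              # library directory)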
+ extend_unique(self.pytest_libpaths, tg.link_task.env.LIBPATH_PYEXT) + else: + # Only add to libpath if the link task is not a Python extension + extend_unique(self.pytest_libpaths, [tg.link_task.outputs[0].parent.abspath()]) + + +@TaskGen.feature('pytest') +@TaskGen.after_method('pytest_process_use') +def make_pytest(self): + """ + Creates a ``utest`` task with a populated environment for Python if not specified in ``ut_env``: + + - Paths in `pytest_paths` attribute are used to populate PYTHONPATH + - Paths in `pytest_libpaths` attribute are used to populate the system library path (e.g. LD_LIBRARY_PATH) + """ + nodes = self.to_nodes(self.pytest_source) + tsk = self.create_task('utest', nodes) + + tsk.dep_nodes.extend(self.pytest_dep_nodes) + if getattr(self, 'ut_str', None): + self.ut_run, lst = Task.compile_fun(self.ut_str, shell=getattr(self, 'ut_shell', False)) + tsk.vars = lst + tsk.vars + + if getattr(self, 'ut_cwd', None): + if isinstance(self.ut_cwd, str): + # we want a Node instance + if os.path.isabs(self.ut_cwd): + self.ut_cwd = self.bld.root.make_node(self.ut_cwd) + else: + self.ut_cwd = self.path.make_node(self.ut_cwd) + else: + if tsk.inputs: + self.ut_cwd = tsk.inputs[0].parent + else: + raise Errors.WafError("no valid input files for pytest task, check pytest_source value") + + if not self.ut_cwd.exists(): + self.ut_cwd.mkdir() + + if not hasattr(self, 'ut_env'): + self.ut_env = dict(os.environ) + def add_paths(var, lst): + # Add list of paths to a variable, lst can contain strings or nodes + lst = [ str(n) for n in lst ] + Logs.debug("ut: %s: Adding paths %s=%s", self, var, lst) + self.ut_env[var] = os.pathsep.join(lst) + os.pathsep + self.ut_env.get(var, '') + + # Prepend dependency paths to PYTHONPATH, CLASSPATH and LD_LIBRARY_PATH + add_paths('PYTHONPATH', self.pytest_paths) + add_paths('CLASSPATH', self.pytest_javapaths) + + if Utils.is_win32: + add_paths('PATH', self.pytest_libpaths) + elif Utils.unversioned_sys_platform() == 'darwin': + add_paths('DYLD_LIBRARY_PATH', self.pytest_libpaths) + add_paths('LD_LIBRARY_PATH', self.pytest_libpaths) + else: + add_paths('LD_LIBRARY_PATH', self.pytest_libpaths) + diff --git a/waflib/extras/qnxnto.py b/waflib/extras/qnxnto.py index 842cc00fd8..1158124da9 100644 --- a/waflib/extras/qnxnto.py +++ b/waflib/extras/qnxnto.py @@ -26,7 +26,7 @@ def __init__(self, prog, **kw): if Popen.verbose: sys.stdout.write("Popen created: %r, kw=%r..." % (prog, kw)) - do_delegate = kw.get('stdout', None) == -1 and kw.get('stderr', None) == -1 + do_delegate = kw.get('stdout') == -1 and kw.get('stderr') == -1 if do_delegate: if Popen.verbose: print("Delegating to real Popen") @@ -43,9 +43,7 @@ def __getattr__(self, name): if Popen.verbose: sys.stdout.write("Getattr: %s..." 
% name) if name in Popen.__slots__: - if Popen.verbose: - print("In slots!") - return object.__getattr__(self, name) + return object.__getattribute__(self, name) else: if self.popen is not None: if Popen.verbose: diff --git a/waflib/Tools/qt4.py b/waflib/extras/qt4.py similarity index 96% rename from waflib/Tools/qt4.py rename to waflib/extras/qt4.py index 7a4edfa8e5..d19a4ddac3 100644 --- a/waflib/Tools/qt4.py +++ b/waflib/extras/qt4.py @@ -52,7 +52,7 @@ def add_includes_paths(self): incs = set(self.to_list(getattr(self, 'includes', ''))) for x in self.compiled_tasks: incs.add(x.inputs[0].parent.path_from(self.path)) - self.includes = list(incs) + self.includes = sorted(incs) Note: another tool provides Qt processing that does not require .moc includes, see 'playground/slow_qt/'. @@ -155,12 +155,13 @@ def create_moc_task(self, h_node, m_node): # direct injection in the build phase (safe because called from the main thread) gen = self.generator.bld.producer - gen.outstanding.insert(0, tsk) + gen.outstanding.append(tsk) gen.total += 1 return tsk def moc_h_ext(self): + ext = [] try: ext = Options.options.qt_header_ext.split() except AttributeError: @@ -189,7 +190,7 @@ def add_moc_tasks(self): include_nodes = [node.parent] + self.generator.includes_nodes moctasks = [] - mocfiles = set([]) + mocfiles = set() for d in bld.raw_deps.get(self.uid(), []): if not d.endswith('.moc'): continue @@ -238,7 +239,6 @@ class trans_update(Task.Task): """Update a .ts files from a list of C++ files""" run_str = '${QT_LUPDATE} ${SRC} -ts ${TGT}' color = 'BLUE' -Task.update_outputs(trans_update) class XMLHandler(ContentHandler): """ @@ -290,11 +290,11 @@ def build(bld): The additional parameters are: - :param lang: list of translation files (\*.ts) to process + :param lang: list of translation files (\\*.ts) to process :type lang: list of :py:class:`waflib.Node.Node` or string without the .ts extension - :param update: whether to process the C++ files to update the \*.ts files (use **waf --translate**) + :param update: whether to process the C++ files to update the \\*.ts files (use **waf --translate**) :type update: bool - :param langname: if given, transform the \*.ts files into a .qrc files to include in the binary file + :param langname: if given, transform the \\*.ts files into a .qrc files to include in the binary file :type langname: :py:class:`waflib.Node.Node` or string without the .qrc extension """ if getattr(self, 'lang', None): @@ -321,7 +321,8 @@ def build(bld): lst = [] for flag in self.to_list(self.env['CXXFLAGS']): - if len(flag) < 2: continue + if len(flag) < 2: + continue f = flag[0:2] if f in ('-D', '-I', '/D', '/I'): if (f[0] == '/'): @@ -368,8 +369,10 @@ def scan(self): root = self.inputs[0].parent for x in curHandler.files: nd = root.find_resource(x) - if nd: nodes.append(nd) - else: names.append(x) + if nd: + nodes.append(nd) + else: + names.append(x) return (nodes, names) class moc(Task.Task): @@ -441,7 +444,7 @@ def find_qt4_binaries(self): # the qt directory has been given from QT4_ROOT - deduce the qt binary path if not qtdir: qtdir = os.environ.get('QT4_ROOT', '') - qtbin = os.environ.get('QT4_BIN', None) or os.path.join(qtdir, 'bin') + qtbin = os.environ.get('QT4_BIN') or os.path.join(qtdir, 'bin') if qtbin: paths = [qtbin] @@ -531,7 +534,7 @@ def find_bin(lst, var): @conf def find_qt4_libraries(self): - qtlibs = getattr(Options.options, 'qtlibs', None) or os.environ.get("QT4_LIBDIR", None) + qtlibs = getattr(Options.options, 'qtlibs', None) or os.environ.get("QT4_LIBDIR") if not 
qtlibs: try: qtlibs = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_LIBS']).strip() @@ -540,13 +543,13 @@ def find_qt4_libraries(self): qtlibs = os.path.join(qtdir, 'lib') self.msg('Found the Qt4 libraries in', qtlibs) - qtincludes = os.environ.get("QT4_INCLUDES", None) or self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_HEADERS']).strip() + qtincludes = os.environ.get("QT4_INCLUDES") or self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_HEADERS']).strip() env = self.env if not 'PKG_CONFIG_PATH' in os.environ: os.environ['PKG_CONFIG_PATH'] = '%s:%s/pkgconfig:/usr/lib/qt4/lib/pkgconfig:/opt/qt4/lib/pkgconfig:/usr/lib/qt4/lib:/opt/qt4/lib' % (qtlibs, qtlibs) try: - if os.environ.get("QT4_XCOMPILE", None): + if os.environ.get("QT4_XCOMPILE"): raise self.errors.ConfigurationError() self.check_cfg(atleast_pkgconfig_version='0.1') except self.errors.ConfigurationError: diff --git a/waflib/extras/relocation.py b/waflib/extras/relocation.py index ebbf6e4b0a..7e821f4166 100644 --- a/waflib/extras/relocation.py +++ b/waflib/extras/relocation.py @@ -15,7 +15,7 @@ import os from waflib import Build, ConfigSet, Task, Utils, Errors -from waflib.TaskGen import feature, before_method, after_method +from waflib.TaskGen import feature, after_method EXTRA_LOCK = '.old_srcdir' diff --git a/waflib/extras/remote.py b/waflib/extras/remote.py index de9269bf9b..f43b600f02 100644 --- a/waflib/extras/remote.py +++ b/waflib/extras/remote.py @@ -76,7 +76,7 @@ def build(bld): 4. Setup the ssh server and ssh keys - The ssh key should not be protected by a password, or it will prompt for it everytime. + The ssh key should not be protected by a password, or it will prompt for it every time. Create the key on the client: .. code:: bash @@ -203,7 +203,7 @@ def skip_unbuildable_variant(self): Options.commands.remove(k) def login_to_host(self, login): - return re.sub('(\w+@)', '', login) + return re.sub(r'(\w+@)', '', login) def variant_to_login(self, variant): """linux_32_debug -> search env.LINUX_32 and then env.LINUX""" diff --git a/waflib/extras/resx.py b/waflib/extras/resx.py index 1709c8cecf..caf4d318bb 100644 --- a/waflib/extras/resx.py +++ b/waflib/extras/resx.py @@ -1,5 +1,5 @@ #! /usr/bin/env python -# +# encoding: utf-8 import os from waflib import Task @@ -18,8 +18,8 @@ def resx_file(self, node): self.bld.fatal('resx_file has no link task for use %r' % self) # Given assembly 'Foo' and file 'Sub/Dir/File.resx', create 'Foo.Sub.Dir.File.resources' - assembly = os.path.splitext(self.gen)[0] - res = os.path.splitext(node.path_from(self.path))[0].replace('/', '.') + assembly = getattr(self, 'namespace', os.path.splitext(self.gen)[0]) + res = os.path.splitext(node.path_from(self.path))[0].replace('/', '.').replace('\\', '.') out = self.path.find_or_declare(assembly + '.' 
+ res + '.resources') tsk = self.create_task('resgen', node, out) diff --git a/waflib/extras/review.py b/waflib/extras/review.py index 4a7ad2f4fe..561e06219d 100644 --- a/waflib/extras/review.py +++ b/waflib/extras/review.py @@ -79,18 +79,12 @@ def plist(self): return p def __str__(self): - s = "{" - l = len(self._keys) + buf = [] + buf.append("{ ") for k, v in self.items(): - l -= 1 - strkey = str(k) - if isinstance(k, basestring): strkey = "'"+strkey+"'" - strval = str(v) - if isinstance(v, basestring): strval = "'"+strval+"'" - s += strkey + ":" + strval - if l > 0: s += ", " - s += "}" - return s + buf.append('%r : %r, ' % (k, v)) + buf.append("}") + return ''.join(buf) review_options = Odict() """ @@ -248,7 +242,8 @@ def compare_review_set(self, set1, set2): """ Return true if the review sets specified are equal. """ - if len(set1.keys()) != len(set2.keys()): return False + if len(set1.keys()) != len(set2.keys()): + return False for key in set1.keys(): if not key in set2 or set1[key] != set2[key]: return False @@ -265,7 +260,8 @@ def display_review_set(self, review_set): name = ", ".join(opt._short_opts + opt._long_opts) help = opt.help actual = None - if dest in review_set: actual = review_set[dest] + if dest in review_set: + actual = review_set[dest] default = review_defaults[dest] lines.append(self.format_option(name, help, actual, default, term_width)) return "Configuration:\n\n" + "\n\n".join(lines) + "\n" @@ -284,7 +280,8 @@ def val_to_str(val): w = textwrap.TextWrapper() w.width = term_width - 1 - if w.width < 60: w.width = 60 + if w.width < 60: + w.width = 60 out = "" diff --git a/waflib/extras/rst.py b/waflib/extras/rst.py index afab08292c..f3c3a5eba4 100644 --- a/waflib/extras/rst.py +++ b/waflib/extras/rst.py @@ -30,14 +30,17 @@ def build(bld): """ -import os, re +import re from waflib import Node, Utils, Task, Errors, Logs from waflib.TaskGen import feature, before_method rst_progs = "rst2html rst2xetex rst2latex rst2xml rst2pdf rst2s5 rst2man rst2odt rst2rtf".split() -def parse_rst_node(node, nodes, names, seen): +def parse_rst_node(task, node, nodes, names, seen, dirs=None): # TODO add extensibility, to handle custom rst include tags... 
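+	# dirs holds the directories searched for referenced files; by default the
+	# source and build directories of the rst file currently being scanned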
+ if dirs is None: + dirs = (node.parent,node.get_bld().parent) + if node in seen: return seen.append(node) @@ -46,14 +49,19 @@ def parse_rst_node(node, nodes, names, seen): for match in re_rst.finditer(code): ipath = match.group('file') itype = match.group('type') - Logs.debug("rst: visiting %s: %s" % (itype, ipath)) - found = node.parent.find_resource(ipath) - if found: - nodes.append(found) - if itype == 'include': - parse_rst_node(found, nodes, names, seen) - else: - names.append(ipath) + Logs.debug('rst: visiting %s: %s', itype, ipath) + found = False + for d in dirs: + Logs.debug('rst: looking for %s in %s', ipath, d.abspath()) + found = d.find_node(ipath) + if found: + Logs.debug('rst: found %s as %s', ipath, found.abspath()) + nodes.append((itype, found)) + if itype == 'include': + parse_rst_node(task, found, nodes, names, seen) + break + if not found: + names.append((itype, ipath)) class docutils(Task.Task): """ @@ -74,13 +82,13 @@ def scan(self): if not node: return (nodes, names) - parse_rst_node(node, nodes, names, seen) + parse_rst_node(self, node, nodes, names, seen) - Logs.debug("rst: %s: found the following file deps: %s" % (repr(self), nodes)) + Logs.debug('rst: %r: found the following file deps: %r', self, nodes) if names: - Logs.warn("rst: %s: could not find the following file deps: %s" % (repr(self), names)) + Logs.warn('rst: %r: could not find the following file deps: %r', self, names) - return (nodes, names) + return ([v for (t,v) in nodes], [v for (t,v) in names]) def check_status(self, msg, retcode): """ @@ -92,7 +100,7 @@ def check_status(self, msg, retcode): :type retcode: boolean """ if retcode != 0: - raise Errors.WafError("%r command exit status %r" % (msg, retcode)) + raise Errors.WafError('%r command exit status %r' % (msg, retcode)) def run(self): """ @@ -116,7 +124,7 @@ def scan(self): if stylesheet is not None: ssnode = self.generator.to_nodes(stylesheet)[0] nodes.append(ssnode) - Logs.debug("rst: adding dep to %s %s" % (attribute, stylesheet)) + Logs.debug('rst: adding dep to %s %s', attribute, stylesheet) return nodes, names @@ -235,7 +243,7 @@ def apply_rst(self): inst_to = getattr(self, 'install_path', None) if inst_to: - self.install_task = self.bld.install_files(inst_to, task.outputs[:], env=self.env) + self.install_task = self.add_install_files(install_to=inst_to, install_from=task.outputs[:]) self.source = [] diff --git a/waflib/extras/run_do_script.py b/waflib/extras/run_do_script.py index 2bf714897e..07e3aa2591 100644 --- a/waflib/extras/run_do_script.py +++ b/waflib/extras/run_do_script.py @@ -69,7 +69,6 @@ def configure(ctx): ctx.env.STATAFLAGS = STATAFLAGS ctx.env.STATAENCODING = STATAENCODING -@Task.update_outputs class run_do_script_base(Task.Task): """Run a Stata do-script from the bldnode directory.""" run_str = '"${STATACMD}" ${STATAFLAGS} "${SRC[0].abspath()}" "${DOFILETRUNK}"' @@ -84,8 +83,8 @@ def run(self): run_do_script_base.run(self) ret, log_tail = self.check_erase_log_file() if ret: - Logs.error("""Running Stata on %s failed with code %r.\n\nCheck the log file %s, last 10 lines\n\n%s\n\n\n""" % ( - self.inputs[0].abspath(), ret, self.env.LOGFILEPATH, log_tail)) + Logs.error("""Running Stata on %r failed with code %r.\n\nCheck the log file %s, last 10 lines\n\n%s\n\n\n""", + self.inputs[0], ret, self.env.LOGFILEPATH, log_tail) return ret def check_erase_log_file(self): @@ -102,7 +101,7 @@ def check_erase_log_file(self): with open(**kwargs) as log: log_tail = log.readlines()[-10:] for line in log_tail: - error_found = 
re.match("r\(([0-9]+)\)", line) + error_found = re.match(r"r\(([0-9]+)\)", line) if error_found: return error_found.group(1), ''.join(log_tail) else: @@ -133,7 +132,7 @@ def apply_run_do_script(tg): if not node: tg.bld.fatal('Could not find dependency %r for running %r' % (x, src_node.abspath())) tsk.dep_nodes.append(node) - Logs.debug('deps: found dependencies %r for running %r' % (tsk.dep_nodes, src_node.abspath())) + Logs.debug('deps: found dependencies %r for running %r', tsk.dep_nodes, src_node.abspath()) # Bypass the execution of process_source by setting the source to an empty list tg.source = [] diff --git a/waflib/extras/run_m_script.py b/waflib/extras/run_m_script.py index 534c8d927d..b5f27ebe08 100644 --- a/waflib/extras/run_m_script.py +++ b/waflib/extras/run_m_script.py @@ -36,7 +36,6 @@ def configure(ctx): Do not load the 'run_m_script' tool in the main wscript.\n\n""" % MATLAB_COMMANDS) ctx.env.MATLABFLAGS = '-wait -nojvm -nosplash -minimize' -@Task.update_outputs class run_m_script_base(Task.Task): """Run a Matlab script.""" run_str = '"${MATLABCMD}" ${MATLABFLAGS} -logfile "${LOGFILEPATH}" -r "try, ${MSCRIPTTRUNK}, exit(0), catch err, disp(err.getReport()), exit(1), end"' @@ -55,8 +54,8 @@ def run(self): mode = 'rb' with open(logfile, mode=mode) as f: tail = f.readlines()[-10:] - Logs.error("""Running Matlab on %s returned the error %r\n\nCheck the log file %s, last 10 lines\n\n%s\n\n\n""" % ( - self.inputs[0].abspath(), ret, logfile, '\n'.join(tail))) + Logs.error("""Running Matlab on %r returned the error %r\n\nCheck the log file %s, last 10 lines\n\n%s\n\n\n""", + self.inputs[0], ret, logfile, '\n'.join(tail)) else: os.remove(logfile) return ret @@ -83,7 +82,7 @@ def apply_run_m_script(tg): if not node: tg.bld.fatal('Could not find dependency %r for running %r' % (x, src_node.abspath())) tsk.dep_nodes.append(node) - Logs.debug('deps: found dependencies %r for running %r' % (tsk.dep_nodes, src_node.abspath())) + Logs.debug('deps: found dependencies %r for running %r', tsk.dep_nodes, src_node.abspath()) # Bypass the execution of process_source by setting the source to an empty list tg.source = [] diff --git a/waflib/extras/run_py_script.py b/waflib/extras/run_py_script.py index 99370efcd5..367038114c 100644 --- a/waflib/extras/run_py_script.py +++ b/waflib/extras/run_py_script.py @@ -35,13 +35,11 @@ def configure(conf): if not conf.env.PY2CMD and not conf.env.PY3CMD: conf.fatal("No Python interpreter found!") -@Task.update_outputs class run_py_2_script(Task.Task): """Run a Python 2 script.""" run_str = '${PY2CMD} ${SRC[0].abspath()}' shell=True -@Task.update_outputs class run_py_3_script(Task.Task): """Run a Python 3 script.""" run_str = '${PY3CMD} ${SRC[0].abspath()}' @@ -51,25 +49,25 @@ class run_py_3_script(Task.Task): @TaskGen.before_method('process_source') def apply_run_py_script(tg): """Task generator for running either Python 2 or Python 3 on a single - script. - + script. + Attributes: * source -- A **single** source node or string. (required) - * target -- A single target or list of targets (nodes or strings). + * target -- A single target or list of targets (nodes or strings) * deps -- A single dependency or list of dependencies (nodes or strings) - * add_to_pythonpath -- A string that will be appended to the PYTHONPATH environment variable. 
- + * add_to_pythonpath -- A string that will be appended to the PYTHONPATH environment variable + If the build environment has an attribute "PROJECT_PATHS" with a key "PROJECT_ROOT", its value will be appended to the PYTHONPATH. - """ # Set the Python version to use, default to 3. v = getattr(tg, 'version', 3) - if v not in (2, 3): raise ValueError("Specify the 'version' attribute for run_py_script task generator as integer 2 or 3.\n Got: %s" %v) + if v not in (2, 3): + raise ValueError("Specify the 'version' attribute for run_py_script task generator as integer 2 or 3.\n Got: %s" %v) - # Convert sources and targets to nodes + # Convert sources and targets to nodes src_node = tg.path.find_resource(tg.source) tgt_nodes = [tg.path.find_or_declare(t) for t in tg.to_list(tg.target)] @@ -86,7 +84,7 @@ def apply_run_py_script(tg): if getattr(tg, 'add_to_pythonpath', None): tsk.env.env['PYTHONPATH'] += os.pathsep + tg.add_to_pythonpath - # Clean up the PYTHONPATH -- replace double occurrences of path separator + # Clean up the PYTHONPATH -- replace double occurrences of path separator tsk.env.env['PYTHONPATH'] = re.sub(os.pathsep + '+', os.pathsep, tsk.env.env['PYTHONPATH']) # Clean up the PYTHONPATH -- doesn't like starting with path separator @@ -99,7 +97,7 @@ def apply_run_py_script(tg): if not node: tg.bld.fatal('Could not find dependency %r for running %r' % (x, src_node.abspath())) tsk.dep_nodes.append(node) - Logs.debug('deps: found dependencies %r for running %r' % (tsk.dep_nodes, src_node.abspath())) + Logs.debug('deps: found dependencies %r for running %r', tsk.dep_nodes, src_node.abspath()) # Bypass the execution of process_source by setting the source to an empty list tg.source = [] diff --git a/waflib/extras/run_r_script.py b/waflib/extras/run_r_script.py index 8f3e0c86f7..b0d8f2b2cd 100644 --- a/waflib/extras/run_r_script.py +++ b/waflib/extras/run_r_script.py @@ -6,12 +6,12 @@ Run a R script in the directory specified by **ctx.bldnode**. For error-catching purposes, keep an own log-file that is destroyed if the -task finished without error. If not, it will show up as rscript_[index].log +task finished without error. If not, it will show up as rscript_[index].log in the bldnode directory. 
Usage:: - ctx(features='run_r_script', + ctx(features='run_r_script', source='some_script.r', target=['some_table.tex', 'some_figure.eps'], deps='some_data.csv') @@ -34,7 +34,6 @@ def configure(ctx): Do not load the 'run_r_script' tool in the main wscript.\n\n""" % R_COMMANDS) ctx.env.RFLAGS = 'CMD BATCH --slave' -@Task.update_outputs class run_r_script_base(Task.Task): """Run a R script.""" run_str = '"${RCMD}" ${RFLAGS} "${SRC[0].abspath()}" "${LOGFILEPATH}"' @@ -53,8 +52,8 @@ def run(self): mode = 'rb' with open(logfile, mode=mode) as f: tail = f.readlines()[-10:] - Logs.error("""Running R on %s returned the error %r\n\nCheck the log file %s, last 10 lines\n\n%s\n\n\n""" % ( - self.inputs[0].abspath(), ret, logfile, '\n'.join(tail))) + Logs.error("""Running R on %r returned the error %r\n\nCheck the log file %s, last 10 lines\n\n%s\n\n\n""", + self.inputs[0], ret, logfile, '\n'.join(tail)) else: os.remove(logfile) return ret @@ -80,7 +79,7 @@ def apply_run_r_script(tg): if not node: tg.bld.fatal('Could not find dependency %r for running %r' % (x, src_node.abspath())) tsk.dep_nodes.append(node) - Logs.debug('deps: found dependencies %r for running %r' % (tsk.dep_nodes, src_node.abspath())) + Logs.debug('deps: found dependencies %r for running %r', tsk.dep_nodes, src_node.abspath()) # Bypass the execution of process_source by setting the source to an empty list tg.source = [] diff --git a/waflib/extras/sas.py b/waflib/extras/sas.py index e279612d1d..754c6148de 100644 --- a/waflib/extras/sas.py +++ b/waflib/extras/sas.py @@ -4,8 +4,8 @@ "SAS support" -import os, re -from waflib import Utils, Task, TaskGen, Runner, Build, Errors, Node, Logs +import os +from waflib import Task, Errors, Logs from waflib.TaskGen import feature, before_method sas_fun, _ = Task.compile_fun('sas -sysin ${SRCFILE} -log ${LOGFILE} -print ${LSTFILE}', shell=False) @@ -14,9 +14,6 @@ class sas(Task.Task): vars = ['SAS', 'SASFLAGS'] def run(task): command = 'SAS' - env = task.env - bld = task.generator.bld - fun = sas_fun node = task.inputs[0] @@ -25,7 +22,7 @@ def run(task): # set the cwd task.cwd = task.inputs[0].parent.get_src().abspath() - Logs.debug('runner: %s on %s' % (command, node.abspath)) + Logs.debug('runner: %r on %r', command, node) SASINPUTS = node.parent.get_bld().abspath() + os.pathsep + node.parent.get_src().abspath() + os.pathsep task.env.env = {'SASINPUTS': SASINPUTS} @@ -35,10 +32,10 @@ def run(task): task.env.LSTFILE = lstfilenode.abspath() ret = fun(task) if ret: - Logs.error('Running %s on %r returned a non-zero exit' % (command, node)) - Logs.error('SRCFILE = %r' % node) - Logs.error('LOGFILE = %r' % logfilenode) - Logs.error('LSTFILE = %r' % lstfilenode) + Logs.error('Running %s on %r returned a non-zero exit', command, node) + Logs.error('SRCFILE = %r', node) + Logs.error('LOGFILE = %r', logfilenode) + Logs.error('LSTFILE = %r', lstfilenode) return ret @feature('sas') @@ -56,9 +53,12 @@ def apply_sas(self): deps = self.to_list(self.deps) for filename in deps: n = self.path.find_resource(filename) - if not n: n = self.bld.root.find_resource(filename) - if not n: raise Errors.WafError('cannot find input file %s for processing' % filename) - if not n in deps_lst: deps_lst.append(n) + if not n: + n = self.bld.root.find_resource(filename) + if not n: + raise Errors.WafError('cannot find input file %s for processing' % filename) + if not n in deps_lst: + deps_lst.append(n) for node in self.to_nodes(self.source): if self.type == 'sas': diff --git a/waflib/extras/satellite_assembly.py 
b/waflib/extras/satellite_assembly.py new file mode 100644 index 0000000000..005eb074f9 --- /dev/null +++ b/waflib/extras/satellite_assembly.py @@ -0,0 +1,57 @@ +#!/usr/bin/python +# encoding: utf-8 +# vim: tabstop=4 noexpandtab + +""" +Create a satellite assembly from "*.??.txt" files. ?? stands for a language code. + +The projects Resources subfolder contains resources.??.txt string files for several languages. +The build folder will hold the satellite assemblies as ./??/ExeName.resources.dll + +#gen becomes template (It is called gen because it also uses resx.py). +bld(source='Resources/resources.de.txt',gen=ExeName) +""" + +import os, re +from waflib import Task +from waflib.TaskGen import feature,before_method + +class al(Task.Task): + run_str = '${AL} ${ALFLAGS}' + +@feature('satellite_assembly') +@before_method('process_source') +def satellite_assembly(self): + if not getattr(self, 'gen', None): + self.bld.fatal('satellite_assembly needs a template assembly provided with the "gen" parameter') + res_lang = re.compile(r'(.*)\.(\w\w)\.(?:resx|txt)',flags=re.I) + + # self.source can contain node objects, so this will break in one way or another + self.source = self.to_list(self.source) + for i, x in enumerate(self.source): + #x = 'resources/resources.de.resx' + #x = 'resources/resources.de.txt' + mo = res_lang.match(x) + if mo: + template = os.path.splitext(self.gen)[0] + templatedir, templatename = os.path.split(template) + res = mo.group(1) + lang = mo.group(2) + #./Resources/resources.de.resources + resources = self.path.find_or_declare(res+ '.' + lang + '.resources') + self.create_task('resgen', self.to_nodes(x), [resources]) + #./de/Exename.resources.dll + satellite = self.path.find_or_declare(os.path.join(templatedir,lang,templatename) + '.resources.dll') + tsk = self.create_task('al',[resources],[satellite]) + tsk.env.append_value('ALFLAGS','/template:'+os.path.join(self.path.relpath(),self.gen)) + tsk.env.append_value('ALFLAGS','/embed:'+resources.relpath()) + tsk.env.append_value('ALFLAGS','/culture:'+lang) + tsk.env.append_value('ALFLAGS','/out:'+satellite.relpath()) + self.source[i] = None + # remove the None elements that we just substituted + self.source = list(filter(lambda x:x, self.source)) + +def configure(ctx): + ctx.find_program('al', var='AL', mandatory=True) + ctx.load('resx') + diff --git a/waflib/extras/scala.py b/waflib/extras/scala.py index eeecd8a66c..a9880f0234 100644 --- a/waflib/extras/scala.py +++ b/waflib/extras/scala.py @@ -8,9 +8,8 @@ scalac outputs files a bit where it wants to """ -import os, re -from waflib.Configure import conf -from waflib import TaskGen, Task, Utils, Options, Build, Errors, Node +import os +from waflib import Task, Utils, Node from waflib.TaskGen import feature, before_method, after_method from waflib.Tools import ccroot @@ -26,8 +25,6 @@ def apply_scalac(self): sourcepath='.', srcdir='.', jar_mf_attributes={}, jar_mf_classpath=[]) - nodes_lst = [] - outdir = getattr(self, 'outdir', None) if outdir: if not isinstance(outdir, Node.Node): @@ -90,7 +87,8 @@ def run(self): bld = gen.bld wd = bld.bldnode.abspath() def to_list(xx): - if isinstance(xx, str): return [xx] + if isinstance(xx, str): + return [xx] return xx self.last_cmd = lst = [] lst.extend(to_list(env['SCALAC'])) @@ -125,5 +123,6 @@ def configure(self): v['CLASSPATH'] = self.environ['CLASSPATH'] v.SCALACFLAGS = ['-verbose'] - if not v['SCALAC']: self.fatal('scalac is required for compiling scala classes') + if not v['SCALAC']: + self.fatal('scalac is required for compiling 
scala classes') diff --git a/waflib/extras/slow_qt4.py b/waflib/extras/slow_qt4.py index 20a9371519..ec7880bf9d 100644 --- a/waflib/extras/slow_qt4.py +++ b/waflib/extras/slow_qt4.py @@ -22,7 +22,7 @@ def configure(conf): @extension(*waflib.Tools.qt4.EXT_QT4) def cxx_hook(self, node): - self.create_compiled_task('cxx_qt', node) + return self.create_compiled_task('cxx_qt', node) class cxx_qt(Task.classes['cxx']): def runnable_status(self): @@ -63,7 +63,7 @@ def runnable_status(self): # moc is trying to be too smart but it is too dumb: # why forcing the #include when Q_OBJECT is in the cpp file? gen = self.generator.bld.producer - gen.outstanding.insert(0, tsk) + gen.outstanding.append(tsk) gen.total += 1 self.set_run_after(tsk) else: diff --git a/waflib/extras/smart_continue.py b/waflib/extras/smart_continue.py deleted file mode 100644 index 3af7b1f532..0000000000 --- a/waflib/extras/smart_continue.py +++ /dev/null @@ -1,81 +0,0 @@ -#! /usr/bin/env python -# Thomas Nagy, 2011 - -# Try to cancel the tasks that cannot run with the option -k when an error occurs: -# 1 direct file dependencies -# 2 tasks listed in the before/after/ext_in/ext_out attributes - -from waflib import Task, Runner - -Task.CANCELED = 4 - -def cancel_next(self, tsk): - if not isinstance(tsk, Task.TaskBase): - return - if tsk.hasrun >= Task.SKIPPED: - # normal execution, no need to do anything here - return - - try: - canceled_tasks, canceled_nodes = self.canceled_tasks, self.canceled_nodes - except AttributeError: - canceled_tasks = self.canceled_tasks = set([]) - canceled_nodes = self.canceled_nodes = set([]) - - try: - canceled_nodes.update(tsk.outputs) - except AttributeError: - pass - - try: - canceled_tasks.add(tsk) - except AttributeError: - pass - -def get_out(self): - tsk = self.out.get() - if not self.stop: - self.add_more_tasks(tsk) - self.count -= 1 - self.dirty = True - self.cancel_next(tsk) # new code - -def error_handler(self, tsk): - if not self.bld.keep: - self.stop = True - self.error.append(tsk) - self.cancel_next(tsk) # new code - -Runner.Parallel.cancel_next = cancel_next -Runner.Parallel.get_out = get_out -Runner.Parallel.error_handler = error_handler - -def get_next_task(self): - tsk = self.get_next_task_smart_continue() - if not tsk: - return tsk - - try: - canceled_tasks, canceled_nodes = self.canceled_tasks, self.canceled_nodes - except AttributeError: - pass - else: - # look in the tasks that this one is waiting on - # if one of them was canceled, cancel this one too - for x in tsk.run_after: - if x in canceled_tasks: - tsk.hasrun = Task.CANCELED - self.cancel_next(tsk) - break - else: - # so far so good, now consider the nodes - for x in getattr(tsk, 'inputs', []) + getattr(tsk, 'deps', []): - if x in canceled_nodes: - tsk.hasrun = Task.CANCELED - self.cancel_next(tsk) - break - return tsk - -Runner.Parallel.get_next_task_smart_continue = Runner.Parallel.get_next_task -Runner.Parallel.get_next_task = get_next_task - diff --git a/waflib/extras/softlink_libs.py b/waflib/extras/softlink_libs.py index ad63da43a0..50c777f28c 100644 --- a/waflib/extras/softlink_libs.py +++ b/waflib/extras/softlink_libs.py @@ -48,9 +48,11 @@ def run(self): result.seek(0) for line in result.readlines(): words = line.split() - if len(words) < 3 or words[1] != '=>': continue + if len(words) < 3 or words[1] != '=>': + continue lib = words[2] - if lib == 'not': continue + if lib == 'not': + continue if any([lib.startswith(p) for p in [bld.bldnode.abspath(), '('] + self.env.SOFTLINK_EXCLUDE]): diff --git 
a/waflib/extras/sphinx.py b/waflib/extras/sphinx.py new file mode 100644 index 0000000000..08f3cfd8a0 --- /dev/null +++ b/waflib/extras/sphinx.py @@ -0,0 +1,120 @@ +"""Support for Sphinx documentation + +This is a wrapper for sphinx-build program. Please note that sphinx-build supports only +one output format at a time, but the tool can create multiple tasks to handle more. +The output formats can be passed via the sphinx_output_format, which is an array of +strings. For backwards compatibility if only one output is needed, it can be passed +as a single string. +The default output format is html. + +Specific formats can be installed in different directories by specifying the +install_path_ attribute. If not defined, the standard install_path +will be used instead. + +Example wscript: + +def configure(cnf): + conf.load('sphinx') + +def build(bld): + bld( + features='sphinx', + sphinx_source='sources', # path to source directory + sphinx_options='-a -v', # sphinx-build program additional options + sphinx_output_format=['html', 'man'], # output format of sphinx documentation + install_path_man='${DOCDIR}/man' # put man pages in a specific directory + ) + +""" + +from waflib.Node import Node +from waflib import Utils +from waflib import Task +from waflib.TaskGen import feature, after_method + + +def configure(cnf): + """Check if sphinx-build program is available and loads gnu_dirs tool.""" + cnf.find_program('sphinx-build', var='SPHINX_BUILD', mandatory=False) + cnf.load('gnu_dirs') + + +@feature('sphinx') +def build_sphinx(self): + """Builds sphinx sources. + """ + if not self.env.SPHINX_BUILD: + self.bld.fatal('Program SPHINX_BUILD not defined.') + if not getattr(self, 'sphinx_source', None): + self.bld.fatal('Attribute sphinx_source not defined.') + if not isinstance(self.sphinx_source, Node): + self.sphinx_source = self.path.find_node(self.sphinx_source) + if not self.sphinx_source: + self.bld.fatal('Can\'t find sphinx_source: %r' % self.sphinx_source) + + # In the taskgen we have the complete list of formats + Utils.def_attrs(self, sphinx_output_format='html') + self.sphinx_output_format = Utils.to_list(self.sphinx_output_format) + + self.env.SPHINX_OPTIONS = getattr(self, 'sphinx_options', []) + + for source_file in self.sphinx_source.ant_glob('**/*'): + self.bld.add_manual_dependency(self.sphinx_source, source_file) + + for cfmt in self.sphinx_output_format: + sphinx_build_task = self.create_task('SphinxBuildingTask') + sphinx_build_task.set_inputs(self.sphinx_source) + # In task we keep the specific format this task is generating + sphinx_build_task.env.SPHINX_OUTPUT_FORMAT = cfmt + + # the sphinx-build results are in directory + sphinx_build_task.sphinx_output_directory = self.path.get_bld().make_node(cfmt) + sphinx_build_task.set_outputs(sphinx_build_task.sphinx_output_directory) + sphinx_build_task.sphinx_output_directory.mkdir() + + Utils.def_attrs(sphinx_build_task, install_path=getattr(self, 'install_path_' + cfmt, getattr(self, 'install_path', get_install_path(sphinx_build_task)))) + + +def get_install_path(object): + if object.env.SPHINX_OUTPUT_FORMAT == 'man': + return object.env.MANDIR + elif object.env.SPHINX_OUTPUT_FORMAT == 'info': + return object.env.INFODIR + else: + return object.env.DOCDIR + + +class SphinxBuildingTask(Task.Task): + color = 'BOLD' + run_str = '${SPHINX_BUILD} -M ${SPHINX_OUTPUT_FORMAT} ${SRC} ${TGT} -d ${TGT[0].bld_dir()}/doctrees-${SPHINX_OUTPUT_FORMAT} ${SPHINX_OPTIONS}' + + def keyword(self): + return 'Compiling (%s)' % self.env.SPHINX_OUTPUT_FORMAT 
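+
+	# runnable_status() computes the signature eagerly and, when the task is
+	# skipped, still re-declares the install entries (add_install below) so
+	# previously generated documentation is installed again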
+ + def runnable_status(self): + + for x in self.run_after: + if not x.hasrun: + return Task.ASK_LATER + + self.signature() + ret = Task.Task.runnable_status(self) + if ret == Task.SKIP_ME: + # in case the files were removed + self.add_install() + return ret + + + def post_run(self): + self.add_install() + return Task.Task.post_run(self) + + + def add_install(self): + nodes = self.sphinx_output_directory.ant_glob('**/*', quiet=True) + self.outputs += nodes + self.generator.add_install_files(install_to=self.install_path, + install_from=nodes, + postpone=False, + cwd=self.sphinx_output_directory.make_node(self.env.SPHINX_OUTPUT_FORMAT), + relative_trick=True) diff --git a/waflib/extras/stale.py b/waflib/extras/stale.py index c89550e2ee..25e8905978 100644 --- a/waflib/extras/stale.py +++ b/waflib/extras/stale.py @@ -13,8 +13,17 @@ Of course, it will only work if there are no dynamically generated nodes/tasks, in which case the method will have to be modified to exclude some folders for example. + +Make sure to specify bld.post_mode = waflib.Build.POST_AT_ONCE:: + + def build(bld): + bld.load('stale') + import waflib.Build + bld.post_mode = waflib.Build.POST_AT_ONCE + """ +import os from waflib import Logs, Build from waflib.Runner import Parallel @@ -24,7 +33,7 @@ def can_delete(node): """Imperfect moc cleanup which does not look for a Q_OBJECT macro in the files""" if not node.name.endswith('.moc'): - return True + return os.path.isfile(node.abspath()) base = node.name[:-4] p1 = node.parent.get_src() p2 = node.parent.get_bld() @@ -49,7 +58,7 @@ def stale_rec(node, nodes): return if getattr(node, 'children', []): - for x in node.children.values(): + for x in list(node.children.values()): if x.name != "c4che": stale_rec(x, nodes) else: @@ -59,7 +68,7 @@ def stale_rec(node, nodes): else: if not node in nodes: if can_delete(node): - Logs.warn("Removing stale file -> %s" % node.abspath()) + Logs.warn('Removing stale file -> %r', node) node.delete() old = Parallel.refill_task_list @@ -73,22 +82,26 @@ def refill_task_list(self): self.stale_done = True # this does not work in partial builds - if hasattr(bld, 'options') and bld.options.targets and bld.options.targets != '*': + if bld.targets not in ('', '*'): return iit # this does not work in dynamic builds - if not hasattr(bld, 'post_mode') or bld.post_mode == Build.POST_LAZY: + if getattr(bld, 'post_mode') != Build.POST_AT_ONCE: + Logs.warn('waflib.extras.stale is incompatible with dynamic builds') return iit # obtain the nodes to use during the build nodes = [] - for i in range(len(bld.groups)): - tasks = bld.get_tasks_group(i) - for x in tasks: + for group in bld.groups: + for tg in group: try: - nodes.extend(x.outputs) - except: - pass + nodes.extend(tg.outputs) + except AttributeError: + for task in tg.tasks: + try: + nodes.extend(task.outputs) + except AttributeError: + pass stale_rec(bld.bldnode, nodes) return iit diff --git a/waflib/extras/stracedeps.py b/waflib/extras/stracedeps.py index 1b611e8995..37d82cbb72 100644 --- a/waflib/extras/stracedeps.py +++ b/waflib/extras/stracedeps.py @@ -66,11 +66,8 @@ def get_strace_args(self): @task_method def exec_command(self, cmd, **kw): bld = self.generator.bld - try: - if not kw.get('cwd', None): - kw['cwd'] = bld.cwd - except AttributeError: - bld.cwd = kw['cwd'] = bld.variant_dir + if not 'cwd' in kw: + kw['cwd'] = self.get_cwd() args = self.get_strace_args() fname = self.get_strace_file() @@ -103,6 +100,9 @@ def parse_strace_deps(self, path, cwd): except OSError: pass + if not isinstance(cwd, 
str): + cwd = cwd.abspath() + nodes = [] bld = self.generator.bld try: @@ -114,7 +114,7 @@ def parse_strace_deps(self, path, cwd): pid_to_cwd = {} global BANNED - done = set([]) + done = set() for m in re.finditer(re_lines, cnt): # scraping the output of strace pid = m.group('pid') @@ -162,7 +162,7 @@ def parse_strace_deps(self, path, cwd): # record the dependencies then force the task signature recalculation for next time if Logs.verbose: - Logs.debug('deps: real scanner for %s returned %s' % (str(self), str(nodes))) + Logs.debug('deps: real scanner for %r returned %r', self, nodes) bld = self.generator.bld bld.node_deps[self.uid()] = nodes bld.raw_deps[self.uid()] = [] diff --git a/waflib/extras/swig.py b/waflib/extras/swig.py index 43aff46daf..967caeb5a8 100644 --- a/waflib/extras/swig.py +++ b/waflib/extras/swig.py @@ -5,7 +5,7 @@ import re from waflib import Task, Logs -from waflib.TaskGen import extension +from waflib.TaskGen import extension, feature, after_method from waflib.Configure import conf from waflib.Tools import c_preproc @@ -17,10 +17,10 @@ SWIG_EXTS = ['.swig', '.i'] -re_module = re.compile('%module(?:\s*\(.*\))?\s+(.+)', re.M) +re_module = re.compile(r'%module(?:\s*\(.*\))?\s+([^\r\n]+)', re.M) re_1 = re.compile(r'^%module.*?\s+([\w]+)\s*?$', re.M) -re_2 = re.compile('[#%]include [<"](.*)[">]', re.M) +re_2 = re.compile(r'[#%](?:include|import(?:\(module=".*"\))+|python(?:begin|code)) [<"](.*)[">]', re.M) class swig(Task.Task): color = 'BLUE' @@ -64,6 +64,7 @@ def scan(self): lst_src = [] seen = [] + missing = [] to_see = [self.inputs[0]] while to_see: @@ -87,14 +88,14 @@ def scan(self): to_see.append(u) break else: - Logs.warn('could not find %r' % n) - - return (lst_src, []) + missing.append(n) + return (lst_src, missing) # provide additional language processing swig_langs = {} def swigf(fun): swig_langs[fun.__name__.replace('swig_', '')] = fun + return fun swig.swigf = swigf def swig_c(self): @@ -111,9 +112,16 @@ def swig_c(self): c_tsk.set_run_after(self) - ge = self.generator.bld.producer - ge.outstanding.insert(0, c_tsk) - ge.total += 1 + # transfer weights from swig task to c task + if getattr(self, 'weight', None): + c_tsk.weight = self.weight + if getattr(self, 'tree_weight', None): + c_tsk.tree_weight = self.tree_weight + + try: + self.more_tasks.append(c_tsk) + except AttributeError: + self.more_tasks = [c_tsk] try: ltask = self.generator.link_task @@ -121,6 +129,9 @@ def swig_c(self): pass else: ltask.set_run_after(c_tsk) + # setting input nodes does not declare the build order + # because the build already started, but it sets + # the dependency to enable rebuilds ltask.inputs.append(c_tsk.outputs[0]) self.outputs.append(out_node) @@ -130,12 +141,18 @@ def swig_c(self): @swigf def swig_python(tsk): - tsk.set_outputs(tsk.inputs[0].parent.find_or_declare(tsk.module + '.py')) + node = tsk.inputs[0].parent + if tsk.outdir: + node = tsk.outdir + tsk.set_outputs(node.find_or_declare(tsk.module+'.py')) @swigf def swig_ocaml(tsk): - tsk.set_outputs(tsk.inputs[0].parent.find_or_declare(tsk.module + '.ml')) - tsk.set_outputs(tsk.inputs[0].parent.find_or_declare(tsk.module + '.mli')) + node = tsk.inputs[0].parent + if tsk.outdir: + node = tsk.outdir + tsk.set_outputs(node.find_or_declare(tsk.module+'.ml')) + tsk.set_outputs(node.find_or_declare(tsk.module+'.mli')) @extension(*SWIG_EXTS) def i_file(self, node): @@ -147,21 +164,71 @@ def i_file(self, node): flags = self.to_list(getattr(self, 'swig_flags', [])) tsk.env.append_value('SWIGFLAGS', flags) - # looks like this 
is causing problems - #if not '-outdir' in flags: - # tsk.env.append_value('SWIGFLAGS', ['-outdir', node.parent.abspath()]) + tsk.outdir = None + if '-outdir' in flags: + outdir = flags[flags.index('-outdir')+1] + outdir = tsk.generator.bld.bldnode.make_node(outdir) + outdir.mkdir() + tsk.outdir = outdir + +@feature('c', 'cxx', 'd', 'fc', 'asm') +@after_method('apply_link', 'process_source') +def enforce_swig_before_link(self): + try: + link_task = self.link_task + except AttributeError: + pass + else: + for x in self.tasks: + if x.__class__.__name__ == 'swig': + link_task.run_after.add(x) @conf -def check_swig_version(self): - """Returns a tuple representing the swig version, like (1,3,28)""" +def check_swig_version(conf, minver=None): + """ + Check if the swig tool is found matching a given minimum version. + minver should be a tuple, eg. to check for swig >= 1.3.28 pass (1,3,28) as minver. + + If successful, SWIG_VERSION is defined as 'MAJOR.MINOR' + (eg. '1.3') of the actual swig version found. + + :param minver: minimum version + :type minver: tuple of int + :return: swig version + :rtype: tuple of int + """ + assert minver is None or isinstance(minver, tuple) + swigbin = conf.env['SWIG'] + if not swigbin: + conf.fatal('could not find the swig executable') + + # Get swig version string + cmd = swigbin + ['-version'] + Logs.debug('swig: Running swig command %r', cmd) reg_swig = re.compile(r'SWIG Version\s(.*)', re.M) - swig_out = self.cmd_and_log(self.env.SWIG + ['-version']) + swig_out = conf.cmd_and_log(cmd) + swigver_tuple = tuple([int(s) for s in reg_swig.findall(swig_out)[0].split('.')]) + + # Compare swig version with the minimum required + result = (minver is None) or (swigver_tuple >= minver) + + if result: + # Define useful environment variables + swigver = '.'.join([str(x) for x in swigver_tuple[:2]]) + conf.env['SWIG_VERSION'] = swigver + + # Feedback + swigver_full = '.'.join(map(str, swigver_tuple[:3])) + if minver is None: + conf.msg('Checking for swig version', swigver_full) + else: + minver_str = '.'.join(map(str, minver)) + conf.msg('Checking for swig version >= %s' % (minver_str,), swigver_full, color=result and 'GREEN' or 'YELLOW') + + if not result: + conf.fatal('The swig version is too old, expecting %r' % (minver,)) - swigver = tuple([int(s) for s in reg_swig.findall(swig_out)[0].split('.')]) - self.env['SWIG_VERSION'] = swigver - msg = 'Checking for swig version' - self.msg(msg, '.'.join(map(str, swigver))) - return swigver + return swigver_tuple def configure(conf): conf.find_program('swig', var='SWIG') diff --git a/waflib/extras/syms.py b/waflib/extras/syms.py index 8bd1c56899..562f708e1e 100644 --- a/waflib/extras/syms.py +++ b/waflib/extras/syms.py @@ -2,7 +2,7 @@ # encoding: utf-8 """ -this tool supports the export_symbols_regex to export the symbols in a shared library. +This tool supports the export_symbols_regex to export the symbols in a shared library. by default, all symbols are exported by gcc, and nothing by msvc. to use the tool, do something like: @@ -12,7 +12,6 @@ def build(ctx): only the symbols starting with 'mylib_' will be exported. 
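
Both text (T) and data (D) symbols are matched; if no export_symbols_regex is
given, every global symbol is exported (the default pattern is '.+?'). The
generated .def file is named after the target, which can be overridden with
the 'sym_file' attribute, e.g. sym_file='testlib.def'.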
""" -import os import re from waflib.Context import STDOUT from waflib.Task import Task @@ -23,26 +22,20 @@ class gen_sym(Task): def run(self): obj = self.inputs[0] kw = {} - if 'msvc' in (self.env.CC_NAME, self.env.CXX_NAME): - re_nm = re.compile(r'External\s+\|\s+_(' + self.generator.export_symbols_regex + r')\b') + reg = getattr(self.generator, 'export_symbols_regex', '.+?') + if 'msvc' in (self.env.CC_NAME, self.env.CXX_NAME): + re_nm = re.compile(r'External\s+\|\s+_(?P%s)\b' % reg) cmd = (self.env.DUMPBIN or ['dumpbin']) + ['/symbols', obj.abspath()] - - # Dumpbin requires custom environment sniffed out by msvc.py earlier - if self.env['PATH']: - env = dict(self.env.env or os.environ) - env.update(PATH = os.pathsep.join(self.env['PATH'])) - kw['env'] = env - else: if self.env.DEST_BINFMT == 'pe': #gcc uses nm, and has a preceding _ on windows - re_nm = re.compile(r'T\s+_(' + self.generator.export_symbols_regex + r')\b') + re_nm = re.compile(r'(T|D)\s+_(?P%s)\b' % reg) elif self.env.DEST_BINFMT=='mac-o': - re_nm=re.compile(r'T\s+(_?'+self.generator.export_symbols_regex+r')\b') + re_nm=re.compile(r'(T|D)\s+(?P_?(%s))\b' % reg) else: - re_nm = re.compile(r'T\s+(' + self.generator.export_symbols_regex + r')\b') - cmd = [self.env.NM[0] or 'nm', '-g', obj.abspath()] - syms = re_nm.findall(self.generator.bld.cmd_and_log(cmd, quiet=STDOUT, **kw)) + re_nm = re.compile(r'(T|D)\s+(?P%s)\b' % reg) + cmd = (self.env.NM or ['nm']) + ['-g', obj.abspath()] + syms = [m.group('symbol') for m in re_nm.finditer(self.generator.bld.cmd_and_log(cmd, quiet=STDOUT, **kw))] self.outputs[0].write('%r' % syms) class compile_sym(Task): @@ -64,24 +57,28 @@ def run(self): raise WafError('NotImplemented') @feature('syms') -@after_method('process_source', 'process_use', 'apply_link', 'process_uselib_local') +@after_method('process_source', 'process_use', 'apply_link', 'process_uselib_local', 'propagate_uselib_vars') def do_the_symbol_stuff(self): - ins = [x.outputs[0] for x in self.compiled_tasks] - self.gen_sym_tasks = [self.create_task('gen_sym', x, x.change_ext('.%d.sym' % self.idx)) for x in ins] + def_node = self.path.find_or_declare(getattr(self, 'sym_file', self.target + '.def')) + compiled_tasks = getattr(self, 'compiled_tasks', None) + if compiled_tasks: + ins = [x.outputs[0] for x in compiled_tasks] + self.gen_sym_tasks = [self.create_task('gen_sym', x, x.change_ext('.%d.sym' % self.idx)) for x in ins] + self.create_task('compile_sym', [x.outputs[0] for x in self.gen_sym_tasks], def_node) + + link_task = getattr(self, 'link_task', None) + if link_task: + self.link_task.dep_nodes.append(def_node) - tsk = self.create_task('compile_sym', - [x.outputs[0] for x in self.gen_sym_tasks], - self.path.find_or_declare(getattr(self, 'sym_filename', self.target + '.def'))) - self.link_task.set_run_after(tsk) - self.link_task.dep_nodes.append(tsk.outputs[0]) - if 'msvc' in (self.env.CC_NAME, self.env.CXX_NAME): - self.link_task.env.append_value('LINKFLAGS', ['/def:' + tsk.outputs[0].bldpath()]) - elif self.env.DEST_BINFMT == 'pe': #gcc on windows takes *.def as an additional input - self.link_task.inputs.append(tsk.outputs[0]) - elif self.env.DEST_BINFMT == 'elf': - self.link_task.env.append_value('LINKFLAGS', ['-Wl,-version-script', '-Wl,' + tsk.outputs[0].bldpath()]) - elif self.env.DEST_BINFMT=='mac-o': - self.link_task.env.append_value('LINKFLAGS',['-Wl,-exported_symbols_list,'+tsk.outputs[0].bldpath()]) - else: - raise WafError('NotImplemented') + if 'msvc' in (self.env.CC_NAME, self.env.CXX_NAME): + 
self.link_task.env.append_value('LINKFLAGS', ['/def:' + def_node.bldpath()]) + elif self.env.DEST_BINFMT == 'pe': + # gcc on windows takes *.def as an additional input + self.link_task.inputs.append(def_node) + elif self.env.DEST_BINFMT == 'elf': + self.link_task.env.append_value('LINKFLAGS', ['-Wl,-version-script', '-Wl,' + def_node.bldpath()]) + elif self.env.DEST_BINFMT=='mac-o': + self.link_task.env.append_value('LINKFLAGS',['-Wl,-exported_symbols_list,' + def_node.bldpath()]) + else: + raise WafError('NotImplemented') diff --git a/waflib/extras/sync_exec.py b/waflib/extras/sync_exec.py deleted file mode 100644 index 1d5f228e78..0000000000 --- a/waflib/extras/sync_exec.py +++ /dev/null @@ -1,9 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -""" -This tool is obsolete, the sync_exec feature is now the default -""" - -pass - diff --git a/waflib/extras/ticgt.py b/waflib/extras/ticgt.py index 9cde2522a8..f43a7ea5cb 100644 --- a/waflib/extras/ticgt.py +++ b/waflib/extras/ticgt.py @@ -20,7 +20,7 @@ It has a few idiosyncracies, such as not giving the liberty of the .o file names - automatically activate them when using the TI compiler - handle the tconf tool - The tool + The tool TODO: @@ -32,28 +32,27 @@ import os, re -from waflib import Configure, Options, Utils, Task, TaskGen +from waflib import Options, Utils, Task, TaskGen from waflib.Tools import c, ccroot, c_preproc from waflib.Configure import conf -from waflib.TaskGen import feature, before_method, taskgen_method -from waflib.Tools.ccroot import link_task, stlink_task +from waflib.TaskGen import feature, before_method from waflib.Tools.c import cprogram opj = os.path.join @conf def find_ticc(conf): - cc = conf.find_program(['cl6x'], var='CC', path_list=opj(getattr(Options.options, 'ti-cgt-dir', ""), 'bin')) + conf.find_program(['cl6x'], var='CC', path_list=opj(getattr(Options.options, 'ti-cgt-dir', ""), 'bin')) conf.env.CC_NAME = 'ticc' @conf def find_tild(conf): - ld = conf.find_program(['lnk6x'], var='LINK_CC', path_list=opj(getattr(Options.options, 'ti-cgt-dir', ""), 'bin')) + conf.find_program(['lnk6x'], var='LINK_CC', path_list=opj(getattr(Options.options, 'ti-cgt-dir', ""), 'bin')) conf.env.LINK_CC_NAME = 'tild' @conf def find_tiar(conf): - ar = conf.find_program(['ar6x'], var='AR', path_list=opj(getattr(Options.options, 'ti-cgt-dir', ""), 'bin')) + conf.find_program(['ar6x'], var='AR', path_list=opj(getattr(Options.options, 'ti-cgt-dir', ""), 'bin')) conf.env.AR_NAME = 'tiar' conf.env.ARFLAGS = 'qru' @@ -61,7 +60,8 @@ def find_tiar(conf): def ticc_common_flags(conf): v = conf.env - if not v['LINK_CC']: v['LINK_CC'] = v['CC'] + if not v['LINK_CC']: + v['LINK_CC'] = v['CC'] v['CCLNK_SRC_F'] = [] v['CCLNK_TGT_F'] = ['-o'] v['CPPPATH_ST'] = '-I%s' @@ -94,15 +94,15 @@ def configure(conf): conf.cc_add_flags() conf.link_add_flags() conf.find_program(['tconf'], var='TCONF', path_list=v.TI_XDCTOOLS_DIR) - + conf.env.TCONF_INCLUDES += [ opj(conf.env.TI_DSPBIOS_DIR, 'packages'), ] - + conf.env.INCLUDES += [ opj(conf.env.TI_CGT_DIR, 'include'), ] - + conf.env.LIBPATH += [ opj(conf.env.TI_CGT_DIR, "lib"), ] @@ -110,7 +110,7 @@ def configure(conf): conf.env.INCLUDES_DSPBIOS += [ opj(conf.env.TI_DSPBIOS_DIR, 'packages', 'ti', 'bios', 'include'), ] - + conf.env.LIBPATH_DSPBIOS += [ opj(conf.env.TI_DSPBIOS_DIR, 'packages', 'ti', 'bios', 'lib'), ] @@ -118,7 +118,7 @@ def configure(conf): conf.env.INCLUDES_DSPLINK += [ opj(conf.env.TI_DSPLINK_DIR, 'dsplink', 'dsp', 'inc'), ] - + @conf def ti_set_debug(cfg, debug=1): """ @@ -152,7 +152,7 @@ 
def ti_dsplink_set_platform_flags(cfg, splat, dsp, dspbios_ver, board): opj(cfg.env.TI_DSPLINK_DIR, 'dsplink', 'dsp', 'inc', dsp), d, ] - + cfg.env.LINKFLAGS_DSPLINK += [ opj(cfg.env.TI_DSPLINK_DIR, 'dsplink', 'dsp', 'export', 'BIN', 'DspBios', splat, board+'_0', 'RELEASE', 'dsplink%s.lib' % x) for x in ('', 'pool', 'mpcs', 'mplist', 'msg', 'data', 'notify', 'ringio') @@ -194,7 +194,7 @@ class ti_c(Task.Task): """ "Compile C files into object files" - run_str = '${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${SRC} -c ${OUT}' + run_str = '${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${SRC} -c ${OUT} ${CPPFLAGS}' vars = ['CCDEPS'] # unused variable to depend on, just in case ext_in = ['.h'] # set the build order easily by using ext_out=['.h'] scan = c_preproc.scan @@ -261,7 +261,7 @@ def apply_tconf(self): target_node.change_ext("cfg.cmd"), ] - s62task = create_compiled_task(self, 'ti_c', task.outputs[1]) + create_compiled_task(self, 'ti_c', task.outputs[1]) ctask = create_compiled_task(self, 'ti_c', task.outputs[0]) ctask.env = self.env.derive() diff --git a/waflib/extras/unc.py b/waflib/extras/unc.py deleted file mode 100644 index d4a9b687d8..0000000000 --- a/waflib/extras/unc.py +++ /dev/null @@ -1,111 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2014 (ita) - -""" -This module enables automatic handling of network paths of the form \\server\share for both input -and output files. While a typical script may require the following:: - - import os - def build(bld): - - node = bld.root.make_node('\\\\COMPUTER\\share\\test.txt') - - # mark the server/share levels as folders - k = node.parent - while k: - k.cache_isdir = True - k = k.parent - - # clear the file if removed - if not os.path.isfile(node.abspath()): - node.sig = None - - # create the folder structure - if node.parent.height() > 2: - node.parent.mkdir() - - # then the task generator - def myfun(tsk): - tsk.outputs[0].write("data") - bld(rule=myfun, source='wscript', target=[nd]) - -this tool will make the process much easier, for example:: - - def configure(conf): - conf.load('unc') # do not import the module directly - - def build(bld): - def myfun(tsk): - tsk.outputs[0].write("data") - bld(rule=myfun, update_outputs=True, - source='wscript', - target='\\\\COMPUTER\\share\\test.txt') - bld(rule=myfun, update_outputs=True, - source='\\\\COMPUTER\\share\\test.txt', - target='\\\\COMPUTER\\share\\test2.txt') -""" - -import os -from waflib import Node, Utils, Context - -def find_resource(self, lst): - if isinstance(lst, str): - lst = [x for x in Node.split_path(lst) if x and x != '.'] - - if lst[0].startswith('\\\\'): - if len(lst) < 3: - return None - node = self.ctx.root.make_node(lst[0]).make_node(lst[1]) - node.cache_isdir = True - node.parent.cache_isdir = True - - ret = node.search_node(lst[2:]) - if not ret: - ret = node.find_node(lst[2:]) - if ret and os.path.isdir(ret.abspath()): - return None - return ret - - return self.find_resource_orig(lst) - -def find_or_declare(self, lst): - if isinstance(lst, str): - lst = [x for x in Node.split_path(lst) if x and x != '.'] - - if lst[0].startswith('\\\\'): - if len(lst) < 3: - return None - node = self.ctx.root.make_node(lst[0]).make_node(lst[1]) - node.cache_isdir = True - node.parent.cache_isdir = True - ret = node.find_node(lst[2:]) - if not ret: - ret = node.make_node(lst[2:]) - if not os.path.isfile(ret.abspath()): - 
ret.sig = None - ret.parent.mkdir() - return ret - - return self.find_or_declare_orig(lst) - -def abspath(self): - """For MAX_PATH limitations""" - ret = self.abspath_orig() - if not ret.startswith("\\"): - return "\\\\?\\" + ret - return ret - -if Utils.is_win32: - Node.Node.find_resource_orig = Node.Node.find_resource - Node.Node.find_resource = find_resource - - Node.Node.find_or_declare_orig = Node.Node.find_or_declare - Node.Node.find_or_declare = find_or_declare - - Node.Node.abspath_orig = Node.Node.abspath - Node.Node.abspath = abspath - - for k in list(Context.cache_modules.keys()): - Context.cache_modules["\\\\?\\" + k] = Context.cache_modules[k] - diff --git a/waflib/extras/unity.py b/waflib/extras/unity.py index d21e35198e..78128ed385 100644 --- a/waflib/extras/unity.py +++ b/waflib/extras/unity.py @@ -2,67 +2,107 @@ # encoding: utf-8 """ -Compile whole groups of C/C++ files at once. +Compile whole groups of C/C++ files at once +(C and C++ files are processed independently though). + +To enable globally:: + + def options(opt): + opt.load('compiler_cxx') + def build(bld): + bld.load('compiler_cxx unity') + +To enable for specific task generators only:: + + def build(bld): + bld(features='c cprogram unity', source='main.c', ...) + +The file order is often significant in such builds, so it can be +necessary to adjust the order of source files and the batch sizes. +To control the amount of files processed in a batch per target +(the default is 50):: + + def build(bld): + bld(features='c cprogram', unity_size=20) -def build(bld): - bld.load('compiler_cxx unity') """ -import sys from waflib import Task, Options from waflib.Tools import c_preproc from waflib import TaskGen MAX_BATCH = 50 +EXTS_C = ('.c',) +EXTS_CXX = ('.cpp','.cc','.cxx','.C','.c++') + def options(opt): global MAX_BATCH - opt.add_option('--batchsize', action='store', dest='batchsize', type='int', default=MAX_BATCH, help='batch size (0 for no batch)') + opt.add_option('--batchsize', action='store', dest='batchsize', type='int', default=MAX_BATCH, + help='default unity batch size (0 disables unity builds)') + +@TaskGen.taskgen_method +def batch_size(self): + default = getattr(Options.options, 'batchsize', MAX_BATCH) + if default < 1: + return 0 + return getattr(self, 'unity_size', default) + class unity(Task.Task): color = 'BLUE' scan = c_preproc.scan + def to_include(self, node): + ret = node.path_from(self.outputs[0].parent) + ret = ret.replace('\\', '\\\\').replace('"', '\\"') + return ret def run(self): - lst = ['#include "%s"\n' % node.abspath() for node in self.inputs] + lst = ['#include "%s"\n' % self.to_include(node) for node in self.inputs] txt = ''.join(lst) self.outputs[0].write(txt) + def __str__(self): + node = self.outputs[0] + return node.path_from(node.ctx.launch_node()) -@TaskGen.taskgen_method -def batch_size(self): - return getattr(Options.options, 'batchsize', MAX_BATCH) - -def make_batch_fun(ext): - # this generic code makes this quite unreadable, defining the function two times might have been better - def make_batch(self, node): - cnt = self.batch_size() - if cnt <= 1: - return self.create_compiled_task(ext, node) - x = getattr(self, 'master_%s' % ext, None) - if not x or len(x.inputs) >= cnt: - x = self.create_task('unity') - setattr(self, 'master_%s' % ext, x) - - cnt_cur = getattr(self, 'cnt_%s' % ext, 0) - cxxnode = node.parent.find_or_declare('unity_%s_%d_%d.%s' % (self.idx, cnt_cur, cnt, ext)) - x.outputs = [cxxnode] - setattr(self, 'cnt_%s' % ext, cnt_cur + 1) - 
self.create_compiled_task(ext, cxxnode) - x.inputs.append(node) - return make_batch - -def enable_support(cc, cxx): - if cxx or not cc: - make_cxx_batch = TaskGen.extension('.cpp', '.cc', '.cxx', '.C', '.c++')(make_batch_fun('cxx')) - if cc: - make_c_batch = TaskGen.extension('.c')(make_batch_fun('c')) - else: - TaskGen.task_gen.mappings['.c'] = TaskGen.task_gen.mappings['.cpp'] - -has_c = '.c' in TaskGen.task_gen.mappings or 'waflib.Tools.compiler_c' in sys.modules -has_cpp = '.cpp' in TaskGen.task_gen.mappings or 'waflib.Tools.compiler_cxx' in sys.modules -enable_support(has_c, has_cpp) # by default +def bind_unity(obj, cls_name, exts): + if not 'mappings' in obj.__dict__: + obj.mappings = dict(obj.mappings) + + for j in exts: + fun = obj.mappings[j] + if fun.__name__ == 'unity_fun': + raise ValueError('Attempt to bind unity mappings multiple times %r' % j) + + def unity_fun(self, node): + cnt = self.batch_size() + if cnt <= 1: + return fun(self, node) + x = getattr(self, 'master_%s' % cls_name, None) + if not x or len(x.inputs) >= cnt: + x = self.create_task('unity') + setattr(self, 'master_%s' % cls_name, x) + + cnt_cur = getattr(self, 'cnt_%s' % cls_name, 0) + c_node = node.parent.find_or_declare('unity_%s_%d_%d.%s' % (self.idx, cnt_cur, cnt, cls_name)) + x.outputs = [c_node] + setattr(self, 'cnt_%s' % cls_name, cnt_cur + 1) + fun(self, c_node) + x.inputs.append(node) + + obj.mappings[j] = unity_fun + +@TaskGen.feature('unity') +@TaskGen.before('process_source') +def single_unity(self): + lst = self.to_list(self.features) + if 'c' in lst: + bind_unity(self, 'c', EXTS_C) + if 'cxx' in lst: + bind_unity(self, 'cxx', EXTS_CXX) def build(bld): - # it is best to do this - enable_support(bld.env.CC_NAME, bld.env.CXX_NAME) + if bld.env.CC_NAME: + bind_unity(TaskGen.task_gen, 'c', EXTS_C) + if bld.env.CXX_NAME: + bind_unity(TaskGen.task_gen, 'cxx', EXTS_CXX) diff --git a/waflib/extras/use_config.py b/waflib/extras/use_config.py index e2404f1e3c..ef5129f219 100644 --- a/waflib/extras/use_config.py +++ b/waflib/extras/use_config.py @@ -50,6 +50,21 @@ def configure(conf): import os.path as osp import os +local_repo = '' +"""Local repository containing additional Waf tools (plugins)""" +remote_repo = 'https://gitlab.com/ita1024/waf/raw/master/' +""" +Remote directory containing downloadable waf tools. 
The missing tools can be downloaded by using:: + + $ waf configure --download +""" + +remote_locs = ['waflib/extras', 'waflib/Tools'] +""" +Remote directories for use with :py:const:`waflib.extras.use_config.remote_repo` +""" + + try: from urllib import request except ImportError: @@ -58,7 +73,7 @@ def configure(conf): urlopen = request.urlopen -from waflib import Errors, Context, Logs, Utils, Options +from waflib import Errors, Context, Logs, Utils, Options, Configure try: from urllib.parse import urlparse @@ -94,12 +109,12 @@ def download_check(node): def download_tool(tool, force=False, ctx=None): """ - Download a Waf tool from the remote repository defined in :py:const:`waflib.Context.remote_repo`:: + Download a Waf tool from the remote repository defined in :py:const:`waflib.extras.use_config.remote_repo`:: $ waf configure --download """ - for x in Utils.to_list(Context.remote_repo): - for sub in Utils.to_list(Context.remote_locs): + for x in Utils.to_list(remote_repo): + for sub in Utils.to_list(remote_locs): url = '/'.join((x, sub, tool + '.py')) try: web = urlopen(url) @@ -115,12 +130,12 @@ def download_tool(tool, force=False, ctx=None): else: tmp = ctx.root.make_node(os.sep.join((Context.waf_dir, 'waflib', 'extras', tool + '.py'))) tmp.write(web.read(), 'wb') - Logs.warn('Downloaded %s from %s' % (tool, url)) + Logs.warn('Downloaded %s from %s', tool, url) download_check(tmp) try: module = Context.load_tool(tool) except Exception: - Logs.warn('The tool %s from %s is unusable' % (tool, url)) + Logs.warn('The tool %s from %s is unusable', tool, url) try: tmp.delete() except Exception: @@ -130,10 +145,13 @@ def download_tool(tool, force=False, ctx=None): raise Errors.WafError('Could not load the Waf tool') -def load_tool(tool, tooldir=None, ctx=None): +def load_tool(tool, tooldir=None, ctx=None, with_sys_path=True): try: - module = Context.load_tool_default(tool, tooldir) + module = Context.load_tool_default(tool, tooldir, ctx, with_sys_path) except ImportError as e: + if not ctx or not hasattr(Options.options, 'download'): + Logs.error('Could not load %r during options phase (download unavailable at this point)' % tool) + raise if Options.options.download: module = download_tool(tool, ctx=ctx) if not module: @@ -144,7 +162,7 @@ def load_tool(tool, tooldir=None, ctx=None): Context.load_tool_default = Context.load_tool Context.load_tool = load_tool - +Configure.download_tool = download_tool def configure(self): opts = self.options diff --git a/waflib/extras/valadoc.py b/waflib/extras/valadoc.py index 97ac15c104..c50f69e747 100644 --- a/waflib/extras/valadoc.py +++ b/waflib/extras/valadoc.py @@ -7,8 +7,8 @@ TODO: tabs vs spaces """ -from waflib import Task, Utils, Node, Errors, Logs -from waflib.TaskGen import feature, extension, after_method +from waflib import Task, Utils, Errors, Logs +from waflib.TaskGen import feature VALADOC_STR = '${VALADOC}' @@ -30,6 +30,8 @@ def __init__(self, *k, **kw): self.private = False self.inherit = False self.deps = False + self.vala_defines = [] + self.vala_target_glib = None self.enable_non_null_experimental = False self.force = False @@ -57,6 +59,11 @@ def run(self): cmd.append ('--inherit') if getattr(self, 'deps', None): cmd.append ('--deps') + if getattr(self, 'vala_defines', None): + for define in self.vala_defines: + cmd.append ('--define %s' % define) + if getattr(self, 'vala_target_glib', None): + cmd.append ('--target-glib=%s' % self.vala_target_glib) if getattr(self, 'enable_non_null_experimental', None): cmd.append 
('--enable-non-null-experimental') if getattr(self, 'force', None): @@ -106,7 +113,7 @@ def process_valadoc(self): try: task.vapi_dirs.append(self.path.find_dir(vapi_dir).abspath()) except AttributeError: - Logs.warn("Unable to locate Vala API directory: '%s'" % vapi_dir) + Logs.warn('Unable to locate Vala API directory: %r', vapi_dir) if getattr(self, 'files', None): task.files = self.files else: @@ -119,6 +126,10 @@ def process_valadoc(self): task.inherit = self.inherit if getattr(self, 'deps', None): task.deps = self.deps + if getattr(self, 'vala_defines', None): + task.vala_defines = Utils.to_list(self.vala_defines) + if getattr(self, 'vala_target_glib', None): + task.vala_target_glib = self.vala_target_glib if getattr(self, 'enable_non_null_experimental', None): task.enable_non_null_experimental = self.enable_non_null_experimental if getattr(self, 'force', None): diff --git a/waflib/extras/waf_xattr.py b/waflib/extras/waf_xattr.py new file mode 100644 index 0000000000..351dd63a78 --- /dev/null +++ b/waflib/extras/waf_xattr.py @@ -0,0 +1,150 @@ +#! /usr/bin/env python +# encoding: utf-8 + +""" +Use extended attributes instead of database files + +1. Input files will be made writable +2. This is only for systems providing extended filesystem attributes +3. By default, hashes are calculated only if timestamp/size change (HASH_CACHE below) +4. The module enables "deep_inputs" on all tasks by propagating task signatures +5. This module also skips task signature comparisons for task code changes due to point 4. +6. This module is for Python3/Linux only, but it could be extended to Python2/other systems + using the xattr library +7. For projects in which tasks always declare output files, it should be possible to + store the rest of build context attributes on output files (imp_sigs, raw_deps and node_deps) + but this is not done here + +On a simple C++ project benchmark, the variations before and after adding waf_xattr.py were observed: +total build time: 20s -> 22s +no-op build time: 2.4s -> 1.8s +pickle file size: 2.9MB -> 2.6MB +""" + +import os +from waflib import Logs, Node, Task, Utils, Errors +from waflib.Task import SKIP_ME, RUN_ME, CANCEL_ME, ASK_LATER, SKIPPED, MISSING + +HASH_CACHE = True +SIG_VAR = 'user.waf.sig' +SEP = ','.encode() +TEMPLATE = '%b%d,%d'.encode() + +try: + PermissionError +except NameError: + PermissionError = IOError + +def getxattr(self): + return os.getxattr(self.abspath(), SIG_VAR) + +def setxattr(self, val): + os.setxattr(self.abspath(), SIG_VAR, val) + +def h_file(self): + try: + ret = getxattr(self) + except OSError: + if HASH_CACHE: + st = os.stat(self.abspath()) + mtime = st.st_mtime + size = st.st_size + else: + if len(ret) == 16: + # for build directory files + return ret + + if HASH_CACHE: + # check if timestamp and mtime match to avoid re-hashing + st = os.stat(self.abspath()) + mtime, size = ret[16:].split(SEP) + if int(1000 * st.st_mtime) == int(mtime) and st.st_size == int(size): + return ret[:16] + + ret = Utils.h_file(self.abspath()) + if HASH_CACHE: + val = TEMPLATE % (ret, int(1000 * st.st_mtime), int(st.st_size)) + try: + setxattr(self, val) + except PermissionError: + os.chmod(self.abspath(), st.st_mode | 128) + setxattr(self, val) + return ret + +def runnable_status(self): + bld = self.generator.bld + if bld.is_install < 0: + return SKIP_ME + + for t in self.run_after: + if not t.hasrun: + return ASK_LATER + elif t.hasrun < SKIPPED: + # a dependency has an error + return CANCEL_ME + + # first compute the signature + try: + new_sig = 
self.signature() + except Errors.TaskNotReady: + return ASK_LATER + + if not self.outputs: + # compare the signature to a signature computed previously + # this part is only for tasks with no output files + key = self.uid() + try: + prev_sig = bld.task_sigs[key] + except KeyError: + Logs.debug('task: task %r must run: it was never run before or the task code changed', self) + return RUN_ME + if new_sig != prev_sig: + Logs.debug('task: task %r must run: the task signature changed', self) + return RUN_ME + + # compare the signatures of the outputs to make a decision + for node in self.outputs: + try: + sig = node.h_file() + except EnvironmentError: + Logs.debug('task: task %r must run: an output node does not exist', self) + return RUN_ME + if sig != new_sig: + Logs.debug('task: task %r must run: an output node is stale', self) + return RUN_ME + + return (self.always_run and RUN_ME) or SKIP_ME + +def post_run(self): + bld = self.generator.bld + sig = self.signature() + for node in self.outputs: + if not node.exists(): + self.hasrun = MISSING + self.err_msg = '-> missing file: %r' % node.abspath() + raise Errors.WafError(self.err_msg) + os.setxattr(node.abspath(), 'user.waf.sig', sig) + if not self.outputs: + # only for task with no outputs + bld.task_sigs[self.uid()] = sig + if not self.keep_last_cmd: + try: + del self.last_cmd + except AttributeError: + pass + +try: + os.getxattr +except AttributeError: + pass +else: + h_file.__doc__ = Node.Node.h_file.__doc__ + + # keep file hashes as file attributes + Node.Node.h_file = h_file + + # enable "deep_inputs" on all tasks + Task.Task.runnable_status = runnable_status + Task.Task.post_run = post_run + Task.Task.sig_deep_inputs = Utils.nada + diff --git a/waflib/extras/wafcache.py b/waflib/extras/wafcache.py new file mode 100644 index 0000000000..30ac3ef518 --- /dev/null +++ b/waflib/extras/wafcache.py @@ -0,0 +1,648 @@ +#! /usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2019 (ita) + +""" +Filesystem-based cache system to share and re-use build artifacts + +Cache access operations (copy to and from) are delegated to +independent pre-forked worker subprocesses. + +The following environment variables may be set: +* WAFCACHE: several possibilities: + - File cache: + absolute path of the waf cache (~/.cache/wafcache_user, + where `user` represents the currently logged-in user) + - URL to a cache server, for example: + export WAFCACHE=http://localhost:8080/files/ + in that case, GET/POST requests are made to urls of the form + http://localhost:8080/files/000000000/0 (cache management is delegated to the server) + - GCS, S3 or MINIO bucket + gs://my-bucket/ (uses gsutil command line tool or WAFCACHE_CMD) + s3://my-bucket/ (uses aws command line tool or WAFCACHE_CMD) + minio://my-bucket/ (uses mc command line tool or WAFCACHE_CMD) +* WAFCACHE_CMD: bucket upload/download command, for example: + WAFCACHE_CMD="gsutil cp %{SRC} %{TGT}" + Note that the WAFCACHE bucket value is used for the source or destination + depending on the operation (upload or download). 
For example, with: + WAFCACHE="gs://mybucket/" + the following commands may be run: + gsutil cp build/myprogram gs://mybucket/aa/aaaaa/1 + gsutil cp gs://mybucket/bb/bbbbb/2 build/somefile +* WAFCACHE_NO_PUSH: if set, disables pushing to the cache +* WAFCACHE_VERBOSITY: if set, displays more detailed cache operations +* WAFCACHE_STATS: if set, displays cache usage statistics on exit + +File cache specific options: + Files are copied using hard links by default; if the cache is located + onto another partition, the system switches to file copies instead. +* WAFCACHE_TRIM_MAX_FOLDER: maximum amount of tasks to cache (1M) +* WAFCACHE_EVICT_MAX_BYTES: maximum amount of cache size in bytes (10GB) +* WAFCACHE_EVICT_INTERVAL_MINUTES: minimum time interval to try + and trim the cache (3 minutes) + +Upload specific options: +* WAFCACHE_ASYNC_WORKERS: define a number of workers to upload results asynchronously + this may improve build performance with many/long file uploads + the default is unset (synchronous uploads) +* WAFCACHE_ASYNC_NOWAIT: do not wait for uploads to complete (default: False) + this requires asynchronous uploads to have an effect + +Usage:: + + def build(bld): + bld.load('wafcache') + ... + +To troubleshoot:: + + waf clean build --zone=wafcache +""" + +import atexit, base64, errno, fcntl, getpass, os, re, shutil, sys, time, threading, traceback, urllib3, shlex +try: + import subprocess32 as subprocess +except ImportError: + import subprocess + +base_cache = os.path.expanduser('~/.cache/') +if not os.path.isdir(base_cache): + base_cache = '/tmp/' +default_wafcache_dir = os.path.join(base_cache, 'wafcache_' + getpass.getuser()) + +CACHE_DIR = os.environ.get('WAFCACHE', default_wafcache_dir) +WAFCACHE_CMD = os.environ.get('WAFCACHE_CMD') +TRIM_MAX_FOLDERS = int(os.environ.get('WAFCACHE_TRIM_MAX_FOLDER', 1000000)) +EVICT_INTERVAL_MINUTES = int(os.environ.get('WAFCACHE_EVICT_INTERVAL_MINUTES', 3)) +EVICT_MAX_BYTES = int(os.environ.get('WAFCACHE_EVICT_MAX_BYTES', 10**10)) +WAFCACHE_NO_PUSH = 1 if os.environ.get('WAFCACHE_NO_PUSH') else 0 +WAFCACHE_VERBOSITY = 1 if os.environ.get('WAFCACHE_VERBOSITY') else 0 +WAFCACHE_STATS = 1 if os.environ.get('WAFCACHE_STATS') else 0 +WAFCACHE_ASYNC_WORKERS = os.environ.get('WAFCACHE_ASYNC_WORKERS') +WAFCACHE_ASYNC_NOWAIT = os.environ.get('WAFCACHE_ASYNC_NOWAIT') +OK = "ok" + +re_waf_cmd = re.compile('(?P<src>%{SRC})|(?P<tgt>%{TGT})') + +try: + import cPickle +except ImportError: + import pickle as cPickle + +if __name__ != '__main__': + from waflib import Task, Logs, Utils, Build + +def can_retrieve_cache(self): + """ + New method for waf Task classes + """ + if not self.outputs: + return False + + self.cached = False + + sig = self.signature() + ssig = Utils.to_hex(self.uid() + sig) + + if WAFCACHE_STATS: + self.generator.bld.cache_reqs += 1 + + files_to = [node.abspath() for node in self.outputs] + proc = get_process() + err = cache_command(proc, ssig, [], files_to) + process_pool.append(proc) + if err.startswith(OK): + if WAFCACHE_VERBOSITY: + Logs.pprint('CYAN', ' Fetched %r from cache' % files_to) + else: + Logs.debug('wafcache: fetched %r from cache', files_to) + if WAFCACHE_STATS: + self.generator.bld.cache_hits += 1 + else: + if WAFCACHE_VERBOSITY: + Logs.pprint('YELLOW', ' No cache entry %s' % files_to) + else: + Logs.debug('wafcache: No cache entry %s: %s', files_to, err) + return False + + self.cached = True + return True + +def put_files_cache(self): + """ + New method for waf Task classes + """ + if WAFCACHE_NO_PUSH or getattr(self, 'cached', 
None) or not self.outputs: + return + + files_from = [] + for node in self.outputs: + path = node.abspath() + if not os.path.isfile(path): + return + files_from.append(path) + + bld = self.generator.bld + old_sig = self.signature() + + for node in self.inputs: + try: + del node.ctx.cache_sig[node] + except KeyError: + pass + + delattr(self, 'cache_sig') + sig = self.signature() + + def _async_put_files_cache(bld, ssig, files_from): + proc = get_process() + if WAFCACHE_ASYNC_WORKERS: + with bld.wafcache_lock: + if bld.wafcache_stop: + process_pool.append(proc) + return + bld.wafcache_procs.add(proc) + + err = cache_command(proc, ssig, files_from, []) + process_pool.append(proc) + if err.startswith(OK): + if WAFCACHE_VERBOSITY: + Logs.pprint('CYAN', ' Successfully uploaded %s to cache' % files_from) + else: + Logs.debug('wafcache: Successfully uploaded %r to cache', files_from) + if WAFCACHE_STATS: + bld.cache_puts += 1 + else: + if WAFCACHE_VERBOSITY: + Logs.pprint('RED', ' Error caching step results %s: %s' % (files_from, err)) + else: + Logs.debug('wafcache: Error caching results %s: %s', files_from, err) + + if old_sig == sig: + ssig = Utils.to_hex(self.uid() + sig) + if WAFCACHE_ASYNC_WORKERS: + fut = bld.wafcache_executor.submit(_async_put_files_cache, bld, ssig, files_from) + bld.wafcache_uploads.append(fut) + else: + _async_put_files_cache(bld, ssig, files_from) + else: + Logs.debug('wafcache: skipped %r upload due to late input modifications %r', self.outputs, self.inputs) + + bld.task_sigs[self.uid()] = self.cache_sig + +def hash_env_vars(self, env, vars_lst): + """ + Reimplement BuildContext.hash_env_vars so that the resulting hash does not depend on local paths + """ + if not env.table: + env = env.parent + if not env: + return Utils.SIG_NIL + + idx = str(id(env)) + str(vars_lst) + try: + cache = self.cache_env + except AttributeError: + cache = self.cache_env = {} + else: + try: + return self.cache_env[idx] + except KeyError: + pass + + v = str([env[a] for a in vars_lst]) + v = v.replace(self.srcnode.abspath().__repr__()[:-1], '') + m = Utils.md5() + m.update(v.encode()) + ret = m.digest() + + Logs.debug('envhash: %r %r', ret, v) + + cache[idx] = ret + + return ret + +def uid(self): + """ + Reimplement Task.uid() so that the signature does not depend on local paths + """ + try: + return self.uid_ + except AttributeError: + m = Utils.md5() + src = self.generator.bld.srcnode + up = m.update + up(self.__class__.__name__.encode()) + for x in self.inputs + self.outputs: + up(x.path_from(src).encode()) + self.uid_ = m.digest() + return self.uid_ + + +def make_cached(cls): + """ + Enable the waf cache for a given task class + """ + if getattr(cls, 'nocache', None) or getattr(cls, 'has_cache', False): + return + + full_name = "%s.%s" % (cls.__module__, cls.__name__) + if full_name in ('waflib.Tools.ccroot.vnum', 'waflib.Build.inst'): + return + + m1 = getattr(cls, 'run', None) + def run(self): + if getattr(self, 'nocache', False): + return m1(self) + if self.can_retrieve_cache(): + return 0 + return m1(self) + cls.run = run + + m2 = getattr(cls, 'post_run', None) + def post_run(self): + if getattr(self, 'nocache', False): + return m2(self) + ret = m2(self) + self.put_files_cache() + return ret + cls.post_run = post_run + cls.has_cache = True + +process_pool = [] +def get_process(): + """ + Returns a worker process that can process waf cache commands + The worker process is assumed to be returned to the process pool when unused + """ + try: + return process_pool.pop() + except 
IndexError: + filepath = os.path.dirname(os.path.abspath(__file__)) + os.sep + 'wafcache.py' + cmd = [sys.executable, '-c', Utils.readf(filepath)] + return subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, bufsize=0) + +def atexit_pool(): + for proc in process_pool: + proc.kill() +atexit.register(atexit_pool) + +def build(bld): + """ + Called during the build process to enable file caching + """ + + if WAFCACHE_ASYNC_WORKERS: + try: + num_workers = int(WAFCACHE_ASYNC_WORKERS) + except ValueError: + Logs.warn('Invalid WAFCACHE_ASYNC_WORKERS specified: %r' % WAFCACHE_ASYNC_WORKERS) + else: + from concurrent.futures import ThreadPoolExecutor + bld.wafcache_executor = ThreadPoolExecutor(max_workers=num_workers) + bld.wafcache_uploads = [] + bld.wafcache_procs = set([]) + bld.wafcache_stop = False + bld.wafcache_lock = threading.Lock() + + def finalize_upload_async(bld): + if WAFCACHE_ASYNC_NOWAIT: + with bld.wafcache_lock: + bld.wafcache_stop = True + + for fut in reversed(bld.wafcache_uploads): + fut.cancel() + + for proc in bld.wafcache_procs: + proc.kill() + + bld.wafcache_procs.clear() + else: + Logs.pprint('CYAN', '... waiting for wafcache uploads to complete (%s uploads)' % len(bld.wafcache_uploads)) + bld.wafcache_executor.shutdown(wait=True) + bld.add_post_fun(finalize_upload_async) + + if WAFCACHE_STATS: + # Init counter for statistics and hook to print results at the end + bld.cache_reqs = bld.cache_hits = bld.cache_puts = 0 + + def printstats(bld): + hit_ratio = 0 + if bld.cache_reqs > 0: + hit_ratio = (bld.cache_hits / bld.cache_reqs) * 100 + Logs.pprint('CYAN', ' wafcache stats: %s requests, %s hits (ratio: %.2f%%), %s writes' % + (bld.cache_reqs, bld.cache_hits, hit_ratio, bld.cache_puts) ) + bld.add_post_fun(printstats) + + if process_pool: + # already called once + return + + # pre-allocation + processes = [get_process() for x in range(bld.jobs)] + process_pool.extend(processes) + + Task.Task.can_retrieve_cache = can_retrieve_cache + Task.Task.put_files_cache = put_files_cache + Task.Task.uid = uid + Build.BuildContext.hash_env_vars = hash_env_vars + for x in reversed(list(Task.classes.values())): + make_cached(x) + +def cache_command(proc, sig, files_from, files_to): + """ + Create a command for cache worker processes, returns a pickled + base64-encoded tuple containing the task signature, a list of files to + cache and a list of files files to get from cache (one of the lists + is assumed to be empty) + """ + obj = base64.b64encode(cPickle.dumps([sig, files_from, files_to])) + proc.stdin.write(obj) + proc.stdin.write('\n'.encode()) + proc.stdin.flush() + obj = proc.stdout.readline() + if not obj: + raise OSError('Preforked sub-process %r died' % proc.pid) + return cPickle.loads(base64.b64decode(obj)) + +try: + copyfun = os.link +except NameError: + copyfun = shutil.copy2 + +def atomic_copy(orig, dest): + """ + Copy files to the cache, the operation is atomic for a given file + """ + global copyfun + tmp = dest + '.tmp' + up = os.path.dirname(dest) + try: + os.makedirs(up) + except OSError: + pass + + try: + copyfun(orig, tmp) + except OSError as e: + if e.errno == errno.EXDEV: + copyfun = shutil.copy2 + copyfun(orig, tmp) + else: + raise + os.rename(tmp, dest) + +def lru_trim(): + """ + the cache folders take the form: + `CACHE_DIR/0b/0b180f82246d726ece37c8ccd0fb1cde2650d7bfcf122ec1f169079a3bfc0ab9` + they are listed in order of last access, and then removed + until the amount of folders is within TRIM_MAX_FOLDERS and the total space + taken by files is 
less than EVICT_MAX_BYTES + """ + lst = [] + for up in os.listdir(CACHE_DIR): + if len(up) == 2: + sub = os.path.join(CACHE_DIR, up) + for hval in os.listdir(sub): + path = os.path.join(sub, hval) + + size = 0 + for fname in os.listdir(path): + try: + size += os.lstat(os.path.join(path, fname)).st_size + except OSError: + pass + lst.append((os.stat(path).st_mtime, size, path)) + + lst.sort(key=lambda x: x[0]) + lst.reverse() + + tot = sum(x[1] for x in lst) + while tot > EVICT_MAX_BYTES or len(lst) > TRIM_MAX_FOLDERS: + _, tmp_size, path = lst.pop() + tot -= tmp_size + + tmp = path + '.remove' + try: + shutil.rmtree(tmp) + except OSError: + pass + try: + os.rename(path, tmp) + except OSError: + sys.stderr.write('Could not rename %r to %r\n' % (path, tmp)) + else: + try: + shutil.rmtree(tmp) + except OSError: + sys.stderr.write('Could not remove %r\n' % tmp) + sys.stderr.write("Cache trimmed: %r bytes in %r folders left\n" % (tot, len(lst))) + + +def lru_evict(): + """ + Reduce the cache size + """ + lockfile = os.path.join(CACHE_DIR, 'all.lock') + try: + st = os.stat(lockfile) + except EnvironmentError as e: + if e.errno == errno.ENOENT: + with open(lockfile, 'w') as f: + f.write('') + return + else: + raise + + if st.st_mtime < time.time() - EVICT_INTERVAL_MINUTES * 60: + # check every EVICT_INTERVAL_MINUTES minutes if the cache is too big + # OCLOEXEC is unnecessary because no processes are spawned + fd = os.open(lockfile, os.O_RDWR | os.O_CREAT, 0o755) + try: + try: + fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB) + except EnvironmentError: + if WAFCACHE_VERBOSITY: + sys.stderr.write('wafcache: another cleaning process is running\n') + else: + # now dow the actual cleanup + lru_trim() + os.utime(lockfile, None) + finally: + os.close(fd) + +class netcache(object): + def __init__(self): + self.http = urllib3.PoolManager() + + def url_of(self, sig, i): + return "%s/%s/%s" % (CACHE_DIR, sig, i) + + def upload(self, file_path, sig, i): + url = self.url_of(sig, i) + with open(file_path, 'rb') as f: + file_data = f.read() + r = self.http.request('POST', url, timeout=60, + fields={ 'file': ('%s/%s' % (sig, i), file_data), }) + if r.status >= 400: + raise OSError("Invalid status %r %r" % (url, r.status)) + + def download(self, file_path, sig, i): + url = self.url_of(sig, i) + with self.http.request('GET', url, preload_content=False, timeout=60) as inf: + if inf.status >= 400: + raise OSError("Invalid status %r %r" % (url, inf.status)) + with open(file_path, 'wb') as out: + shutil.copyfileobj(inf, out) + + def copy_to_cache(self, sig, files_from, files_to): + try: + for i, x in enumerate(files_from): + if not os.path.islink(x): + self.upload(x, sig, i) + except Exception: + return traceback.format_exc() + return OK + + def copy_from_cache(self, sig, files_from, files_to): + try: + for i, x in enumerate(files_to): + self.download(x, sig, i) + except Exception: + return traceback.format_exc() + return OK + +class fcache(object): + def __init__(self): + if not os.path.exists(CACHE_DIR): + try: + os.makedirs(CACHE_DIR) + except OSError: + pass + if not os.path.exists(CACHE_DIR): + raise ValueError('Could not initialize the cache directory') + + def copy_to_cache(self, sig, files_from, files_to): + """ + Copy files to the cache, existing files are overwritten, + and the copy is atomic only for a given file, not for all files + that belong to a given task object + """ + try: + for i, x in enumerate(files_from): + dest = os.path.join(CACHE_DIR, sig[:2], sig, str(i)) + atomic_copy(x, dest) + except 
Exception: + return traceback.format_exc() + else: + # attempt trimming if caching was successful: + # we may have things to trim! + try: + lru_evict() + except Exception: + return traceback.format_exc() + return OK + + def copy_from_cache(self, sig, files_from, files_to): + """ + Copy files from the cache + """ + try: + for i, x in enumerate(files_to): + orig = os.path.join(CACHE_DIR, sig[:2], sig, str(i)) + atomic_copy(orig, x) + + # success! update the cache time + os.utime(os.path.join(CACHE_DIR, sig[:2], sig), None) + except Exception: + return traceback.format_exc() + return OK + +class bucket_cache(object): + def bucket_copy(self, source, target): + if WAFCACHE_CMD: + def replacer(match): + if match.group('src'): + return source + elif match.group('tgt'): + return target + cmd = [re_waf_cmd.sub(replacer, x) for x in shlex.split(WAFCACHE_CMD)] + elif CACHE_DIR.startswith('s3://'): + cmd = ['aws', 's3', 'cp', source, target] + elif CACHE_DIR.startswith('gs://'): + cmd = ['gsutil', 'cp', source, target] + else: + cmd = ['mc', 'cp', source, target] + + proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + out, err = proc.communicate() + if proc.returncode: + raise OSError('Error copy %r to %r using: %r (exit %r):\n out:%s\n err:%s' % ( + source, target, cmd, proc.returncode, out.decode(errors='replace'), err.decode(errors='replace'))) + + def copy_to_cache(self, sig, files_from, files_to): + try: + for i, x in enumerate(files_from): + dest = os.path.join(CACHE_DIR, sig[:2], sig, str(i)) + self.bucket_copy(x, dest) + except Exception: + return traceback.format_exc() + return OK + + def copy_from_cache(self, sig, files_from, files_to): + try: + for i, x in enumerate(files_to): + orig = os.path.join(CACHE_DIR, sig[:2], sig, str(i)) + self.bucket_copy(orig, x) + except EnvironmentError: + return traceback.format_exc() + return OK + +def loop(service): + """ + This function is run when this file is run as a standalone python script, + it assumes a parent process that will communicate the commands to it + as pickled-encoded tuples (one line per command) + + The commands are to copy files to the cache or copy files from the + cache to a target destination + """ + # one operation is performed at a single time by a single process + # therefore stdin never has more than one line + txt = sys.stdin.readline().strip() + if not txt: + # parent process probably ended + sys.exit(1) + ret = OK + + [sig, files_from, files_to] = cPickle.loads(base64.b64decode(txt)) + if files_from: + # TODO return early when pushing files upstream + ret = service.copy_to_cache(sig, files_from, files_to) + elif files_to: + # the build process waits for workers to (possibly) obtain files from the cache + ret = service.copy_from_cache(sig, files_from, files_to) + else: + ret = "Invalid command" + + obj = base64.b64encode(cPickle.dumps(ret)) + sys.stdout.write(obj.decode()) + sys.stdout.write('\n') + sys.stdout.flush() + +if __name__ == '__main__': + if CACHE_DIR.startswith('s3://') or CACHE_DIR.startswith('gs://') or CACHE_DIR.startswith('minio://'): + if CACHE_DIR.startswith('minio://'): + CACHE_DIR = CACHE_DIR[8:] # minio doesn't need the protocol part, uses config aliases + service = bucket_cache() + elif CACHE_DIR.startswith('http'): + service = netcache() + else: + service = fcache() + while 1: + try: + loop(service) + except KeyboardInterrupt: + break + diff --git a/waflib/extras/why.py b/waflib/extras/why.py index 554914e388..1bb941f6c9 100644 --- a/waflib/extras/why.py +++ 
b/waflib/extras/why.py @@ -17,11 +17,13 @@ def configure(conf): def signature(self): # compute the result one time, and suppose the scan_signature will give the good result - try: return self.cache_sig - except AttributeError: pass + try: + return self.cache_sig + except AttributeError: + pass self.m = Utils.md5() - self.m.update(self.hcode.encode()) + self.m.update(self.hcode) id_sig = self.m.digest() # explicit deps @@ -62,7 +64,7 @@ def runnable_status(self): def v(x): return Utils.to_hex(x) - Logs.debug("Task %r" % self) + Logs.debug('Task %r', self) msgs = ['* Implicit or scanner dependency', '* Task code', '* Source file, explicit or manual dependency', '* Configuration data variable'] tmp = 'task: -> %s: %s %s' for x in range(len(msgs)): @@ -70,7 +72,7 @@ def v(x): a = new_sigs[x*l : (x+1)*l] b = old_sigs[x*l : (x+1)*l] if (a != b): - Logs.debug(tmp % (msgs[x].ljust(35), v(a), v(b))) + Logs.debug(tmp, msgs[x].ljust(35), v(a), v(b)) return ret Task.Task.runnable_status = runnable_status diff --git a/waflib/extras/win32_opts.py b/waflib/extras/win32_opts.py index 3b6933538d..9f7443c39b 100644 --- a/waflib/extras/win32_opts.py +++ b/waflib/extras/win32_opts.py @@ -4,15 +4,13 @@ """ Windows-specific optimizations -This module can help reducing the overhead of listing files on windows (more than 10000 files). +This module can help reducing the overhead of listing files on windows +(more than 10000 files). Python 3.5 already provides the listdir +optimization though. """ import os -try: - import cPickle -except ImportError: - import pickle as cPickle -from waflib import Utils, Build, Context, Node, Logs +from waflib import Utils, Build, Node, Logs try: TP = '%s\\*'.decode('ascii') @@ -20,7 +18,7 @@ TP = '%s\\*' if Utils.is_win32: - from waflib.extras import md5_tstamp + from waflib.Tools import md5_tstamp import ctypes, ctypes.wintypes FindFirstFile = ctypes.windll.kernel32.FindFirstFileW @@ -65,7 +63,7 @@ def cached_hash_file(self): lst_files[str(findData.cFileName)] = d if not FindNextFile(find, ctypes.byref(findData)): break - except Exception as e: + except Exception: cache[id(self.parent)] = {} raise IOError('Not a file') finally: @@ -103,13 +101,7 @@ def get_bld_sig_win32(self): pass except AttributeError: self.ctx.hash_cache = {} - - if not self.is_bld(): - if self.is_child_of(self.ctx.srcnode): - self.sig = self.cached_hash_file() - else: - self.sig = Utils.h_file(self.abspath()) - self.ctx.hash_cache[id(self)] = ret = self.sig + self.ctx.hash_cache[id(self)] = ret = Utils.h_file(self.abspath()) return ret Node.Node.get_bld_sig = get_bld_sig_win32 @@ -130,7 +122,7 @@ def isfile_cached(self): find = FindFirstFile(TP % curpath, ctypes.byref(findData)) if find == INVALID_HANDLE_VALUE: - Logs.error("invalid win32 handle isfile_cached %r" % self.abspath()) + Logs.error("invalid win32 handle isfile_cached %r", self.abspath()) return os.path.isfile(self.abspath()) try: @@ -142,7 +134,7 @@ def isfile_cached(self): if not FindNextFile(find, ctypes.byref(findData)): break except Exception as e: - Logs.error('exception while listing a folder %r %r' % (self.abspath(), e)) + Logs.error('exception while listing a folder %r %r', self.abspath(), e) return os.path.isfile(self.abspath()) finally: FindClose(find) @@ -152,12 +144,11 @@ def isfile_cached(self): def find_or_declare_win32(self, lst): # assuming that "find_or_declare" is called before the build starts, remove the calls to os.path.isfile if isinstance(lst, str): - lst = [x for x in Node.split_path(lst) if x and x != '.'] + lst = [x for x 
in Utils.split_path(lst) if x and x != '.'] - node = self.get_bld().search(lst) + node = self.get_bld().search_node(lst) if node: if not node.isfile_cached(): - node.sig = None try: node.parent.mkdir() except OSError: @@ -167,7 +158,6 @@ def find_or_declare_win32(self, lst): node = self.find_node(lst) if node: if not node.isfile_cached(): - node.sig = None try: node.parent.mkdir() except OSError: diff --git a/waflib/extras/wix.py b/waflib/extras/wix.py new file mode 100644 index 0000000000..d87bfbb1ec --- /dev/null +++ b/waflib/extras/wix.py @@ -0,0 +1,87 @@ +#!/usr/bin/python +# encoding: utf-8 +# vim: tabstop=4 noexpandtab + +""" +Windows Installer XML Tool (WiX) + +.wxs --- candle ---> .wxobj --- light ---> .msi + +bld(features='wix', some.wxs, gen='some.msi', candleflags=[..], lightflags=[..]) + +bld(features='wix', source=['bundle.wxs','WixBalExtension'], gen='setup.exe', candleflags=[..]) +""" + +import os, copy +from waflib import TaskGen +from waflib import Task +from waflib.Utils import winreg + +class candle(Task.Task): + run_str = '${CANDLE} -nologo ${CANDLEFLAGS} -out ${TGT} ${SRC[0].abspath()}', + +class light(Task.Task): + run_str = "${LIGHT} -nologo -b ${SRC[0].parent.abspath()} ${LIGHTFLAGS} -out ${TGT} ${SRC[0].abspath()}" + +@TaskGen.feature('wix') +@TaskGen.before_method('process_source') +def wix(self): + #X.wxs -> ${SRC} for CANDLE + #X.wxobj -> ${SRC} for LIGHT + #X.dll -> -ext X in ${LIGHTFLAGS} + #X.wxl -> wixui.wixlib -loc X.wxl in ${LIGHTFLAGS} + wxobj = [] + wxs = [] + exts = [] + wxl = [] + rest = [] + for x in self.source: + if x.endswith('.wxobj'): + wxobj.append(x) + elif x.endswith('.wxs'): + wxobj.append(self.path.find_or_declare(x[:-4]+'.wxobj')) + wxs.append(x) + elif x.endswith('.dll'): + exts.append(x[:-4]) + elif '.' not in x: + exts.append(x) + elif x.endswith('.wxl'): + wxl.append(x) + else: + rest.append(x) + self.source = self.to_nodes(rest) #.wxs + + cndl = self.create_task('candle', self.to_nodes(wxs), self.to_nodes(wxobj)) + lght = self.create_task('light', self.to_nodes(wxobj), self.path.find_or_declare(self.gen)) + + cndl.env.CANDLEFLAGS = copy.copy(getattr(self,'candleflags',[])) + lght.env.LIGHTFLAGS = copy.copy(getattr(self,'lightflags',[])) + + for x in wxl: + lght.env.append_value('LIGHTFLAGS','wixui.wixlib') + lght.env.append_value('LIGHTFLAGS','-loc') + lght.env.append_value('LIGHTFLAGS',x) + for x in exts: + cndl.env.append_value('CANDLEFLAGS','-ext') + cndl.env.append_value('CANDLEFLAGS',x) + lght.env.append_value('LIGHTFLAGS','-ext') + lght.env.append_value('LIGHTFLAGS',x) + +#wix_bin_path() +def wix_bin_path(): + basekey = r"SOFTWARE\Microsoft\.NETFramework\AssemblyFolders" + query = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, basekey) + cnt=winreg.QueryInfoKey(query)[0] + thiskey = r'C:\Program Files (x86)\WiX Toolset v3.10\SDK' + for i in range(cnt-1,-1,-1): + thiskey = winreg.EnumKey(query,i) + if 'WiX' in thiskey: + break + winreg.CloseKey(query) + return os.path.normpath(winreg.QueryValue(winreg.HKEY_LOCAL_MACHINE, basekey+r'\\'+thiskey)+'..\\bin') + +def configure(ctx): + path_list=[wix_bin_path()] + ctx.find_program('candle', var='CANDLE', mandatory=True, path_list = path_list) + ctx.find_program('light', var='LIGHT', mandatory=True, path_list = path_list) + diff --git a/waflib/extras/xcode.py b/waflib/extras/xcode.py deleted file mode 100644 index 4a975771fa..0000000000 --- a/waflib/extras/xcode.py +++ /dev/null @@ -1,312 +0,0 @@ -#! 
/usr/bin/env python -# encoding: utf-8 -# XCode 3/XCode 4 generator for Waf -# Nicolas Mercier 2011 - -""" -Usage: - -def options(opt): - opt.load('xcode') - -$ waf configure xcode -""" - -# TODO: support iOS projects - -from waflib import Context, TaskGen, Build, Utils -import os, sys, random, time - -HEADERS_GLOB = '**/(*.h|*.hpp|*.H|*.inl)' - -MAP_EXT = { - '.h' : "sourcecode.c.h", - - '.hh': "sourcecode.cpp.h", - '.inl': "sourcecode.cpp.h", - '.hpp': "sourcecode.cpp.h", - - '.c': "sourcecode.c.c", - - '.m': "sourcecode.c.objc", - - '.mm': "sourcecode.cpp.objcpp", - - '.cc': "sourcecode.cpp.cpp", - - '.cpp': "sourcecode.cpp.cpp", - '.C': "sourcecode.cpp.cpp", - '.cxx': "sourcecode.cpp.cpp", - '.c++': "sourcecode.cpp.cpp", - - '.l': "sourcecode.lex", # luthor - '.ll': "sourcecode.lex", - - '.y': "sourcecode.yacc", - '.yy': "sourcecode.yacc", - - '.plist': "text.plist.xml", - ".nib": "wrapper.nib", - ".xib": "text.xib", -} - - -part1 = 0 -part2 = 10000 -part3 = 0 -id = 562000999 -def newid(): - global id - id = id + 1 - return "%04X%04X%04X%012d" % (0, 10000, 0, id) - -class XCodeNode: - def __init__(self): - self._id = newid() - - def tostring(self, value): - if isinstance(value, dict): - result = "{\n" - for k,v in value.items(): - result = result + "\t\t\t%s = %s;\n" % (k, self.tostring(v)) - result = result + "\t\t}" - return result - elif isinstance(value, str): - return "\"%s\"" % value - elif isinstance(value, list): - result = "(\n" - for i in value: - result = result + "\t\t\t%s,\n" % self.tostring(i) - result = result + "\t\t)" - return result - elif isinstance(value, XCodeNode): - return value._id - else: - return str(value) - - def write_recursive(self, value, file): - if isinstance(value, dict): - for k,v in value.items(): - self.write_recursive(v, file) - elif isinstance(value, list): - for i in value: - self.write_recursive(i, file) - elif isinstance(value, XCodeNode): - value.write(file) - - def write(self, file): - for attribute,value in self.__dict__.items(): - if attribute[0] != '_': - self.write_recursive(value, file) - - w = file.write - w("\t%s = {\n" % self._id) - w("\t\tisa = %s;\n" % self.__class__.__name__) - for attribute,value in self.__dict__.items(): - if attribute[0] != '_': - w("\t\t%s = %s;\n" % (attribute, self.tostring(value))) - w("\t};\n\n") - - - -# Configurations -class XCBuildConfiguration(XCodeNode): - def __init__(self, name, settings = {}, env=None): - XCodeNode.__init__(self) - self.baseConfigurationReference = "" - self.buildSettings = settings - self.name = name - if env and env.ARCH: - settings['ARCHS'] = " ".join(env.ARCH) - - -class XCConfigurationList(XCodeNode): - def __init__(self, settings): - XCodeNode.__init__(self) - self.buildConfigurations = settings - self.defaultConfigurationIsVisible = 0 - self.defaultConfigurationName = settings and settings[0].name or "" - -# Group/Files -class PBXFileReference(XCodeNode): - def __init__(self, name, path, filetype = '', sourcetree = "SOURCE_ROOT"): - XCodeNode.__init__(self) - self.fileEncoding = 4 - if not filetype: - _, ext = os.path.splitext(name) - filetype = MAP_EXT.get(ext, 'text') - self.lastKnownFileType = filetype - self.name = name - self.path = path - self.sourceTree = sourcetree - -class PBXGroup(XCodeNode): - def __init__(self, name, sourcetree = ""): - XCodeNode.__init__(self) - self.children = [] - self.name = name - self.sourceTree = sourcetree - - def add(self, root, sources): - folders = {} - def folder(n): - if not n.is_child_of(root): - return self - try: - return 
folders[n] - except KeyError: - f = PBXGroup(n.name) - p = folder(n.parent) - folders[n] = f - p.children.append(f) - return f - for s in sources: - f = folder(s.parent) - source = PBXFileReference(s.name, s.abspath()) - f.children.append(source) - - -# Targets -class PBXLegacyTarget(XCodeNode): - def __init__(self, action, target=''): - XCodeNode.__init__(self) - self.buildConfigurationList = XCConfigurationList([XCBuildConfiguration('waf', {})]) - if not target: - self.buildArgumentsString = "%s %s" % (sys.argv[0], action) - else: - self.buildArgumentsString = "%s %s --targets=%s" % (sys.argv[0], action, target) - self.buildPhases = [] - self.buildToolPath = sys.executable - self.buildWorkingDirectory = "" - self.dependencies = [] - self.name = target or action - self.productName = target or action - self.passBuildSettingsInEnvironment = 0 - -class PBXShellScriptBuildPhase(XCodeNode): - def __init__(self, action, target): - XCodeNode.__init__(self) - self.buildActionMask = 2147483647 - self.files = [] - self.inputPaths = [] - self.outputPaths = [] - self.runOnlyForDeploymentPostProcessing = 0 - self.shellPath = "/bin/sh" - self.shellScript = "%s %s %s --targets=%s" % (sys.executable, sys.argv[0], action, target) - -class PBXNativeTarget(XCodeNode): - def __init__(self, action, target, node, env): - XCodeNode.__init__(self) - conf = XCBuildConfiguration('waf', {'PRODUCT_NAME':target, 'CONFIGURATION_BUILD_DIR':node.parent.abspath()}, env) - self.buildConfigurationList = XCConfigurationList([conf]) - self.buildPhases = [PBXShellScriptBuildPhase(action, target)] - self.buildRules = [] - self.dependencies = [] - self.name = target - self.productName = target - self.productType = "com.apple.product-type.application" - self.productReference = PBXFileReference(target, node.abspath(), 'wrapper.application', 'BUILT_PRODUCTS_DIR') - -# Root project object -class PBXProject(XCodeNode): - def __init__(self, name, version): - XCodeNode.__init__(self) - self.buildConfigurationList = XCConfigurationList([XCBuildConfiguration('waf', {})]) - self.compatibilityVersion = version[0] - self.hasScannedForEncodings = 1; - self.mainGroup = PBXGroup(name) - self.projectRoot = "" - self.projectDirPath = "" - self.targets = [] - self._objectVersion = version[1] - self._output = PBXGroup('out') - self.mainGroup.children.append(self._output) - - def write(self, file): - w = file.write - w("// !$*UTF8*$!\n") - w("{\n") - w("\tarchiveVersion = 1;\n") - w("\tclasses = {\n") - w("\t};\n") - w("\tobjectVersion = %d;\n" % self._objectVersion) - w("\tobjects = {\n\n") - - XCodeNode.write(self, file) - - w("\t};\n") - w("\trootObject = %s;\n" % self._id) - w("}\n") - - def add_task_gen(self, tg): - if not getattr(tg, 'mac_app', False): - self.targets.append(PBXLegacyTarget('build', tg.name)) - else: - target = PBXNativeTarget('build', tg.name, tg.link_task.outputs[0].change_ext('.app'), tg.env) - self.targets.append(target) - self._output.children.append(target.productReference) - -class xcode(Build.BuildContext): - cmd = 'xcode' - fun = 'build' - - def collect_source(self, tg): - source_files = tg.to_nodes(getattr(tg, 'source', [])) - plist_files = tg.to_nodes(getattr(tg, 'mac_plist', [])) - resource_files = [tg.path.find_node(i) for i in Utils.to_list(getattr(tg, 'mac_resources', []))] - include_dirs = Utils.to_list(getattr(tg, 'includes', [])) + Utils.to_list(getattr(tg, 'export_dirs', [])) - include_files = [] - for x in include_dirs: - if not isinstance(x, str): - include_files.append(x) - continue - d = 
tg.path.find_node(x) - if d: - lst = [y for y in d.ant_glob(HEADERS_GLOB, flat=False)] - include_files.extend(lst) - - # remove duplicates - source = list(set(source_files + plist_files + resource_files + include_files)) - source.sort(key=lambda x: x.abspath()) - return source - - def execute(self): - """ - Entry point - """ - self.restore() - if not self.all_envs: - self.load_envs() - self.recurse([self.run_dir]) - - appname = getattr(Context.g_module, Context.APPNAME, os.path.basename(self.srcnode.abspath())) - p = PBXProject(appname, ('Xcode 3.2', 46)) - - for g in self.groups: - for tg in g: - if not isinstance(tg, TaskGen.task_gen): - continue - - tg.post() - - features = Utils.to_list(getattr(tg, 'features', '')) - - group = PBXGroup(tg.name) - group.add(tg.path, self.collect_source(tg)) - p.mainGroup.children.append(group) - - if 'cprogram' or 'cxxprogram' in features: - p.add_task_gen(tg) - - - # targets that don't produce the executable but that you might want to run - p.targets.append(PBXLegacyTarget('configure')) - p.targets.append(PBXLegacyTarget('dist')) - p.targets.append(PBXLegacyTarget('install')) - p.targets.append(PBXLegacyTarget('check')) - node = self.srcnode.make_node('%s.xcodeproj' % appname) - node.mkdir() - node = node.make_node('project.pbxproj') - p.write(open(node.abspath(), 'w')) - - diff --git a/waflib/extras/xcode6.py b/waflib/extras/xcode6.py new file mode 100644 index 0000000000..c5b309120c --- /dev/null +++ b/waflib/extras/xcode6.py @@ -0,0 +1,727 @@ +#! /usr/bin/env python +# encoding: utf-8 +# XCode 3/XCode 4/XCode 6/Xcode 7 generator for Waf +# Based on work by Nicolas Mercier 2011 +# Extended by Simon Warg 2015, https://github.com/mimon +# XCode project file format based on http://www.monobjc.net/xcode-project-file-format.html + +""" +See playground/xcode6/ for usage examples. 
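A minimal wscript might look like the following sketch (the file names and the use of C++ are placeholders; playground/xcode6/ remains the authoritative reference):

    def options(opt):
        opt.load('compiler_cxx')

    def configure(conf):
        conf.load('compiler_cxx')
        # load this tool last so that a default Debug/Release
        # PROJ_CONFIGURATION can be derived from the environment
        conf.load('xcode6')

    def build(bld):
        # 'program', 'app', 'framework', 'dylib', 'shlib' and 'stlib'
        # are bound onto the build context by this tool (see bind_fun below)
        bld.program(source='src/main.cpp', target='myapp')

The project is then generated with:

    $ waf configure xcode6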
+ +""" + +from waflib import Context, TaskGen, Build, Utils, Errors, Logs +import os, sys + +# FIXME too few extensions +XCODE_EXTS = ['.c', '.cpp', '.m', '.mm'] + +HEADERS_GLOB = '**/(*.h|*.hpp|*.H|*.inl)' + +MAP_EXT = { + '': "folder", + '.h' : "sourcecode.c.h", + + '.hh': "sourcecode.cpp.h", + '.inl': "sourcecode.cpp.h", + '.hpp': "sourcecode.cpp.h", + + '.c': "sourcecode.c.c", + + '.m': "sourcecode.c.objc", + + '.mm': "sourcecode.cpp.objcpp", + + '.cc': "sourcecode.cpp.cpp", + + '.cpp': "sourcecode.cpp.cpp", + '.C': "sourcecode.cpp.cpp", + '.cxx': "sourcecode.cpp.cpp", + '.c++': "sourcecode.cpp.cpp", + + '.l': "sourcecode.lex", # luthor + '.ll': "sourcecode.lex", + + '.y': "sourcecode.yacc", + '.yy': "sourcecode.yacc", + + '.plist': "text.plist.xml", + ".nib": "wrapper.nib", + ".xib": "text.xib", +} + +# Used in PBXNativeTarget elements +PRODUCT_TYPE_APPLICATION = 'com.apple.product-type.application' +PRODUCT_TYPE_FRAMEWORK = 'com.apple.product-type.framework' +PRODUCT_TYPE_EXECUTABLE = 'com.apple.product-type.tool' +PRODUCT_TYPE_LIB_STATIC = 'com.apple.product-type.library.static' +PRODUCT_TYPE_LIB_DYNAMIC = 'com.apple.product-type.library.dynamic' +PRODUCT_TYPE_EXTENSION = 'com.apple.product-type.kernel-extension' +PRODUCT_TYPE_IOKIT = 'com.apple.product-type.kernel-extension.iokit' + +# Used in PBXFileReference elements +FILE_TYPE_APPLICATION = 'wrapper.cfbundle' +FILE_TYPE_FRAMEWORK = 'wrapper.framework' +FILE_TYPE_LIB_DYNAMIC = 'compiled.mach-o.dylib' +FILE_TYPE_LIB_STATIC = 'archive.ar' +FILE_TYPE_EXECUTABLE = 'compiled.mach-o.executable' + +# Tuple packs of the above +TARGET_TYPE_FRAMEWORK = (PRODUCT_TYPE_FRAMEWORK, FILE_TYPE_FRAMEWORK, '.framework') +TARGET_TYPE_APPLICATION = (PRODUCT_TYPE_APPLICATION, FILE_TYPE_APPLICATION, '.app') +TARGET_TYPE_DYNAMIC_LIB = (PRODUCT_TYPE_LIB_DYNAMIC, FILE_TYPE_LIB_DYNAMIC, '.dylib') +TARGET_TYPE_STATIC_LIB = (PRODUCT_TYPE_LIB_STATIC, FILE_TYPE_LIB_STATIC, '.a') +TARGET_TYPE_EXECUTABLE = (PRODUCT_TYPE_EXECUTABLE, FILE_TYPE_EXECUTABLE, '') + +# Maps target type string to its data +TARGET_TYPES = { + 'framework': TARGET_TYPE_FRAMEWORK, + 'app': TARGET_TYPE_APPLICATION, + 'dylib': TARGET_TYPE_DYNAMIC_LIB, + 'stlib': TARGET_TYPE_STATIC_LIB, + 'exe' :TARGET_TYPE_EXECUTABLE, +} + +def delete_invalid_values(dct): + """ Deletes entries that are dictionaries or sets """ + for k, v in list(dct.items()): + if isinstance(v, dict) or isinstance(v, set): + del dct[k] + return dct + +""" +Configuration of the global project settings. Sets an environment variable 'PROJ_CONFIGURATION' +which is a dictionary of configuration name and buildsettings pair. +E.g.: +env.PROJ_CONFIGURATION = { + 'Debug': { + 'ARCHS': 'x86', + ... + } + 'Release': { + 'ARCHS': x86_64' + ... + } +} +The user can define a completely customized dictionary in configure() stage. Otherwise a default Debug/Release will be created +based on env variable +""" +def configure(self): + if not self.env.PROJ_CONFIGURATION: + self.to_log("A default project configuration was created since no custom one was given in the configure(conf) stage. Define your custom project settings by adding PROJ_CONFIGURATION to env. The env.PROJ_CONFIGURATION must be a dictionary with at least one key, where each key is the configuration name, and the value is a dictionary of key/value settings.\n") + + # Check for any added config files added by the tool 'c_config'. 
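For illustration, a user wscript can provide a fully custom set of configurations before loading this tool (a sketch; the build-setting names follow Xcode conventions and their values are placeholders, nothing is validated here):

    def configure(conf):
        conf.env.PROJ_CONFIGURATION = {
            'Debug':   {'ARCHS': 'x86_64', 'GCC_OPTIMIZATION_LEVEL': '0'},
            'Release': {'ARCHS': 'x86_64', 'GCC_OPTIMIZATION_LEVEL': 's'},
        }
        conf.load('xcode6')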
+ if 'cfg_files' in self.env: + self.env.INCLUDES = Utils.to_list(self.env.INCLUDES) + [os.path.abspath(os.path.dirname(f)) for f in self.env.cfg_files] + + # Create default project configuration? + if 'PROJ_CONFIGURATION' not in self.env: + defaults = delete_invalid_values(self.env.get_merged_dict()) + self.env.PROJ_CONFIGURATION = { + "Debug": defaults, + "Release": defaults, + } + + # Some build settings are required to be present by XCode. We will supply default values + # if user hasn't defined any. + defaults_required = [('PRODUCT_NAME', '$(TARGET_NAME)')] + for cfgname,settings in self.env.PROJ_CONFIGURATION.items(): + for default_var, default_val in defaults_required: + if default_var not in settings: + settings[default_var] = default_val + + # Error check customization + if not isinstance(self.env.PROJ_CONFIGURATION, dict): + raise Errors.ConfigurationError("The env.PROJ_CONFIGURATION must be a dictionary with at least one key, where each key is the configuration name, and the value is a dictionary of key/value settings.") + +part1 = 0 +part2 = 10000 +part3 = 0 +id = 562000999 +def newid(): + global id + id += 1 + return "%04X%04X%04X%012d" % (0, 10000, 0, id) + +""" +Represents a tree node in the XCode project plist file format. +When written to a file, all attributes of XCodeNode are stringified together with +its value. However, attributes starting with an underscore _ are ignored +during that process and allows you to store arbitrary values that are not supposed +to be written out. +""" +class XCodeNode(object): + def __init__(self): + self._id = newid() + self._been_written = False + + def tostring(self, value): + if isinstance(value, dict): + result = "{\n" + for k,v in value.items(): + result = result + "\t\t\t%s = %s;\n" % (k, self.tostring(v)) + result = result + "\t\t}" + return result + elif isinstance(value, str): + return '"%s"' % value.replace('"', '\\\\\\"') + elif isinstance(value, list): + result = "(\n" + for i in value: + result = result + "\t\t\t\t%s,\n" % self.tostring(i) + result = result + "\t\t\t)" + return result + elif isinstance(value, XCodeNode): + return value._id + else: + return str(value) + + def write_recursive(self, value, file): + if isinstance(value, dict): + for k,v in value.items(): + self.write_recursive(v, file) + elif isinstance(value, list): + for i in value: + self.write_recursive(i, file) + elif isinstance(value, XCodeNode): + value.write(file) + + def write(self, file): + if not self._been_written: + self._been_written = True + for attribute,value in self.__dict__.items(): + if attribute[0] != '_': + self.write_recursive(value, file) + w = file.write + w("\t%s = {\n" % self._id) + w("\t\tisa = %s;\n" % self.__class__.__name__) + for attribute,value in self.__dict__.items(): + if attribute[0] != '_': + w("\t\t%s = %s;\n" % (attribute, self.tostring(value))) + w("\t};\n\n") + +# Configurations +class XCBuildConfiguration(XCodeNode): + def __init__(self, name, settings = {}, env=None): + XCodeNode.__init__(self) + self.baseConfigurationReference = "" + self.buildSettings = settings + self.name = name + if env and env.ARCH: + settings['ARCHS'] = " ".join(env.ARCH) + + +class XCConfigurationList(XCodeNode): + def __init__(self, configlst): + """ :param configlst: list of XCConfigurationList """ + XCodeNode.__init__(self) + self.buildConfigurations = configlst + self.defaultConfigurationIsVisible = 0 + self.defaultConfigurationName = configlst and configlst[0].name or "" + +# Group/Files +class PBXFileReference(XCodeNode): + def 
__init__(self, name, path, filetype = '', sourcetree = "SOURCE_ROOT"): + + XCodeNode.__init__(self) + self.fileEncoding = 4 + if not filetype: + _, ext = os.path.splitext(name) + filetype = MAP_EXT.get(ext, 'text') + self.lastKnownFileType = filetype + self.explicitFileType = filetype + self.name = name + self.path = path + self.sourceTree = sourcetree + + def __hash__(self): + return (self.path+self.name).__hash__() + + def __eq__(self, other): + return (self.path, self.name) == (other.path, other.name) + +class PBXBuildFile(XCodeNode): + """ This element indicate a file reference that is used in a PBXBuildPhase (either as an include or resource). """ + def __init__(self, fileRef, settings={}): + XCodeNode.__init__(self) + + # fileRef is a reference to a PBXFileReference object + self.fileRef = fileRef + + # A map of key/value pairs for additional settings. + self.settings = settings + + def __hash__(self): + return (self.fileRef).__hash__() + + def __eq__(self, other): + return self.fileRef == other.fileRef + +class PBXGroup(XCodeNode): + def __init__(self, name, sourcetree = 'SOURCE_TREE'): + XCodeNode.__init__(self) + self.children = [] + self.name = name + self.sourceTree = sourcetree + + # Maintain a lookup table for all PBXFileReferences + # that are contained in this group. + self._filerefs = {} + + def add(self, sources): + """ + Add a list of PBXFileReferences to this group + + :param sources: list of PBXFileReferences objects + """ + self._filerefs.update(dict(zip(sources, sources))) + self.children.extend(sources) + + def get_sub_groups(self): + """ + Returns all child PBXGroup objects contained in this group + """ + return list(filter(lambda x: isinstance(x, PBXGroup), self.children)) + + def find_fileref(self, fileref): + """ + Recursively search this group for an existing PBXFileReference. Returns None + if none were found. + + The reason you'd want to reuse existing PBXFileReferences from a PBXGroup is that XCode doesn't like PBXFileReferences that aren't part of a PBXGroup hierarchy. + If it isn't, the consequence is that certain UI features like 'Reveal in Finder' + stops working. + """ + if fileref in self._filerefs: + return self._filerefs[fileref] + elif self.children: + for childgroup in self.get_sub_groups(): + f = childgroup.find_fileref(fileref) + if f: + return f + return None + +class PBXContainerItemProxy(XCodeNode): + """ This is the element for to decorate a target item. """ + def __init__(self, containerPortal, remoteGlobalIDString, remoteInfo='', proxyType=1): + XCodeNode.__init__(self) + self.containerPortal = containerPortal # PBXProject + self.remoteGlobalIDString = remoteGlobalIDString # PBXNativeTarget + self.remoteInfo = remoteInfo # Target name + self.proxyType = proxyType + +class PBXTargetDependency(XCodeNode): + """ This is the element for referencing other target through content proxies. """ + def __init__(self, native_target, proxy): + XCodeNode.__init__(self) + self.target = native_target + self.targetProxy = proxy + +class PBXFrameworksBuildPhase(XCodeNode): + """ This is the element for the framework link build phase, i.e. 
linking to frameworks """ + def __init__(self, pbxbuildfiles): + XCodeNode.__init__(self) + self.buildActionMask = 2147483647 + self.runOnlyForDeploymentPostprocessing = 0 + self.files = pbxbuildfiles #List of PBXBuildFile (.o, .framework, .dylib) + +class PBXHeadersBuildPhase(XCodeNode): + """ This is the element for adding header files to be packaged into the .framework """ + def __init__(self, pbxbuildfiles): + XCodeNode.__init__(self) + self.buildActionMask = 2147483647 + self.runOnlyForDeploymentPostprocessing = 0 + self.files = pbxbuildfiles #List of PBXBuildFile (.o, .framework, .dylib) + +class PBXCopyFilesBuildPhase(XCodeNode): + """ + Represents the PBXCopyFilesBuildPhase section. PBXBuildFile + can be added to this node to copy files after build is done. + """ + def __init__(self, pbxbuildfiles, dstpath, dstSubpathSpec=0, *args, **kwargs): + XCodeNode.__init__(self) + self.files = pbxbuildfiles + self.dstPath = dstpath + self.dstSubfolderSpec = dstSubpathSpec + +class PBXSourcesBuildPhase(XCodeNode): + """ Represents the 'Compile Sources' build phase in a Xcode target """ + def __init__(self, buildfiles): + XCodeNode.__init__(self) + self.files = buildfiles # List of PBXBuildFile objects + +class PBXLegacyTarget(XCodeNode): + def __init__(self, action, target=''): + XCodeNode.__init__(self) + self.buildConfigurationList = XCConfigurationList([XCBuildConfiguration('waf', {})]) + if not target: + self.buildArgumentsString = "%s %s" % (sys.argv[0], action) + else: + self.buildArgumentsString = "%s %s --targets=%s" % (sys.argv[0], action, target) + self.buildPhases = [] + self.buildToolPath = sys.executable + self.buildWorkingDirectory = "" + self.dependencies = [] + self.name = target or action + self.productName = target or action + self.passBuildSettingsInEnvironment = 0 + +class PBXShellScriptBuildPhase(XCodeNode): + def __init__(self, action, target): + XCodeNode.__init__(self) + self.buildActionMask = 2147483647 + self.files = [] + self.inputPaths = [] + self.outputPaths = [] + self.runOnlyForDeploymentPostProcessing = 0 + self.shellPath = "/bin/sh" + self.shellScript = "%s %s %s --targets=%s" % (sys.executable, sys.argv[0], action, target) + +class PBXNativeTarget(XCodeNode): + """ Represents a target in XCode, e.g. App, DyLib, Framework etc. """ + def __init__(self, target, node, target_type=TARGET_TYPE_APPLICATION, configlist=[], buildphases=[]): + XCodeNode.__init__(self) + product_type = target_type[0] + file_type = target_type[1] + + self.buildConfigurationList = XCConfigurationList(configlist) + self.buildPhases = buildphases + self.buildRules = [] + self.dependencies = [] + self.name = target + self.productName = target + self.productType = product_type # See TARGET_TYPE_ tuples constants + self.productReference = PBXFileReference(node.name, node.abspath(), file_type, '') + + def add_configuration(self, cf): + """ :type cf: XCBuildConfiguration """ + self.buildConfigurationList.buildConfigurations.append(cf) + + def add_build_phase(self, phase): + # Some build phase types may appear only once. If a phase type already exists, then merge them. 
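A sketch of that merging behaviour (output_node stands for a waflib Node and buildfile_a/buildfile_b for PBXBuildFile objects, all elided here):

    target = PBXNativeTarget('demo', output_node, TARGET_TYPE_EXECUTABLE)
    target.add_build_phase(PBXSourcesBuildPhase([buildfile_a]))
    target.add_build_phase(PBXSourcesBuildPhase([buildfile_b]))
    # target.buildPhases now holds a single PBXSourcesBuildPhase whose
    # files list is [buildfile_a, buildfile_b]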
+ if ( (phase.__class__ == PBXFrameworksBuildPhase) + or (phase.__class__ == PBXSourcesBuildPhase) ): + for b in self.buildPhases: + if b.__class__ == phase.__class__: + b.files.extend(phase.files) + return + self.buildPhases.append(phase) + + def add_dependency(self, depnd): + self.dependencies.append(depnd) + +# Root project object +class PBXProject(XCodeNode): + def __init__(self, name, version, env): + XCodeNode.__init__(self) + + if not isinstance(env.PROJ_CONFIGURATION, dict): + raise Errors.WafError("Error: env.PROJ_CONFIGURATION must be a dictionary. This is done for you if you do not define one yourself. However, did you load the xcode module at the end of your wscript configure() ?") + + # Retrieve project configuration + configurations = [] + for config_name, settings in env.PROJ_CONFIGURATION.items(): + cf = XCBuildConfiguration(config_name, settings) + configurations.append(cf) + + self.buildConfigurationList = XCConfigurationList(configurations) + self.compatibilityVersion = version[0] + self.hasScannedForEncodings = 1 + self.mainGroup = PBXGroup(name) + self.projectRoot = "" + self.projectDirPath = "" + self.targets = [] + self._objectVersion = version[1] + + def create_target_dependency(self, target, name): + """ : param target : PXBNativeTarget """ + proxy = PBXContainerItemProxy(self, target, name) + dependency = PBXTargetDependency(target, proxy) + return dependency + + def write(self, file): + + # Make sure this is written only once + if self._been_written: + return + + w = file.write + w("// !$*UTF8*$!\n") + w("{\n") + w("\tarchiveVersion = 1;\n") + w("\tclasses = {\n") + w("\t};\n") + w("\tobjectVersion = %d;\n" % self._objectVersion) + w("\tobjects = {\n\n") + + XCodeNode.write(self, file) + + w("\t};\n") + w("\trootObject = %s;\n" % self._id) + w("}\n") + + def add_target(self, target): + self.targets.append(target) + + def get_target(self, name): + """ Get a reference to PBXNativeTarget if it exists """ + for t in self.targets: + if t.name == name: + return t + return None + +@TaskGen.feature('c', 'cxx') +@TaskGen.after('propagate_uselib_vars', 'apply_incpaths') +def process_xcode(self): + bld = self.bld + try: + p = bld.project + except AttributeError: + return + + if not hasattr(self, 'target_type'): + return + + products_group = bld.products_group + + target_group = PBXGroup(self.name) + p.mainGroup.children.append(target_group) + + # Determine what type to build - framework, app bundle etc. + target_type = getattr(self, 'target_type', 'app') + if target_type not in TARGET_TYPES: + raise Errors.WafError("Target type '%s' does not exists. Available options are '%s'. In target '%s'" % (target_type, "', '".join(TARGET_TYPES.keys()), self.name)) + else: + target_type = TARGET_TYPES[target_type] + file_ext = target_type[2] + + # Create the output node + target_node = self.path.find_or_declare(self.name+file_ext) + target = PBXNativeTarget(self.name, target_node, target_type, [], []) + + products_group.children.append(target.productReference) + + # Pull source files from the 'source' attribute and assign them to a UI group. + # Use a default UI group named 'Source' unless the user + # provides a 'group_files' dictionary to customize the UI grouping. 
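For illustration, a target can opt into custom groups like this (a sketch; the paths are placeholders relative to the project root):

    bld.program(
        source='src/main.cpp src/engine.cpp include/engine.h',
        group_files={
            'Application': 'src/main.cpp',
            'Engine':      'src/engine.cpp include/engine.h',
        },
        target='myapp')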
+ sources = getattr(self, 'source', []) + if hasattr(self, 'group_files'): + group_files = getattr(self, 'group_files', []) + for grpname,files in group_files.items(): + group = bld.create_group(grpname, files) + target_group.children.append(group) + else: + group = bld.create_group('Source', sources) + target_group.children.append(group) + + # Create a PBXFileReference for each source file. + # If the source file already exists as a PBXFileReference in any of the UI groups, then + # reuse that PBXFileReference object (XCode does not like it if we don't reuse) + for idx, path in enumerate(sources): + fileref = PBXFileReference(path.name, path.abspath()) + existing_fileref = target_group.find_fileref(fileref) + if existing_fileref: + sources[idx] = existing_fileref + else: + sources[idx] = fileref + + # If the 'source' attribute contains any file extension that XCode can't work with, + # then remove it. The allowed file extensions are defined in XCODE_EXTS. + is_valid_file_extension = lambda file: os.path.splitext(file.path)[1] in XCODE_EXTS + sources = list(filter(is_valid_file_extension, sources)) + + buildfiles = [bld.unique_buildfile(PBXBuildFile(x)) for x in sources] + target.add_build_phase(PBXSourcesBuildPhase(buildfiles)) + + # Check if any framework to link against is some other target we've made + libs = getattr(self, 'tmp_use_seen', []) + for lib in libs: + use_target = p.get_target(lib) + if use_target: + # Create an XCode dependency so that XCode knows to build the other target before this target + dependency = p.create_target_dependency(use_target, use_target.name) + target.add_dependency(dependency) + + buildphase = PBXFrameworksBuildPhase([PBXBuildFile(use_target.productReference)]) + target.add_build_phase(buildphase) + if lib in self.env.LIB: + self.env.LIB = list(filter(lambda x: x != lib, self.env.LIB)) + + # If 'export_headers' is present, add files to the Headers build phase in xcode. + # These are files that'll get packed into the Framework for instance. 
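For example, a framework could export its public headers while an application pulls it in through 'use' (a sketch; target and file names are placeholders):

    bld.framework(
        source='src/engine.cpp include/engine.h',
        export_headers='include/engine.h',
        target='Engine')

    bld.app(
        source='src/main.mm',
        use='Engine',
        target='MyApp')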
+ exp_hdrs = getattr(self, 'export_headers', []) + hdrs = bld.as_nodes(Utils.to_list(exp_hdrs)) + files = [p.mainGroup.find_fileref(PBXFileReference(n.name, n.abspath())) for n in hdrs] + files = [PBXBuildFile(f, {'ATTRIBUTES': ('Public',)}) for f in files] + buildphase = PBXHeadersBuildPhase(files) + target.add_build_phase(buildphase) + + # Merge frameworks and libs into one list, and prefix the frameworks + frameworks = Utils.to_list(self.env.FRAMEWORK) + frameworks = ' '.join(['-framework %s' % (f.split('.framework')[0]) for f in frameworks]) + + libs = Utils.to_list(self.env.STLIB) + Utils.to_list(self.env.LIB) + libs = ' '.join(bld.env['STLIB_ST'] % t for t in libs) + + # Override target specific build settings + bldsettings = { + 'HEADER_SEARCH_PATHS': ['$(inherited)'] + self.env['INCPATHS'], + 'LIBRARY_SEARCH_PATHS': ['$(inherited)'] + Utils.to_list(self.env.LIBPATH) + Utils.to_list(self.env.STLIBPATH) + Utils.to_list(self.env.LIBDIR), + 'FRAMEWORK_SEARCH_PATHS': ['$(inherited)'] + Utils.to_list(self.env.FRAMEWORKPATH), + 'OTHER_LDFLAGS': libs + ' ' + frameworks + ' ' + ' '.join(bld.env['LINKFLAGS']), + 'OTHER_CPLUSPLUSFLAGS': Utils.to_list(self.env['CXXFLAGS']), + 'OTHER_CFLAGS': Utils.to_list(self.env['CFLAGS']), + 'INSTALL_PATH': [], + 'GCC_PREPROCESSOR_DEFINITIONS': self.env['DEFINES'] + } + + # Install path + installpaths = Utils.to_list(getattr(self, 'install', [])) + prodbuildfile = PBXBuildFile(target.productReference) + for instpath in installpaths: + bldsettings['INSTALL_PATH'].append(instpath) + target.add_build_phase(PBXCopyFilesBuildPhase([prodbuildfile], instpath)) + + if not bldsettings['INSTALL_PATH']: + del bldsettings['INSTALL_PATH'] + + # Create build settings which can override the project settings. Defaults to none if user + # did not pass argument. This will be filled up with target specific + # search paths, libs to link etc. + settings = getattr(self, 'settings', {}) + + # The keys represents different build configuration, e.g. Debug, Release and so on.. + # Insert our generated build settings to all configuration names + keys = set(settings.keys()) | set(bld.env.PROJ_CONFIGURATION.keys()) + for k in keys: + if k in settings: + settings[k].update(bldsettings) + else: + settings[k] = bldsettings + + for k,v in settings.items(): + target.add_configuration(XCBuildConfiguration(k, v)) + + p.add_target(target) + + +class xcode(Build.BuildContext): + cmd = 'xcode6' + fun = 'build' + + def as_nodes(self, files): + """ Returns a list of waflib.Nodes from a list of string of file paths """ + nodes = [] + for x in files: + if not isinstance(x, str): + d = x + else: + d = self.srcnode.find_node(x) + if not d: + raise Errors.WafError('File \'%s\' was not found' % x) + nodes.append(d) + return nodes + + def create_group(self, name, files): + """ + Returns a new PBXGroup containing the files (paths) passed in the files arg + :type files: string + """ + group = PBXGroup(name) + """ + Do not use unique file reference here, since XCode seem to allow only one file reference + to be referenced by a group. + """ + files_ = [] + for d in self.as_nodes(Utils.to_list(files)): + fileref = PBXFileReference(d.name, d.abspath()) + files_.append(fileref) + group.add(files_) + return group + + def unique_buildfile(self, buildfile): + """ + Returns a unique buildfile, possibly an existing one. + Use this after you've constructed a PBXBuildFile to make sure there is + only one PBXBuildFile for the same file in the same project. 
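Relatedly, the per-target 'install' and 'settings' attributes handled in process_xcode above can be supplied on the task generator (a sketch; the install path and setting values are placeholders):

    bld.framework(
        source='src/engine.cpp',
        target='Engine',
        install='/Library/Frameworks',
        settings={'Debug': {'ONLY_ACTIVE_ARCH': 'YES'}})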
+ """ + try: + build_files = self.build_files + except AttributeError: + build_files = self.build_files = {} + + if buildfile not in build_files: + build_files[buildfile] = buildfile + return build_files[buildfile] + + def execute(self): + """ + Entry point + """ + self.restore() + if not self.all_envs: + self.load_envs() + self.recurse([self.run_dir]) + + appname = getattr(Context.g_module, Context.APPNAME, os.path.basename(self.srcnode.abspath())) + + p = PBXProject(appname, ('Xcode 3.2', 46), self.env) + + # If we don't create a Products group, then + # XCode will create one, which entails that + # we'll start to see duplicate files in the UI + # for some reason. + products_group = PBXGroup('Products') + p.mainGroup.children.append(products_group) + + self.project = p + self.products_group = products_group + + # post all task generators + # the process_xcode method above will be called for each target + if self.targets and self.targets != '*': + (self._min_grp, self._exact_tg) = self.get_targets() + + self.current_group = 0 + while self.current_group < len(self.groups): + self.post_group() + self.current_group += 1 + + node = self.bldnode.make_node('%s.xcodeproj' % appname) + node.mkdir() + node = node.make_node('project.pbxproj') + with open(node.abspath(), 'w') as f: + p.write(f) + Logs.pprint('GREEN', 'Wrote %r' % node.abspath()) + +def bind_fun(tgtype): + def fun(self, *k, **kw): + tgtype = fun.__name__ + if tgtype == 'shlib' or tgtype == 'dylib': + features = 'cxx cxxshlib' + tgtype = 'dylib' + elif tgtype == 'framework': + features = 'cxx cxxshlib' + tgtype = 'framework' + elif tgtype == 'program': + features = 'cxx cxxprogram' + tgtype = 'exe' + elif tgtype == 'app': + features = 'cxx cxxprogram' + tgtype = 'app' + elif tgtype == 'stlib': + features = 'cxx cxxstlib' + tgtype = 'stlib' + lst = kw['features'] = Utils.to_list(kw.get('features', [])) + for x in features.split(): + if not x in kw['features']: + lst.append(x) + + kw['target_type'] = tgtype + return self(*k, **kw) + fun.__name__ = tgtype + setattr(Build.BuildContext, tgtype, fun) + return fun + +for xx in 'app framework dylib shlib stlib program'.split(): + bind_fun(xx) + diff --git a/waflib/fixpy2.py b/waflib/fixpy2.py index 1adc9e0c31..c99bff4b9a 100644 --- a/waflib/fixpy2.py +++ b/waflib/fixpy2.py @@ -1,24 +1,21 @@ #!/usr/bin/env python # encoding: utf-8 -# Thomas Nagy, 2010-2015 (ita) +# Thomas Nagy, 2010-2018 (ita) -""" -burn a book, save a tree -""" +from __future__ import with_statement import os all_modifs = {} def fixdir(dir): - """call all the substitution functions on the waf folders""" - global all_modifs + """Call all substitution functions on Waf folders""" for k in all_modifs: for v in all_modifs[k]: modif(os.path.join(dir, 'waflib'), k, v) def modif(dir, name, fun): - """execute a substitution function""" + """Call a substitution function""" if name == '*': lst = [] for y in '. 
Tools extras'.split(): @@ -30,24 +27,17 @@ def modif(dir, name, fun): return filename = os.path.join(dir, name) - f = open(filename, 'r') - try: + with open(filename, 'r') as f: txt = f.read() - finally: - f.close() txt = fun(txt) - f = open(filename, 'w') - try: + with open(filename, 'w') as f: f.write(txt) - finally: - f.close() def subst(*k): """register a substitution function""" def do_subst(fun): - global all_modifs for x in k: try: all_modifs[x].append(fun) @@ -60,13 +50,15 @@ def do_subst(fun): def r1(code): "utf-8 fixes for python < 2.6" code = code.replace('as e:', ',e:') - code = code.replace(".decode(sys.stdout.encoding or 'iso8859-1')", '') - code = code.replace('.encode()', '') - return code + code = code.replace(".decode(sys.stdout.encoding or'latin-1',errors='replace')", '') + return code.replace('.encode()', '') @subst('Runner.py') def r4(code): "generator syntax" - code = code.replace('next(self.biter)', 'self.biter.next()') - return code + return code.replace('next(self.biter)', 'self.biter.next()').replace('self.daemon = True', 'self.setDaemon(1)') + +@subst('Context.py') +def r5(code): + return code.replace("('Execution failure: %s'%str(e),ex=e)", "('Execution failure: %s'%str(e),ex=e),None,sys.exc_info()[2]") diff --git a/waflib/processor.py b/waflib/processor.py new file mode 100755 index 0000000000..eff2e69adf --- /dev/null +++ b/waflib/processor.py @@ -0,0 +1,68 @@ +#! /usr/bin/env python +# encoding: utf-8 +# Thomas Nagy, 2016-2018 (ita) + +import os, sys, traceback, base64, signal +try: + import cPickle +except ImportError: + import pickle as cPickle + +try: + import subprocess32 as subprocess +except ImportError: + import subprocess + +try: + TimeoutExpired = subprocess.TimeoutExpired +except AttributeError: + class TimeoutExpired(Exception): + pass + +def run(): + txt = sys.stdin.readline().strip() + if not txt: + # parent process probably ended + sys.exit(1) + [cmd, kwargs, cargs] = cPickle.loads(base64.b64decode(txt)) + cargs = cargs or {} + + if not 'close_fds' in kwargs: + # workers have no fds + kwargs['close_fds'] = False + + ret = 1 + out, err, ex, trace = (None, None, None, None) + try: + proc = subprocess.Popen(cmd, **kwargs) + try: + out, err = proc.communicate(**cargs) + except TimeoutExpired: + if kwargs.get('start_new_session') and hasattr(os, 'killpg'): + os.killpg(proc.pid, signal.SIGKILL) + else: + proc.kill() + out, err = proc.communicate() + exc = TimeoutExpired(proc.args, timeout=cargs['timeout'], output=out) + exc.stderr = err + raise exc + ret = proc.returncode + except Exception as e: + exc_type, exc_value, tb = sys.exc_info() + exc_lines = traceback.format_exception(exc_type, exc_value, tb) + trace = str(cmd) + '\n' + ''.join(exc_lines) + ex = e.__class__.__name__ + + # it is just text so maybe we do not need to pickle() + tmp = [ret, out, err, ex, trace] + obj = base64.b64encode(cPickle.dumps(tmp)) + sys.stdout.write(obj.decode()) + sys.stdout.write('\n') + sys.stdout.flush() + +while 1: + try: + run() + except KeyboardInterrupt: + break + diff --git a/wscript b/wscript index f8923890fb..7e48d57ae5 100644 --- a/wscript +++ b/wscript @@ -1,6 +1,6 @@ #! 
/usr/bin/env python # encoding: utf-8 -# Thomas Nagy, 2005-2015 +# Thomas Nagy, 2005-2018 """ to make a custom waf file use the option --tools @@ -9,8 +9,9 @@ To add a tool that does not exist in the folder compat15, pass an absolute path: ./waf-light --tools=compat15,/comp/waf/aba.py --prelude=$'\tfrom waflib.extras import aba\n\taba.foo()' """ +from __future__ import with_statement -VERSION="1.8.8" +VERSION="2.0.27" APPNAME='waf' REVISION='' @@ -21,41 +22,30 @@ zip_types = ['bz2', 'gz', 'xz'] PRELUDE = '' -#from tokenize import * -import tokenize - -import os, sys, re, io, optparse - -from waflib import Utils, Options, Logs +import os, sys, re, io, optparse, tokenize from hashlib import md5 +from waflib import Errors, Utils, Options, Logs, Scripting from waflib import Configure Configure.autoconfig = 1 def sub_file(fname, lst): - - f = open(fname, 'rU') - try: + with open(fname, 'r') as f: txt = f.read() - finally: - f.close() for (key, val) in lst: re_pat = re.compile(key, re.M) txt = re_pat.sub(val, txt) - f = open(fname, 'w') - try: + with open(fname, 'w') as f: f.write(txt) - finally: - f.close() def to_bytes(x): if sys.hexversion>0x300000f: return x.encode() return x -print("------> Executing code from the top-level wscript <-----") +Logs.warn('------> Executing code from the top-level wscript <-----') def init(ctx): if Options.options.setver: # maintainer only (ita) ver = Options.options.setver @@ -70,9 +60,10 @@ def init(ctx): try: rev = ctx.cmd_and_log("git rev-parse HEAD").strip() - pats.append(('^WAFREVISION(.*)', 'WAFREVISION="%s"' % rev)) - except Exception: + except Errors.WafError: rev = '' + else: + pats.append(('^WAFREVISION(.*)', 'WAFREVISION="%s"' % rev)) sub_file('waflib/Context.py', pats) @@ -88,9 +79,15 @@ def options(opt): opt.add_option('--make-waf', action='store_true', default=True, help='creates the waf script', dest='waf') + opt.add_option('--interpreter', action='store', default=None, + help='specify the #! 
line on top of the waf file', dest='interpreter') + opt.add_option('--sign', action='store_true', default=False, help='make a signed file', dest='signed') - opt.add_option('--zip-type', action='store', default='bz2', + default_zip = 'bz2' + if os.name == 'java': + default_zip = 'gz' + opt.add_option('--zip-type', action='store', default=default_zip, help='specify the zip type [Allowed values: %s]' % ' '.join(zip_types), dest='zip') opt.add_option('--make-batch', action='store_true', default=False, @@ -104,6 +101,7 @@ def options(opt): # those ones are not too interesting opt.add_option('--set-version', default='', help='sets the version number for waf releases (for the maintainer)', dest='setver') + opt.add_option('--set-name', default='waf', help=optparse.SUPPRESS_HELP, dest='wafname') opt.add_option('--strip', action='store_true', default=True, help='shrinks waf (strip docstrings, saves 33kb)', @@ -115,9 +113,10 @@ def options(opt): opt.add_option('--coretools', action='store', help='Comma-separated core tools to add, eg: "vala,tex" [Default: all of them]', dest='coretools', default='default') opt.add_option('--prelude', action='store', help='Code to execute before calling waf', dest='prelude', default=PRELUDE) + opt.add_option('--namesfrom', action='store', help='Obtain the file names from a model archive', dest='namesfrom', default=None) opt.load('python') -def process_tokens(tokens): +def process_tokens(tokens, filename): accu = [] prev = tokenize.NEWLINE @@ -148,7 +147,10 @@ def process_tokens(tokens): line_buf.append(token) elif type == tokenize.STRING: if not line_buf and token.startswith('"'): pass - else: line_buf.append(token) + else: + if token.lower().startswith('f'): + raise ValueError('Found f-strings in %s which require Python >= 3.6, use "waf-light --nostrip"' % filename) + line_buf.append(token) elif type == tokenize.COMMENT: pass elif type == tokenize.OP: @@ -159,10 +161,10 @@ def process_tokens(tokens): if token != '\n': prev = type - body = "".join(accu) + body = ''.join(accu) return body -deco_re = re.compile('(def|class)\\s+(\w+)\\(.*') +deco_re = re.compile('(def|class)\\s+(\\w+)\\(.*') def process_decorators(body): lst = body.splitlines() accu = [] @@ -176,50 +178,37 @@ def process_decorators(body): if not name: raise IOError("decorator not followed by a function!" 
+ line) for x in buf: - all_deco.append("%s(%s)" % (x, name)) + all_deco.append('%s(%s)' % (x, name)) accu.append(line) buf = [] else: accu.append(line) - return "\n".join(accu+all_deco) + return '\n'.join(accu+all_deco) def sfilter(path): - if path.endswith('.py') : if Options.options.strip_comments: if sys.version_info[0] >= 3: - f = open(path, "rb") - try: + with open(path, 'rb') as f: tk = tokenize.tokenize(f.readline) next(tk) # the first one is always tokenize.ENCODING for Python 3, ignore it - cnt = process_tokens(tk) - finally: - f.close() + cnt = process_tokens(tk, path) else: - f = open(path, "r") - try: - cnt = process_tokens(tokenize.generate_tokens(f.readline)) - finally: - f.close() + with open(path, 'r') as f: + cnt = process_tokens(tokenize.generate_tokens(f.readline), path) else: - f = open(path, "r") - try: + with open(path, 'r') as f: cnt = f.read() - finally: - f.close() - # WARNING: since we now require python 2.4, we do not process the decorators anymore - # if you need such a thing, uncomment the code below: + # WARNING: since python >= 2.5 is required, decorators are not processed anymore + # uncomment the following to enable decorator replacement: #cnt = process_decorators(cnt) #if cnt.find('set(') > -1: # cnt = 'import sys\nif sys.hexversion < 0x020400f0: from sets import Set as set\n' + cnt cnt = '#! /usr/bin/env python\n# encoding: utf-8\n# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file\n\n' + cnt else: - f = open(path, "r") - try: + with open(path, 'r') as f: cnt = f.read() - finally: - f.close() if sys.hexversion > 0x030000f0: return (io.BytesIO(cnt.encode('utf-8')), len(cnt.encode('utf-8')), cnt) @@ -227,19 +216,44 @@ def sfilter(path): def create_waf(self, *k, **kw): mw = 'tmp-waf-'+VERSION - print("-> preparing %r" % mw) + print('-> preparing %r' % mw) - import tarfile, re, zipfile + import tarfile, zipfile zipType = Options.options.zip.strip().lower() if zipType not in zip_types: zipType = zip_types[0] - + directory_files = {} files = [] add3rdparty = [] for x in Options.options.add3rdparty.split(','): - if os.path.isabs(x): + if os.path.isdir(x): + # Create mapping from files absolute path to path in module + # directory (for module mylib): + # + # {"/home/path/mylib/__init__.py": "mylib/__init__.py", + # "/home/path/mylib/lib.py": "mylib/lib.py", + # "/home/path/mylib/sub/sub.py": "mylib/sub/lib.py" + # } + # + x_dir = self.generator.bld.root.find_dir( + os.path.abspath(os.path.expanduser(x))) + + file_list = x_dir.ant_glob('**/*.py') + + for f in file_list: + + file_from = f.abspath() + file_to = os.path.join(x_dir.name, f.path_from(x_dir)) + + # If this is executed on Windows, then file_to will contain + # '\' path separators. These should be changed to '/', otherwise + # the added tools will not be accessible on Unix systems. + directory_files[file_from] = file_to.replace('\\', '/') + files.append(file_from) + + elif os.path.isabs(x): files.append(x) else: add3rdparty.append(x + '.py') @@ -248,39 +262,56 @@ def create_waf(self, *k, **kw): for x in Options.options.coretools.split(','): coretools.append(x + '.py') - for d in '. 
Tools extras'.split(): - dd = os.path.join('waflib', d) - for k in os.listdir(dd): - if k == '__init__.py': - files.append(os.path.join(dd, k)) + up_node = self.generator.bld.path + for node in up_node.find_dir('waflib').ant_glob(incl=['*.py', 'Tools/*.py', 'extras/*.py']): + relpath = node.path_from(up_node) + if node.name == '__init__.py': + files.append(relpath) + continue + if node.parent.name == 'Tools' and Options.options.coretools != 'default': + if node.name not in coretools: continue - if d == 'Tools' and Options.options.coretools != 'default': - if not k in coretools: - continue - if d == 'extras': - if not k in add3rdparty: - continue - if k.endswith('.py'): - files.append(os.path.join(dd, k)) + if node.parent.name == 'extras': + if node.name not in add3rdparty: + continue + files.append(relpath) + + if Options.options.namesfrom: + with tarfile.open(Options.options.namesfrom) as tar: + oldfiles = files + files = [x.name for x in tar.getmembers()] + if set(files) ^ set(oldfiles): + Logs.warn('The archive model has differences:') + Logs.warn('- Added %r', list(set(files) - set(oldfiles))) + Logs.warn('- Removed %r', list(set(oldfiles) - set(files))) #open a file as tar.[extension] for writing tar = tarfile.open('%s.tar.%s' % (mw, zipType), "w:%s" % zipType) z = zipfile.ZipFile("zip/waflib.zip", "w", compression=zipfile.ZIP_DEFLATED) for x in files: - tarinfo = tar.gettarinfo(x, x) + try: + tarinfo = tar.gettarinfo(x, x) + except NotImplementedError: + # jython 2.7.0 workaround + tarinfo = tarfile.TarInfo(x) tarinfo.uid = tarinfo.gid = 0 tarinfo.uname = tarinfo.gname = 'root' + if os.environ.get('SOURCE_DATE_EPOCH'): + tarinfo.mtime = int(os.environ.get('SOURCE_DATE_EPOCH')) (code, size, cnt) = sfilter(x) tarinfo.size = size - if os.path.isabs(x): + if x in directory_files: + tarinfo.name = 'waflib/extras/' + directory_files[x] + elif os.path.isabs(x): tarinfo.name = 'waflib/extras/' + os.path.split(x)[1] - print(" adding %s as %s" % (x, tarinfo.name)) - + print(' adding %s as %s' % (x, tarinfo.name)) def dest(x): - if os.path.isabs(x): - return os.path.join("extras", os.path.basename(x)) + if x in directory_files: + return os.path.join('waflib', 'extras', directory_files[x]) + elif os.path.isabs(x): + return os.path.join('waflib', 'extras', os.path.basename(x)) else: return os.path.normpath(os.path.relpath(x, ".")) @@ -289,11 +320,12 @@ def create_waf(self, *k, **kw): tar.close() z.close() - f = open('waf-light', 'rU') - try: + with open('waf-light', 'r') as f: code1 = f.read() - finally: - f.close() + + # tune the application name if necessary + if Options.options.wafname != 'waf': + Options.options.prelude = '\tfrom waflib import Context\n\tContext.WAFNAME=%r\n' % Options.options.wafname + Options.options.prelude # now store the revision unique number in waf code1 = code1.replace("if sys.hexversion<0x206000f:\n\traise ImportError('Python >= 2.6 is required to create the waf file')\n", '') @@ -302,8 +334,8 @@ def create_waf(self, *k, **kw): # when possible, set the git revision in the waf file bld = self.generator.bld try: - rev = bld.cmd_and_log("git rev-parse HEAD", quiet=0).strip() - except Exception: + rev = bld.cmd_and_log('git rev-parse HEAD', quiet=0).strip() + except Errors.WafError: rev = '' else: reg = re.compile('^GIT(.*)', re.M) @@ -321,13 +353,10 @@ def create_waf(self, *k, **kw): elif zipType == 'xz': code1 = code1.replace('bunzip2', 'xz -d') - f = open('%s.tar.%s' % (mw, zipType), 'rb') - try: + with open('%s.tar.%s' % (mw, zipType), 'rb') as f: cnt = f.read() - 
finally: - f.close() - # the REVISION value is the md5 sum of the binary blob (facilitate audits) + # the REVISION value is the md5 sum of the compressed data (facilitate audits) m = md5() m.update(cnt) REVISION = m.hexdigest() @@ -343,7 +372,7 @@ def create_waf(self, *k, **kw): s = chr(i) + chr(j) if -1 == kd.find(s.encode()): return (kd.replace(ch.encode(), s.encode()), s) - raise + raise ValueError('Could not find a proper encoding') # The reverse order prevents collisions (cnt, C3) = find_unused(cnt, '\x00') @@ -351,8 +380,10 @@ def create_waf(self, *k, **kw): (cnt, C1) = find_unused(cnt, '\n') ccc = code1.replace("C1='x'", "C1='%s'" % C1).replace("C2='x'", "C2='%s'" % C2).replace("C3='x'", "C3='%s'" % C3) - f = open('waf', 'wb') - try: + if getattr(Options.options, 'interpreter', None): + ccc = ccc.replace('#!/usr/bin/env python', Options.options.interpreter) + + with open('waf', 'wb') as f: f.write(ccc.encode()) f.write(to_bytes('#==>\n#')) f.write(cnt) @@ -370,37 +401,27 @@ def create_waf(self, *k, **kw): sig = Utils.readf('waf.asc') sig = sig.replace('\r', '').replace('\n', '\\n') - f.write('#') - f.write(sig) - f.write('\n') - finally: - f.close() + f.write(to_bytes('#')) + f.write(to_bytes(sig)) + f.write(to_bytes('\n')) + os.remove('waf.asc') if sys.platform == 'win32' or Options.options.make_batch: - f = open('waf.bat', 'w') - try: - f.write('@python -x "%~dp0waf" %*\n@exit /b %ERRORLEVEL%\n') - finally: - f.close() + with open('waf.bat', 'w') as f: + f.write('@setlocal\n@set PYEXE=python\n@where %PYEXE% 1>NUL 2>NUL\n@if %ERRORLEVEL% neq 0 set PYEXE=py\n@%PYEXE% -x "%~dp0waf" %*\n@exit /b %ERRORLEVEL%\n') if sys.platform != 'win32': os.chmod('waf', Utils.O755) os.remove('%s.tar.%s' % (mw, zipType)) -def make_copy(inf, outf): - (a, b, cnt) = sfilter(inf) - f = open(outf, "wb") - try: - f.write(cnt) - finally: - f.close() - def configure(conf): conf.load('python') - conf.check_python_version((2,4)) def build(bld): - waf = bld.path.make_node('waf') # create the node right here - bld(name='create_waf', rule=create_waf, target=waf, always=True, color='PINK', update_outputs=True) + waf = bld.path.make_node('waf') # do not use a build directory for this file + bld(name='create_waf', rule=create_waf, target=waf, always=True, color='PINK') +class Dist(Scripting.Dist): + def get_excl(self): + return super(self.__class__, self).get_excl() + ' **/waflib.zip' diff --git a/zip/waf-zip b/zip/waf-zip index 9a7553a259..06e8ccee0f 100755 --- a/zip/waf-zip +++ b/zip/waf-zip @@ -1,5 +1,5 @@ #!/usr/bin/env python -# encoding: ISO8859-1 +# encoding: latin-1 # Thomas Nagy, 2005-2011 """
-
%s%s%s