#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Autowaf, useful waf utilities with support for recursive projects
# Copyright 2008-2011 David Robillard
#
# Licensed under the GNU GPL v2 or later, see COPYING file for details.

import glob
import os
import subprocess
import sys

from waflib import Configure, Context, Logs, Node, Options, Task, Utils
from waflib.TaskGen import feature, before, after

# True when this project is being built as a sub project of a larger one
global g_is_child
g_is_child = False

# Only run autowaf hooks once (even if sub projects call several times)
global g_step
g_step = 0

# Compute dependencies globally
#import preproc
#preproc.go_absolute = True

@feature('c', 'cxx')
@after('apply_incpaths')
def include_config_h(self):
    """Add the build directory (where generated config headers live) to INCPATHS."""
    self.env.append_value('INCPATHS', self.bld.bldnode.abspath())

def set_options(opt, debug_by_default=False):
    "Add standard autowaf options if they haven't been added yet"
    global g_step
    if g_step > 0:
        return

    # Install directory options
    dirs_options = opt.add_option_group('Installation directories', '')

    # Move --prefix and --destdir to directory options group
    for k in ('--prefix', '--destdir'):
        option = opt.parser.get_option(k)
        if option:
            opt.parser.remove_option(k)
            dirs_options.add_option(option)

    # Standard directory options
    dirs_options.add_option('--bindir', type='string',
                            help="Executable programs [Default: PREFIX/bin]")
    dirs_options.add_option('--configdir', type='string',
                            help="Configuration data [Default: PREFIX/etc]")
    dirs_options.add_option('--datadir', type='string',
                            help="Shared data [Default: PREFIX/share]")
    dirs_options.add_option('--includedir', type='string',
                            help="Header files [Default: PREFIX/include]")
    dirs_options.add_option('--libdir', type='string',
                            help="Libraries [Default: PREFIX/lib]")
    dirs_options.add_option('--mandir', type='string',
                            help="Manual pages [Default: DATADIR/man]")
    dirs_options.add_option('--docdir', type='string',
                            help="HTML documentation [Default: DATADIR/doc]")

    # Build options
    if debug_by_default:
        opt.add_option('--optimize', action='store_false', default=True, dest='debug',
                       help="Build optimized binaries")
    else:
        opt.add_option('--debug', action='store_true', default=False, dest='debug',
                       help="Build debuggable binaries")
    opt.add_option('--grind', action='store_true', default=False, dest='grind',
                   help="Run tests in valgrind")
    opt.add_option('--strict', action='store_true', default=False, dest='strict',
                   help="Use strict compiler flags and show all warnings")
    opt.add_option('--docs', action='store_true', default=False, dest='docs',
                   help="Build documentation - requires doxygen")

    # LV2 options
    opt.add_option('--lv2-user', action='store_true', default=False, dest='lv2_user',
                   help="Install LV2 bundles to user location")
    opt.add_option('--lv2-system', action='store_true', default=False, dest='lv2_system',
                   help="Install LV2 bundles to system location")
    dirs_options.add_option('--lv2dir', type='string',
                            help="LV2 bundles [Default: LIBDIR/lv2]")
    g_step = 1

def check_header(conf, lang, name, define='', mandatory=True):
    "Check for a header"
    includes = '' # search default system include paths
    if sys.platform == "darwin":
        includes = '/opt/local/include'

    if lang == 'c':
        check_func = conf.check_cc
    elif lang == 'cxx':
        check_func = conf.check_cxx
    else:
        Logs.error("Unknown header language `%s'" % lang)
        return

    if define != '':
        check_func(header_name=name, includes=includes,
                   define_name=define, mandatory=mandatory)
    else:
        check_func(header_name=name, includes=includes, mandatory=mandatory)

def nameify(name):
    """Map a library name to a safe identifier (e.g. 'lib-c++.x' -> 'lib_cPP_x')."""
    return name.replace('/', '_').replace('++', 'PP').replace('-', '_').replace('.', '_')

def define(conf, var_name, value):
    """Define var_name both as a config header define and an env variable."""
    conf.define(var_name, value)
    conf.env[var_name] = value

def check_pkg(conf, name, **args):
    "Check for a package iff it hasn't been checked for yet"
    if args['uselib_store'].lower() in conf.env['AUTOWAF_LOCAL_LIBS']:
        return
    class CheckType:
        OPTIONAL = 1
        MANDATORY = 2
    var_name = 'CHECKED_' + nameify(args['uselib_store'])
    check = var_name not in conf.env
    mandatory = 'mandatory' not in args or args['mandatory']
    if not check and 'atleast_version' in args:
        # Re-check if version is newer than previous check
        checked_version = conf.env['VERSION_' + name]
        if checked_version and checked_version < args['atleast_version']:
            check = True
    if not check and mandatory and conf.env[var_name] == CheckType.OPTIONAL:
        # Re-check if previous check was optional but this one is mandatory
        check = True
    if check:
        conf.check_cfg(package=name, args="--cflags --libs", **args)
        if 'atleast_version' in args:
            conf.env['VERSION_' + name] = args['atleast_version']
        if mandatory:
            conf.env[var_name] = CheckType.MANDATORY
        else:
            conf.env[var_name] = CheckType.OPTIONAL


def normpath(path):
    """Normalize a path, using forward slashes even on Windows."""
    if sys.platform == 'win32':
        return os.path.normpath(path).replace('\\', '/')
    else:
        return os.path.normpath(path)

def configure(conf):
    """Global autowaf configuration: directories, LV2 paths, compiler flags."""
    global g_step
    if g_step > 1:
        return
    def append_cxx_flags(vals):
        conf.env.append_value('CFLAGS', vals.split())
        conf.env.append_value('CXXFLAGS', vals.split())
    print('')
    display_header('Global Configuration')

    if Options.options.docs:
        conf.load('doxygen')

    conf.env['DOCS'] = Options.options.docs
    conf.env['DEBUG'] = Options.options.debug
    conf.env['STRICT'] = Options.options.strict
    conf.env['PREFIX'] = normpath(os.path.abspath(os.path.expanduser(conf.env['PREFIX'])))

    def config_dir(var, opt, default):
        # Use the command line option if given, otherwise the default
        if opt:
            conf.env[var] = normpath(opt)
        else:
            conf.env[var] = normpath(default)

    opts = Options.options
    prefix = conf.env['PREFIX']

    config_dir('BINDIR', opts.bindir, os.path.join(prefix, 'bin'))
    config_dir('SYSCONFDIR', opts.configdir, os.path.join(prefix, 'etc'))
    config_dir('DATADIR', opts.datadir, os.path.join(prefix, 'share'))
    config_dir('INCLUDEDIR', opts.includedir, os.path.join(prefix, 'include'))
    config_dir('LIBDIR', opts.libdir, os.path.join(prefix, 'lib'))
    config_dir('MANDIR', opts.mandir, os.path.join(conf.env['DATADIR'], 'man'))
    config_dir('DOCDIR', opts.docdir, os.path.join(conf.env['DATADIR'], 'doc'))

    # LV2 bundle install location: explicit > per-user > system > LIBDIR/lv2
    if Options.options.lv2dir:
        conf.env['LV2DIR'] = Options.options.lv2dir
    elif Options.options.lv2_user:
        if sys.platform == "darwin":
            conf.env['LV2DIR'] = os.path.join(os.getenv('HOME'), 'Library/Audio/Plug-Ins/LV2')
        elif sys.platform == "win32":
            conf.env['LV2DIR'] = os.path.join(os.getenv('APPDATA'), 'LV2')
        else:
            conf.env['LV2DIR'] = os.path.join(os.getenv('HOME'), '.lv2')
    elif Options.options.lv2_system:
        if sys.platform == "darwin":
            conf.env['LV2DIR'] = '/Library/Audio/Plug-Ins/LV2'
        elif sys.platform == "win32":
            conf.env['LV2DIR'] = os.path.join(os.getenv('COMMONPROGRAMFILES'), 'LV2')
        else:
            conf.env['LV2DIR'] = os.path.join(conf.env['LIBDIR'], 'lv2')
    else:
        conf.env['LV2DIR'] = os.path.join(conf.env['LIBDIR'], 'lv2')

    conf.env['LV2DIR'] = normpath(conf.env['LV2DIR'])

    if Options.options.docs:
        # MRF: doxygen = conf.find_program('doxygen')
        # if not doxygen:
        #     conf.fatal("Doxygen is required to build with --docs")

        dot = conf.find_program('dot')
        if not dot:
            conf.fatal("Graphviz (dot) is required to build with --docs")

    if Options.options.debug:
        # Debug build: replace (not append) optimization flags
        conf.env['CFLAGS'] = ['-O0', '-g']
        conf.env['CXXFLAGS'] = ['-O0', '-g']
    else:
        append_cxx_flags('-DNDEBUG')

    if Options.options.strict:
        conf.env.append_value('CFLAGS', ['-std=c99', '-pedantic'])
        conf.env.append_value('CXXFLAGS', ['-ansi', '-Woverloaded-virtual',
                                           '-Wnon-virtual-dtor'])
        append_cxx_flags('-Wall -Wextra -Wno-unused-parameter')

    if not conf.env['MSVC_COMPILER']:
        append_cxx_flags('-fshow-column')

    conf.env.prepend_value('CFLAGS', '-I' + os.path.abspath('.'))
    conf.env.prepend_value('CXXFLAGS', '-I' + os.path.abspath('.'))

    display_msg(conf, "Install prefix", conf.env['PREFIX'])
    display_msg(conf, "Debuggable build", str(conf.env['DEBUG']))
    display_msg(conf, "Strict compiler flags", str(conf.env['STRICT']))
    display_msg(conf, "Build documentation", str(conf.env['DOCS']))
    print('')

    g_step = 2

def set_c99_mode(conf):
    """Enable C99 compilation (or the closest MSVC equivalent)."""
    if conf.env.MSVC_COMPILER:
        # MSVC has no hope or desire to compile C99, just compile as C++
        conf.env.append_unique('CFLAGS', ['-TP', '-MD'])
    else:
        conf.env.append_unique('CFLAGS', '-std=c99')

def set_local_lib(conf, name, has_objects):
    """Register a library that is built locally within this (possibly recursive) project."""
    var_name = 'HAVE_' + nameify(name.upper())
    define(conf, var_name, 1)
    if has_objects:
        if not isinstance(conf.env['AUTOWAF_LOCAL_LIBS'], dict):
            conf.env['AUTOWAF_LOCAL_LIBS'] = {}
        conf.env['AUTOWAF_LOCAL_LIBS'][name.lower()] = True
    else:
        if not isinstance(conf.env['AUTOWAF_LOCAL_HEADERS'], dict):
            conf.env['AUTOWAF_LOCAL_HEADERS'] = {}
        conf.env['AUTOWAF_LOCAL_HEADERS'][name.lower()] = True

def append_property(obj, key, val):
    """Append val to obj.key, creating the attribute if necessary."""
    if hasattr(obj, key):
        setattr(obj, key, getattr(obj, key) + val)
    else:
        setattr(obj, key, val)

def use_lib(bld, obj, libs):
    """Make task generator obj use libs (space-separated), local or external."""
    abssrcdir = os.path.abspath('.')
    libs_list = libs.split()
    for l in libs_list:
        in_headers = l.lower() in bld.env['AUTOWAF_LOCAL_HEADERS']
        in_libs = l.lower() in bld.env['AUTOWAF_LOCAL_LIBS']
        if in_libs:
            append_property(obj, 'use', ' lib%s ' % l.lower())
            append_property(obj, 'framework', bld.env['FRAMEWORK_' + l])
        if in_headers or in_libs:
            inc_flag = '-iquote ' + os.path.join(abssrcdir, l.lower())
            for f in ['CFLAGS', 'CXXFLAGS']:
                if inc_flag not in bld.env[f]:
                    bld.env.prepend_value(f, inc_flag)
        else:
            append_property(obj, 'uselib', ' ' + l)

def display_header(title):
    Logs.pprint('BOLD', title)

def display_msg(conf, msg, status = None, color = None):
    # NOTE: the color parameter is accepted for API compatibility but is
    # always overridden based on the status value below.
    color = 'CYAN'
    if isinstance(status, bool) and status or status == "True":
        color = 'GREEN'
    elif isinstance(status, bool) and not status or status == "False":
        color = 'YELLOW'
    Logs.pprint('BOLD', " *", sep='')
    Logs.pprint('NORMAL', "%s" % msg.ljust(conf.line_just - 3), sep='')
    Logs.pprint('BOLD', ":", sep='')
    Logs.pprint(color, status)

def link_flags(env, lib):
    """Return the linker flags string for lib (e.g. '-lfoo -lbar')."""
    return ' '.join(map(lambda x: env['LIB_ST'] % x, env['LIB_' + lib]))

def compile_flags(env, lib):
    """Return the include flags string for lib (e.g. '-I/usr/include/foo')."""
    return ' '.join(map(lambda x: env['CPPPATH_ST'] % x, env['INCLUDES_' + lib]))

def set_recursive():
    global g_is_child
    g_is_child = True

def is_child():
    global g_is_child
    return g_is_child

# Pkg-config file
def build_pc(bld, name, version, version_suffix, libs, subst_dict=None):
    '''Build a pkg-config file for a library.
    name -- uppercase variable name (e.g. 'SOMENAME')
    version -- version string (e.g. '1.2.3')
    version_suffix -- name version suffix (e.g. '2')
    libs -- string/list of dependencies (e.g. 'LIBFOO GLIB')
    '''
    # None sentinel instead of a mutable {} default: this function mutates
    # subst_dict, which would otherwise leak state between calls.
    if subst_dict is None:
        subst_dict = {}

    pkg_prefix = bld.env['PREFIX']
    if pkg_prefix and pkg_prefix[-1] == '/':  # guard: PREFIX may be empty
        pkg_prefix = pkg_prefix[:-1]

    target = name.lower()
    if version_suffix != '':
        target += '-' + version_suffix
    target += '.pc'

    libdir = bld.env['LIBDIR']
    if libdir.startswith(pkg_prefix):
        libdir = libdir.replace(pkg_prefix, '${exec_prefix}')

    includedir = bld.env['INCLUDEDIR']
    if includedir.startswith(pkg_prefix):
        includedir = includedir.replace(pkg_prefix, '${prefix}')

    obj = bld(features     = 'subst',
              source       = '%s.pc.in' % name.lower(),
              target       = target,
              install_path = os.path.join(bld.env['LIBDIR'], 'pkgconfig'),
              exec_prefix  = '${prefix}',
              PREFIX       = pkg_prefix,
              EXEC_PREFIX  = '${prefix}',
              LIBDIR       = libdir,
              INCLUDEDIR   = includedir)

    if not isinstance(libs, list):
        libs = libs.split()

    subst_dict[name + '_VERSION'] = version
    subst_dict[name + '_MAJOR_VERSION'] = version[0:version.find('.')]
    for i in libs:
        subst_dict[i + '_LIBS'] = link_flags(bld.env, i)
        lib_cflags = compile_flags(bld.env, i)
        if lib_cflags == '':
            lib_cflags = ' '
        subst_dict[i + '_CFLAGS'] = lib_cflags

    obj.__dict__.update(subst_dict)

def build_dir(name, subdir):
    """Return the build directory for subdir, accounting for sub-project layout."""
    if is_child():
        return os.path.join('build', name, subdir)
    else:
        return os.path.join('build', subdir)

# Clean up messy Doxygen documentation after it is built
def make_simple_dox(name):
    name = name.lower()
    NAME = name.upper()
    try:
        top = os.getcwd()
        os.chdir(build_dir(name, 'doc/html'))
        page = 'group__%s.html' % name
        if not os.path.exists(page):
            return
        # Strip Doxygen cruft from the main group page with sed
        for i in [
            ['%s_API ' % NAME, ''],
            ['group__%s.html' % name, ''],
            [' ', ''],
            ['<script.*><\/script>', ''],
            ['<hr\/><a name="details" id="details"><\/a><h2>.*<\/h2>', ''],
            ['<link href=\"tabs.css\" rel=\"stylesheet\" type=\"text\/css\"\/>',
             ''],
            ['<img class=\"footer\" src=\"doxygen.png\" alt=\"doxygen\"\/>',
             'Doxygen']]:
            os.system("sed -i 's/%s/%s/g' %s" % (i[0], i[1], page))
        os.rename('group__%s.html' % name, 'index.html')
        # Remove all other generated files except the page and stylesheet
        for i in (glob.glob('*.png') +
                  glob.glob('*.html') +
                  glob.glob('*.js') +
                  glob.glob('*.css')):
            if i != 'index.html' and i != 'style.css':
                os.remove(i)
        os.chdir(top)
        os.chdir(build_dir(name, 'doc/man/man3'))
        for i in glob.glob('*.3'):
            os.system("sed -i 's/%s_API //' %s" % (NAME, i))
        for i in glob.glob('_*'):
            os.remove(i)
        os.chdir(top)
    except Exception as e:
        Logs.error("Failed to fix up %s documentation: %s" % (name, e))

# Doxygen API documentation
def build_dox(bld, name, version, srcdir, blddir):
    if not bld.env['DOCS']:
        return

    if is_child():
        src_dir = os.path.join(srcdir, name.lower())
        doc_dir = os.path.join(blddir, name.lower(), 'doc')
    else:
        src_dir = srcdir
        doc_dir = os.path.join(blddir, 'doc')

    subst_tg = bld(features     = 'subst',
                   source       = 'doc/reference.doxygen.in',
                   target       = 'doc/reference.doxygen',
                   install_path = '',
                   name         = 'doxyfile')

    subst_dict = {
        name + '_VERSION' : version,
        name + '_SRCDIR'  : os.path.abspath(src_dir),
        name + '_DOC_DIR' : os.path.abspath(doc_dir)
    }

    subst_tg.__dict__.update(subst_dict)

    subst_tg.post()

    docs = bld(features = 'doxygen',
               doxyfile = 'doc/reference.doxygen')

    docs.post()

    bld.install_files('${DOCDIR}/%s-1/html' % name.lower(),
                      bld.path.get_bld().ant_glob('doc/html/*'))
    bld.install_files('${DOCDIR}/%s-1/html/search' % name.lower(),
                      bld.path.get_bld().ant_glob('doc/html/search/*'))

    for i in range(1, 8):
        bld.install_files('${MANDIR}/man%d' % i,
                          bld.path.get_bld().ant_glob('doc/man/man%d/*' % i,
                                                      excl='**/_*'))

# Version code file generation
def build_version_files(header_path, source_path, domain, major, minor, micro):
    """Generate a C source/header pair exposing the project version numbers."""
    header_path = os.path.abspath(header_path)
    source_path = os.path.abspath(source_path)
    text  = "int " + domain + "_major_version = " + str(major) + ";\n"
    text += "int " + domain + "_minor_version = " + str(minor) + ";\n"
    text += "int " + domain + "_micro_version = " + str(micro) + ";\n"
    try:
        with open(source_path, 'w') as o:
            o.write(text)
    except IOError:
        Logs.error('Failed to open %s for writing\n' % source_path)
        sys.exit(-1)

    text  = "#ifndef __" + domain + "_version_h__\n"
    text += "#define __" + domain + "_version_h__\n"
    text += "extern const char* " + domain + "_revision;\n"
    text += "extern int " + domain + "_major_version;\n"
    text += "extern int " + domain + "_minor_version;\n"
    text += "extern int " + domain + "_micro_version;\n"
    text += "#endif /* __" + domain + "_version_h__ */\n"
    try:
        with open(header_path, 'w') as o:
            o.write(text)
    except IOError:
        Logs.error('Failed to open %s for writing\n' % header_path)
        sys.exit(-1)

    return None

def build_i18n_pot(bld, srcdir, dir, name, sources, copyright_holder=None):
    """Extract translatable strings from sources into NAME.pot with xgettext."""
    Logs.info('Generating pot file from %s' % name)
    pot_file = '%s.pot' % name

    cmd = [ 'xgettext',
            '--keyword=_',
            '--keyword=N_',
            '--keyword=S_',
            '--from-code=UTF-8',
            '-o', pot_file ]

    if copyright_holder:
        cmd += [ '--copyright-holder="%s"' % copyright_holder ]

    cmd += sources
    Logs.info('Updating ' + pot_file)
    subprocess.call(cmd, cwd=os.path.join(srcdir, dir))

def build_i18n_po(bld, srcdir, dir, name, sources, copyright_holder=None):
    """Merge the pot file into all po/*.po translations with msgmerge."""
    pwd = os.getcwd()
    os.chdir(os.path.join(srcdir, dir))
    pot_file = '%s.pot' % name
    po_files = glob.glob('po/*.po')
    for po_file in po_files:
        cmd = [ 'msgmerge',
                '--update',
                po_file,
                pot_file ]
        Logs.info('Updating ' + po_file)
        subprocess.call(cmd)
    os.chdir(pwd)

def build_i18n_mo(bld, srcdir, dir, name, sources, copyright_holder=None):
    """Compile all po/*.po translations into binary .mo files with msgfmt."""
    pwd = os.getcwd()
    os.chdir(os.path.join(srcdir, dir))
    pot_file = '%s.pot' % name
    po_files = glob.glob('po/*.po')
    for po_file in po_files:
        mo_file = po_file.replace('.po', '.mo')
        cmd = [ 'msgfmt',
                '-c',
                '-f',
                '-o',
                mo_file,
                po_file ]
        # Fixed log message: the file being generated is the .mo, not the .po
        Logs.info('Generating ' + mo_file)
        subprocess.call(cmd)
    os.chdir(pwd)

def build_i18n(bld, srcdir, dir, name, sources, copyright_holder=None):
    """Run the full i18n pipeline: pot extraction, po merge, mo compilation."""
    build_i18n_pot(bld, srcdir, dir, name, sources, copyright_holder)
    build_i18n_po(bld, srcdir, dir, name, sources, copyright_holder)
    build_i18n_mo(bld, srcdir, dir, name, sources, copyright_holder)

def cd_to_build_dir(ctx, appname):
    orig_dir = os.path.abspath(os.curdir)
    top_level = (len(ctx.stack_path) > 1)
    if top_level:
        os.chdir(os.path.join('build', appname))
    else:
        os.chdir('build')
    Logs.pprint('GREEN', "Waf: Entering directory `%s'" % os.path.abspath(os.getcwd()))

def cd_to_orig_dir(ctx, child):
    if child:
        os.chdir(os.path.join('..', '..'))
    else:
        os.chdir('..')

def pre_test(ctx, appname, dirs=['src']):
    """Clear lcov coverage counters before the test run (best effort)."""
    diropts = ''
    for i in dirs:
        diropts += ' -d ' + i
    cd_to_build_dir(ctx, appname)
    with open('lcov-clear.log', 'w') as clear_log:
        try:
            # Clear coverage data
            subprocess.call(('lcov %s -z' % diropts).split(),
                            stdout=clear_log, stderr=clear_log)
        except Exception:
            # Coverage is optional; missing lcov must not fail the tests
            Logs.warn('Failed to run lcov, no coverage report will be generated')

def post_test(ctx, appname, dirs=['src'], remove=['*boost*', 'c++*']):
    """Generate an lcov/genhtml coverage report after the test run (best effort)."""
    diropts = ''
    for i in dirs:
        diropts += ' -d ' + i
    with open('lcov-coverage.log', 'w') as coverage_log, \
         open('coverage.lcov', 'w') as coverage_lcov, \
         open('coverage-stripped.lcov', 'w') as coverage_stripped_lcov:
        try:
            base = '.'
            if g_is_child:
                base = '..'

            # Generate coverage data
            subprocess.call(('lcov -c %s -b %s' % (diropts, base)).split(),
                            stdout=coverage_lcov, stderr=coverage_log)

            # Strip unwanted stuff
            subprocess.call(
                ['lcov', '--remove', 'coverage.lcov'] + remove,
                stdout=coverage_stripped_lcov, stderr=coverage_log)

            # Generate HTML coverage output
            if not os.path.isdir('coverage'):
                os.makedirs('coverage')
            subprocess.call('genhtml -o coverage coverage-stripped.lcov'.split(),
                            stdout=coverage_log, stderr=coverage_log)

        except Exception:
            # Coverage is optional; missing lcov must not fail the tests
            Logs.warn('Failed to run lcov, no coverage report will be generated')

    print('')
    Logs.pprint('GREEN', "Waf: Leaving directory `%s'" % os.path.abspath(os.getcwd()))
    top_level = (len(ctx.stack_path) > 1)
    if top_level:
        cd_to_orig_dir(ctx, top_level)

    print('')
    Logs.pprint('BOLD', 'Coverage:', sep='')
    print('<file://%s>\n\n' % os.path.abspath('coverage/index.html'))

def run_tests(ctx, appname, tests, desired_status=0, dirs=['src'], name='*'):
    """Run each test command, optionally under valgrind, and report pass/fail."""
    failures = 0
    diropts = ''
    for i in dirs:
        diropts += ' -d ' + i

    # Run all tests
    for i in tests:
        s = i
        if isinstance(i, list):
            s = ' '.join(i)
        print('')
        Logs.pprint('BOLD', '** Test', sep='')
        Logs.pprint('NORMAL', '%s' % s)
        cmd = i
        if Options.options.grind:
            cmd = 'valgrind ' + i
        if subprocess.call(cmd, shell=True) == desired_status:
            Logs.pprint('GREEN', '** Pass')
        else:
            failures += 1
            Logs.pprint('RED', '** FAIL')

    print('')
    if failures == 0:
        Logs.pprint('GREEN', '** Pass: All %s.%s tests passed' % (appname, name))
    else:
        Logs.pprint('RED', '** FAIL: %d %s.%s tests failed' % (failures, appname, name))

def run_ldconfig(ctx):
    """Run ldconfig after a real (non-staged) install, at most once."""
    if (ctx.cmd == 'install'
            and not ctx.env['RAN_LDCONFIG']
            and ctx.env['LIBDIR']
            and 'DESTDIR' not in os.environ
            and not Options.options.destdir):
        try:
            Logs.info("Waf: Running `/sbin/ldconfig %s'" % ctx.env['LIBDIR'])
            subprocess.call(['/sbin/ldconfig', ctx.env['LIBDIR']])
            ctx.env['RAN_LDCONFIG'] = True
        except Exception:
            # ldconfig may be missing or fail without root; ignore
            pass

def write_news(name, in_files, out_file):
    """Generate a Debian-changelog-style NEWS file from DOAP release metadata."""
    import rdflib
    import textwrap
    from time import strftime, strptime

    doap = rdflib.Namespace('http://usefulinc.com/ns/doap#')
    dcs  = rdflib.Namespace('http://ontologi.es/doap-changeset#')
    rdfs = rdflib.Namespace('http://www.w3.org/2000/01/rdf-schema#')
    foaf = rdflib.Namespace('http://xmlns.com/foaf/0.1/')
    rdf  = rdflib.Namespace('http://www.w3.org/1999/02/22-rdf-syntax-ns#')
    m    = rdflib.ConjunctiveGraph()

    try:
        for i in in_files:
            m.parse(i, format='n3')
    except Exception:
        Logs.warn('Error parsing data, unable to generate NEWS')
        return

    proj = m.value(None, rdf.type, doap.Project)
    for f in m.triples([proj, rdfs.seeAlso, None]):
        if f[2].endswith('.ttl'):
            m.parse(f[2], format='n3')

    entries = {}
    for r in m.triples([proj, doap.release, None]):
        release   = r[2]
        revision  = m.value(release, doap.revision, None)
        date      = m.value(release, doap.created, None)
        blamee    = m.value(release, dcs.blame, None)
        changeset = m.value(release, dcs.changeset, None)

        if revision and date and blamee and changeset:
            entry = '%s (%s) stable;\n' % (name, revision)

            for i in m.triples([changeset, dcs.item, None]):
                entry += '\n  * ' + '\n    '.join(
                    textwrap.wrap(m.value(i[2], rdfs.label, None), width=79))

            entry += '\n\n --'

            blamee_name = m.value(blamee, foaf.name, None)
            blamee_mbox = m.value(blamee, foaf.mbox, None)
            if blamee_name and blamee_mbox:
                entry += ' %s <%s>' % (blamee_name,
                                       blamee_mbox.replace('mailto:', ''))

            entry += '  %s\n\n' % (
                strftime('%a, %d %b %Y %H:%M:%S +0000', strptime(date, '%Y-%m-%d')))

            entries[revision] = entry
        else:
            Logs.warn('Ignored incomplete %s release description' % name)

    if len(entries) > 0:
        # Newest release first
        with open(out_file, 'w') as news:
            for e in sorted(entries.keys(), reverse=True):
                news.write(entries[e])