#!/usr/bin/env python

# Waf build script for the LV2 plugin specification bundle.

import os
import re
import sys

from waflib import Build, Context, Logs, Options, Scripting, Utils
from waflib.extras import autowaf as autowaf

# Mandatory waf variables
APPNAME = 'lv2'        # Package name for waf dist
VERSION = '1.18.2'     # Package version for waf dist
top = '.'              # Source directory
out = 'build'          # Build directory

# Release variables
title = 'LV2'
uri = 'http://lv2plug.in/ns/lv2'
dist_pattern = 'http://lv2plug.in/spec/lv2-%d.%d.%d.tar.bz2'
post_tags = []

# Links for documentation
list_email = 'devel@lists.lv2plug.in'
list_page = 'http://lists.lv2plug.in/listinfo.cgi/devel-lv2plug.in'

# Map of specification base name to old URI-style include path
spec_map = {
    'atom': 'lv2/lv2plug.in/ns/ext/atom',
    'buf-size': 'lv2/lv2plug.in/ns/ext/buf-size',
    'core': 'lv2/lv2plug.in/ns/lv2core',
    'data-access': 'lv2/lv2plug.in/ns/ext/data-access',
    'dynmanifest': 'lv2/lv2plug.in/ns/ext/dynmanifest',
    'event': 'lv2/lv2plug.in/ns/ext/event',
    'instance-access': 'lv2/lv2plug.in/ns/ext/instance-access',
    'log': 'lv2/lv2plug.in/ns/ext/log',
    'midi': 'lv2/lv2plug.in/ns/ext/midi',
    'morph': 'lv2/lv2plug.in/ns/ext/morph',
    'options': 'lv2/lv2plug.in/ns/ext/options',
    'parameters': 'lv2/lv2plug.in/ns/ext/parameters',
    'patch': 'lv2/lv2plug.in/ns/ext/patch',
    'port-groups': 'lv2/lv2plug.in/ns/ext/port-groups',
    'port-props': 'lv2/lv2plug.in/ns/ext/port-props',
    'presets': 'lv2/lv2plug.in/ns/ext/presets',
    'resize-port': 'lv2/lv2plug.in/ns/ext/resize-port',
    'state': 'lv2/lv2plug.in/ns/ext/state',
    'time': 'lv2/lv2plug.in/ns/ext/time',
    'ui': 'lv2/lv2plug.in/ns/extensions/ui',
    'units': 'lv2/lv2plug.in/ns/extensions/units',
    'uri-map': 'lv2/lv2plug.in/ns/ext/uri-map',
    'urid': 'lv2/lv2plug.in/ns/ext/urid',
    'worker': 'lv2/lv2plug.in/ns/ext/worker'}


def options(ctx):
    """Register command-line options for configuration."""
    ctx.load('compiler_c')
    ctx.load('compiler_cxx')
    ctx.load('lv2')
    ctx.add_flags(
        ctx.configuration_options(),
        {'no-coverage': 'Do not use gcov for code coverage',
         'online-docs': 'Build documentation for web hosting',
         'no-check-links': 'Do not check documentation for broken links',
         'no-plugins': 'Do not build example plugins',
         'copy-headers': 'Copy headers instead of linking to bundle'})


def configure(conf):
    """Configure the build: find compilers/tools and set environment."""
    # A C compiler is optional; without one, tests and plugins are disabled
    try:
        conf.load('compiler_c', cache=True)
    except Exception:
        Options.options.build_tests = False
        Options.options.no_plugins = True

    # A C++ compiler is only needed for the C++ compile test
    try:
        conf.load('compiler_cxx', cache=True)
    except Exception:
        pass

    if Options.options.online_docs:
        Options.options.docs = True

    conf.load('lv2', cache=True)
    conf.load('autowaf', cache=True)
    autowaf.set_c_lang(conf, 'c99')

    if Options.options.strict:
        # Check for programs used by lint target
        conf.find_program("flake8", var="FLAKE8", mandatory=False)
        conf.find_program("clang-tidy", var="CLANG_TIDY", mandatory=False)
        conf.find_program("iwyu_tool", var="IWYU_TOOL", mandatory=False)

    if Options.options.ultra_strict:
        # Disable warnings that are deliberately not fixed in this codebase
        autowaf.add_compiler_flags(conf.env, 'c', {
            'gcc': [
                '-Wno-bad-function-cast',
            ],
            'clang': [
                '-Wno-bad-function-cast',
            ]
        })

        autowaf.add_compiler_flags(conf.env, '*', {
            'clang': [
                '-Wno-cast-align',
                '-Wno-cast-qual',
                '-Wno-documentation-unknown-command',
                '-Wno-double-promotion',
                '-Wno-float-conversion',
                '-Wno-float-equal',
                '-Wno-implicit-float-conversion',
                '-Wno-padded',
                '-Wno-reserved-id-macro',
                '-Wno-shorten-64-to-32',
                '-Wno-sign-conversion',
                '-Wno-switch-enum',
                '-Wno-unused-parameter',
            ],
            'gcc': [
                '-Wno-cast-align',
                '-Wno-cast-qual',
                '-Wno-conversion',
                '-Wno-double-promotion',
                '-Wno-float-equal',
                '-Wno-inline',
                '-Wno-padded',
                '-Wno-parentheses',
                '-Wno-suggest-attribute=const',
                '-Wno-suggest-attribute=malloc',
                '-Wno-suggest-attribute=pure',
                '-Wno-switch-enum',
                '-Wno-unused-parameter',
            ],
            'msvc': [
                '/wd4061',  # enumerator in switch is not explicitly handled
                '/wd4100',  # unreferenced formal parameter
                '/wd4244',  # conversion with possible loss of data
                '/wd4267',  # conversion from size_t to a smaller type
                '/wd4310',  # cast truncates constant value
                '/wd4365',  # signed/unsigned mismatch
                '/wd4464',  # relative include path contains ".."
                '/wd4514',  # unreferenced inline function has been removed
                '/wd4706',  # assignment within conditional expression
                '/wd4710',  # function not inlined
                '/wd4711',  # function selected for automatic inline expansion
                '/wd4820',  # padding added after construct
                '/wd5045',  # will insert Spectre mitigation for memory load
            ]
        })

        autowaf.add_compiler_flags(conf.env, 'cxx', {
            'gcc': [
                '-Wno-useless-cast',
                '-Wno-zero-as-null-pointer-constant',
            ],
            'clang': [
                '-Wno-old-style-cast',
                '-Wno-zero-as-null-pointer-constant',
            ]
        })

        if 'mingw' in conf.env.CC[0]:
            autowaf.add_compiler_flags(conf.env, '*', {
                'gcc': [
                    '-Wno-format',
                    '-Wno-suggest-attribute=format',
                ],
            })

    if conf.env.DEST_OS == 'win32' or not hasattr(os.path, 'relpath'):
        Logs.warn('System does not support linking headers, copying')
        Options.options.copy_headers = True

    conf.env.BUILD_TESTS = Options.options.build_tests
    conf.env.BUILD_PLUGINS = not Options.options.no_plugins
    conf.env.COPY_HEADERS = Options.options.copy_headers
    conf.env.ONLINE_DOCS = Options.options.online_docs

    if conf.env.DOCS or conf.env.ONLINE_DOCS:
        try:
            conf.find_program('asciidoc')
            conf.env.BUILD_BOOK = True
        except Exception:
            Logs.warn('Asciidoc not found, book will not be built')

        if not Options.options.no_check_links:
            if not conf.find_program('linkchecker',
                                     var='LINKCHECKER', mandatory=False):
                Logs.warn('Documentation will not be checked for broken links')

    # Check for gcov library (for test coverage)
    if (conf.env.BUILD_TESTS and
        not Options.options.no_coverage and
        not conf.is_defined('HAVE_GCOV')):
        conf.check_cc(lib='gcov', define_name='HAVE_GCOV', mandatory=False)

    if conf.env.BUILD_TESTS:
        conf.find_program('serdi', mandatory=False)
        conf.find_program('sord_validate', mandatory=False)

    autowaf.set_lib_env(conf, 'lv2', VERSION, has_objects=False)
    autowaf.set_local_lib(conf, 'lv2', has_objects=False)

    conf.run_env.append_unique('LV2_PATH',
                               [os.path.join(conf.path.abspath(), 'lv2')])

    if conf.env.BUILD_PLUGINS:
        for i in ['eg-amp.lv2',
                  'eg-fifths.lv2',
                  'eg-metro.lv2',
                  'eg-midigate.lv2',
                  'eg-params.lv2',
                  'eg-sampler.lv2',
                  'eg-scope.lv2']:
            try:
                path = os.path.join('plugins', i)
                conf.recurse(path)
                conf.env.LV2_BUILD += [path]
                conf.run_env.append_unique(
                    'LV2_PATH', [conf.build_path('plugins/%s/lv2' % i)])
            except Exception as e:
                # A plugin that fails to configure is skipped, not fatal
                Logs.warn('Configuration of %s failed (%s)' % (i, e))

    autowaf.display_summary(
        conf,
        {'Bundle directory': conf.env.LV2DIR,
         'Copy (not link) headers': bool(conf.env.COPY_HEADERS),
         'Version': VERSION})


def chop_lv2_prefix(s):
    """Return `s` with any leading 'lv2/lv2plug.in/' prefix removed."""
    if s.startswith('lv2/lv2plug.in/'):
        return s[len('lv2/lv2plug.in/'):]
    return s


def subst_file(template, output, dict):
    """Write `output` as `template` with every key in `dict` replaced.

    Note: the `dict` parameter name shadows the builtin; kept for
    interface compatibility with existing callers.
    """
    # Use context managers so files are closed even if replacement fails
    with open(template, 'r') as i:
        with open(output, 'w') as o:
            for line in i:
                for key in dict:
                    line = line.replace(key, dict[key])
                o.write(line)


def specdirs(path):
    """Return all specification bundle directories under `path`."""
    return (path.ant_glob('lv2/*', dir=True) +
            path.ant_glob('plugins/*.lv2', dir=True))


def ttl_files(path, specdir):
    """Return absolute paths of all Turtle files in `specdir`."""
    def abspath(node):
        return node.abspath()

    return map(abspath,
               path.ant_glob(specdir.path_from(path) + '/*.ttl'))


def load_ttl(files, exclude=None):
    """Parse Turtle `files` (minus `exclude`) into one rdflib graph."""
    import rdflib

    # Avoid a mutable default argument (shared between calls)
    if exclude is None:
        exclude = []

    model = rdflib.ConjunctiveGraph()
    for f in files:
        if f not in exclude:
            model.parse(f, format='n3')
    return model


# Task to build extension index
def build_index(task):
    """Waf task rule: generate the specification index page."""
    src_dir = task.inputs[0].parent.parent
    sys.path.append(str(src_dir.find_node('lv2specgen')))
    import rdflib

    doap = rdflib.Namespace('http://usefulinc.com/ns/doap#')

    model = load_ttl([str(src_dir.find_node('lv2/core/meta.ttl')),
                      str(src_dir.find_node('lv2/core/people.ttl'))])

    # Get date for this version, and list of all LV2 distributions
    proj = rdflib.URIRef('http://lv2plug.in/ns/lv2')
    date = None
    dists = []
    for r in model.triples([proj, doap.release, None]):
        revision = model.value(r[2], doap.revision, None)
        created = model.value(r[2], doap.created, None)
        if str(revision) == VERSION:
            date = created

        dist = model.value(r[2], doap['file-release'], None)
        if dist and created:
            dists += [(created, dist)]
        else:
            print('warning: %s has no file release\n' % proj)

    # Concatenate the per-specification index rows generated by lv2specgen
    rows = []
    for f in task.inputs:
        if not f.abspath().endswith('index.html.in'):
            with open(f.abspath(), 'r') as rowfile:
                rows += rowfile.readlines()

    if date is None:
        # Unreleased development version: use a reproducible current date
        import datetime
        import time
        now = int(os.environ.get('SOURCE_DATE_EPOCH', time.time()))
        date = datetime.datetime.utcfromtimestamp(now).strftime('%F')

    subst_file(task.inputs[0].abspath(), task.outputs[0].abspath(),
               {'@ROWS@': ''.join(rows),
                '@LV2_VERSION@': VERSION,
                '@DATE@': date})


def build_spec(bld, path):
    """Set up build/install rules for the specification bundle at `path`."""
    name = os.path.basename(path)
    bundle_dir = os.path.join(bld.env.LV2DIR, name + '.lv2')
    include_dir = os.path.join(bld.env.INCLUDEDIR, path)
    old_include_dir = os.path.join(bld.env.INCLUDEDIR, spec_map[name])

    # Build test program if applicable
    for test in bld.path.ant_glob(os.path.join(path, '*-test.c')):
        test_lib = []
        test_cflags = ['']
        test_linkflags = ['']
        if bld.is_defined('HAVE_GCOV'):
            test_lib += ['gcov']
            test_cflags += ['--coverage']
            test_linkflags += ['--coverage']
        if bld.env.DEST_OS not in ['darwin', 'win32']:
            test_lib += ['rt']

        # Unit test program
        bld(features = 'c cprogram',
            source = test,
            lib = test_lib,
            uselib = 'LV2',
            target = os.path.splitext(str(test.get_bld()))[0],
            install_path = None,
            cflags = test_cflags,
            linkflags = test_linkflags)

    # Install bundle
    bld.install_files(bundle_dir,
                      bld.path.ant_glob(path + '/?*.*', excl='*.in'))

    # Install URI-like includes
    headers = bld.path.ant_glob(path + '/*.h')
    if headers:
        for d in [include_dir, old_include_dir]:
            if bld.env.COPY_HEADERS:
                bld.install_files(d, headers)
            else:
                bld.symlink_as(d,
                               os.path.relpath(bundle_dir,
                                               os.path.dirname(d)))


def build(bld):
    """Main waf build: bundles, pkgconfig, docs, tests, and plugins."""
    specs = (bld.path.ant_glob('lv2/*', dir=True))

    # Copy lv2.h to include directory for backwards compatibility
    old_lv2_h_path = os.path.join(bld.env.INCLUDEDIR, 'lv2.h')
    if bld.env.COPY_HEADERS:
        bld.install_files(os.path.dirname(old_lv2_h_path), 'lv2/core/lv2.h')
    else:
        bld.symlink_as(old_lv2_h_path, 'lv2/core/lv2.h')

    # LV2 pkgconfig file
    bld(features = 'subst',
        source = 'lv2.pc.in',
        target = 'lv2.pc',
        install_path = '${LIBDIR}/pkgconfig',
        PREFIX = bld.env.PREFIX,
        INCLUDEDIR = bld.env.INCLUDEDIR,
        VERSION = VERSION)

    # Validator
    bld(features = 'subst',
        source = 'util/lv2_validate.in',
        target = 'lv2_validate',
        chmod = Utils.O755,
        install_path = '${BINDIR}',
        LV2DIR = bld.env.LV2DIR)

    # Build extensions
    for spec in specs:
        build_spec(bld, spec.path_from(bld.path))

    # Build plugins
    for plugin in bld.env.LV2_BUILD:
        bld.recurse(plugin)

    # Install lv2specgen
    bld.install_files('${DATADIR}/lv2specgen/',
                      ['lv2specgen/style.css',
                       'lv2specgen/template.html'])
    bld.install_files('${DATADIR}/lv2specgen/DTD/',
                      bld.path.ant_glob('lv2specgen/DTD/*'))
    bld.install_files('${BINDIR}', 'lv2specgen/lv2specgen.py',
                      chmod=Utils.O755)

    # Install schema bundle
    bld.install_files('${LV2DIR}/schemas.lv2/',
                      bld.path.ant_glob('schemas.lv2/*.ttl'))

    if bld.env.ONLINE_DOCS:
        # Generate .htaccess files
        for d in ('ns', 'ns/ext', 'ns/extensions'):
            path = os.path.join(str(bld.path.get_bld()), d)
            bld(features = 'subst',
                source = 'doc/htaccess.in',
                target = os.path.join(path, '.htaccess'),
                install_path = None,
                BASE = '/' + d)

    if bld.env.DOCS or bld.env.ONLINE_DOCS:
        # Copy spec files to build dir
        for spec in specs:
            srcpath = spec.path_from(bld.path)
            basename = os.path.basename(srcpath)
            full_path = spec_map[basename]
            name = 'lv2core' if basename == 'core' else basename
            path = chop_lv2_prefix(full_path)

            bld(features = 'subst',
                is_copy = True,
                source = os.path.join(srcpath, name + '.ttl'),
                target = path + '.ttl')

        # Copy stylesheets to build directory
        for i in ['style.css', 'pygments.css']:
            bld(features = 'subst',
                is_copy = True,
                name = 'copy',
                source = 'doc/%s' % i,
                target = 'aux/%s' % i)

        # Build Doxygen documentation (and tags file)
        autowaf.build_dox(bld, 'LV2', VERSION, top, out, 'doc', False)
        bld.add_group()

        index_files = []
        for spec in specs:
            # Call lv2specgen to generate spec docs
            srcpath = spec.path_from(bld.path)
            basename = os.path.basename(srcpath)
            full_path = spec_map[basename]
            name = 'lv2core' if basename == 'core' else basename
            ttl_name = name + '.ttl'
            index_file = bld.path.get_bld().make_node('index_rows/' + name)
            index_files += [index_file]
            chopped_path = chop_lv2_prefix(full_path)

            assert chopped_path.startswith('ns/')
            root_path = os.path.relpath('/', os.path.dirname(chopped_path[2:]))
            html_path = '%s.html' % chopped_path
            out_dir = os.path.dirname(html_path)
            style_uri = os.path.relpath('aux/style.css', out_dir)

            cmd = (str(bld.path.find_node('lv2specgen/lv2specgen.py')) +
                   ' --root-uri=http://lv2plug.in/ns/' +
                   ' --root-path=' + root_path +
                   ' --list-email=' + list_email +
                   ' --list-page=' + list_page +
                   ' --style-uri=' + style_uri +
                   ' --docdir=' + os.path.relpath('doc/html', out_dir) +
                   ' --tags=%s' % bld.path.get_bld().make_node('doc/tags') +
                   ' --index=' + str(index_file) +
                   (' --online' if bld.env.ONLINE_DOCS else '') +
                   ' ${SRC} ${TGT}')

            bld(rule = cmd,
                source = os.path.join(srcpath, ttl_name),
                target = [html_path, index_file],
                shell = False)

            # Install documentation
            bld.install_files(
                os.path.join('${DOCDIR}', 'lv2', os.path.dirname(html_path)),
                html_path)

        index_files.sort(key=lambda x: x.path_from(bld.path))
        bld.add_group()

        # Build extension index
        bld(rule = build_index,
            name = 'index',
            source = ['doc/index.html.in'] + index_files,
            target = 'ns/index.html')

        # Install main documentation files
        bld.install_files('${DOCDIR}/lv2/aux/', 'aux/style.css')
        bld.install_files('${DOCDIR}/lv2/ns/', 'ns/index.html')

        def check_links(ctx):
            import subprocess
            if ctx.env.LINKCHECKER:
                if subprocess.call([ctx.env.LINKCHECKER[0],
                                    '--no-status', out]):
                    ctx.fatal('Documentation contains broken links')

        if bld.cmd == 'build':
            bld.add_post_fun(check_links)

    if bld.env.BUILD_TESTS:
        # Generate a compile test file that includes all headers
        def gen_build_test(task):
            with open(task.outputs[0].abspath(), 'w') as out:
                for i in task.inputs:
                    out.write('#include "%s"\n' % i.bldpath())
                out.write('int main(void) { return 0; }\n')

        bld(rule = gen_build_test,
            source = bld.path.ant_glob('lv2/**/*.h'),
            target = 'build-test.c',
            install_path = None)

        bld(features = 'c cprogram',
            source = bld.path.get_bld().make_node('build-test.c'),
            target = 'build-test',
            includes = '.',
            uselib = 'LV2',
            install_path = None)

        if 'COMPILER_CXX' in bld.env:
            bld(rule = gen_build_test,
                source = bld.path.ant_glob('lv2/**/*.h'),
                target = 'build-test.cpp',
                install_path = None)

            bld(features = 'cxx cxxprogram',
                source = bld.path.get_bld().make_node('build-test.cpp'),
                target = 'build-test-cpp',
                includes = '.',
                uselib = 'LV2',
                install_path = None)

    if bld.env.BUILD_BOOK:
        # Build "Programming LV2 Plugins" book from plugin examples
        bld.recurse('plugins')


class LintContext(Build.BuildContext):
    fun = cmd = 'lint'


def lint(ctx):
    "checks code for style issues"
    import subprocess
    import glob

    st = 0

    if "FLAKE8" in ctx.env:
        Logs.info("Running flake8")
        st = subprocess.call([ctx.env.FLAKE8[0],
                              "wscript",
                              "--ignore",
                              "E101,E129,W191,E221,W504,E251,E241,E741"])
    else:
        Logs.warn("Not running flake8")

    if "IWYU_TOOL" in ctx.env:
        Logs.info("Running include-what-you-use")
        cmd = [ctx.env.IWYU_TOOL[0], "-o", "clang", "-p", "build"]
        output = subprocess.check_output(cmd).decode('utf-8')
        if 'error: ' in output:
            sys.stdout.write(output)
            st += 1
    else:
        Logs.warn("Not running include-what-you-use")

    if "CLANG_TIDY" in ctx.env and "clang" in ctx.env.CC[0]:
        Logs.info("Running clang-tidy")
        sources = glob.glob('**/*.h', recursive=True)
        sources = list(map(os.path.abspath, sources))
        procs = []
        for source in sources:
            cmd = [ctx.env.CLANG_TIDY[0], "--quiet", "-p=.", source]
            procs += [subprocess.Popen(cmd, cwd="build")]

        for proc in procs:
            # Output is not captured, only the exit status matters
            proc.communicate()
            st += proc.returncode
    else:
        Logs.warn("Not running clang-tidy")

    if st != 0:
        sys.exit(st)


def test_vocabularies(check, specs, files):
    """Run data tests on the specification vocabularies with rdflib."""
    import rdflib

    foaf = rdflib.Namespace('http://xmlns.com/foaf/0.1/')
    lv2 = rdflib.Namespace('http://lv2plug.in/ns/lv2core#')
    owl = rdflib.Namespace('http://www.w3.org/2002/07/owl#')
    rdf = rdflib.Namespace('http://www.w3.org/1999/02/22-rdf-syntax-ns#')
    rdfs = rdflib.Namespace('http://www.w3.org/2000/01/rdf-schema#')

    # Check if this is a stable LV2 release to enable additional tests
    version_tuple = tuple(map(int, VERSION.split(".")))
    is_stable = version_tuple[1] % 2 == 0 and version_tuple[2] % 2 == 0

    # Check that extended documentation is not in main specification file
    for spec in specs:
        path = str(spec.abspath())
        name = os.path.basename(path)
        name = 'lv2core' if name == 'core' else name
        vocab = os.path.join(path, name + '.ttl')

        spec_model = rdflib.ConjunctiveGraph()
        spec_model.parse(vocab, format='n3')

        def has_statement(s, p, o):
            for t in spec_model.triples([s, p, o]):
                return True

            return False

        check(lambda: not has_statement(None, lv2.documentation, None),
              name = name + ".ttl does not contain lv2:documentation")

    # Check specification manifests
    for spec in specs:
        path = str(spec.abspath())
        manifest_path = os.path.join(path, 'manifest.ttl')
        manifest_model = rdflib.ConjunctiveGraph()
        manifest_model.parse(manifest_path, format='n3')

        uri = manifest_model.value(None, rdf.type, lv2.Specification)
        minor = manifest_model.value(uri, lv2.minorVersion, None)
        micro = manifest_model.value(uri, lv2.microVersion, None)
        check(lambda: uri is not None,
              name = manifest_path + " has a lv2:Specification")
        check(lambda: minor is not None,
              name = manifest_path + " has a lv2:minorVersion")
        check(lambda: micro is not None,
              name = manifest_path + " has a lv2:microVersion")

        if is_stable:
            check(lambda: int(minor) > 0,
                  name = manifest_path + " has even non-zero minor version")
            check(lambda: int(micro) % 2 == 0,
                  name = manifest_path + " has even micro version")

    # Load everything into one big model
    model = rdflib.ConjunctiveGraph()
    for f in files:
        model.parse(f, format='n3')

    # Check that all named and typed resources have labels and comments
    for r in sorted(model.triples([None, rdf.type, None])):
        subject = r[0]
        # Use isinstance for the type check (exact-type comparison is
        # un-idiomatic and would miss BNode subclasses)
        if (isinstance(subject, rdflib.term.BNode) or
            foaf.Person in model.objects(subject, rdf.type)):
            continue

        def has_property(subject, prop):
            return model.value(subject, prop, None) is not None

        check(lambda: has_property(subject, rdfs.label),
              name = '%s has rdfs:label' % subject)

        if check(lambda: has_property(subject, rdfs.comment),
                 name = '%s has rdfs:comment' % subject):
            comment = str(model.value(subject, rdfs.comment, None))

            check(lambda: comment.endswith('.'),
                  name = "%s comment ends in '.'" % subject)
            check(lambda: comment.find('\n') == -1,
                  name = "%s comment contains no newlines" % subject)
            check(lambda: comment == comment.strip(),
                  name = "%s comment has stripped whitespace" % subject)

        # Check that lv2:documentation, if present, is proper Markdown
        documentation = model.value(subject, lv2.documentation, None)
        if documentation is not None:
            check(lambda: documentation.datatype == lv2.Markdown,
                  name = "%s documentation is explicitly Markdown" % subject)
            check(lambda: str(documentation).startswith('\n\n'),
                  name = "%s documentation starts with blank line" % subject)
            check(lambda: str(documentation).endswith('\n\n'),
                  name = "%s documentation ends with blank line" % subject)

    # Check that all properties are either datatype or object properties
    for r in sorted(model.triples([None, rdf.type, rdf.Property])):
        subject = r[0]
        types = list(model.objects(subject, rdf.type))

        check(lambda: ((owl.DatatypeProperty in types) or
                       (owl.ObjectProperty in types) or
                       (owl.AnnotationProperty in types)),
              name = "%s is a Datatype/Object/Annotation property" % subject)


def test(tst):
    """Run data and unit tests."""
    import tempfile

    with tst.group("Data") as check:
        specs = (tst.path.ant_glob('lv2/*', dir=True))
        schemas = list(map(str, tst.path.ant_glob("schemas.lv2/*.ttl")))
        spec_files = list(map(str, tst.path.ant_glob("lv2/**/*.ttl")))
        plugin_files = list(map(str, tst.path.ant_glob("plugins/**/*.ttl")))
        bld_files = list(map(str, tst.path.get_bld().ant_glob("**/*.ttl")))

        if "SERDI" in tst.env and sys.platform != 'win32':
            for f in spec_files:
                with tempfile.NamedTemporaryFile(mode="w") as tmp:
                    base_dir = os.path.dirname(f)
                    cmd = tst.env.SERDI + ["-o", "turtle", f, base_dir]
                    check(cmd, stdout=tmp.name)
                    check.file_equals(f, tmp.name)

        if "SORD_VALIDATE" in tst.env:
            all_files = schemas + spec_files + plugin_files + bld_files
            check(tst.env.SORD_VALIDATE + all_files)

        try:
            test_vocabularies(check, specs, spec_files)
        except ImportError as e:
            Logs.warn('Not running vocabulary tests (%s)' % e)

    with tst.group('Unit') as check:
        pattern = tst.env.cprogram_PATTERN % '**/*-test'
        for test in tst.path.get_bld().ant_glob(pattern):
            check([str(test)])


class Dist(Scripting.Dist):
    def execute(self):
        'Execute but do not call archive() since dist() has already done so.'
        self.recurse([os.path.dirname(Context.g_module.root_path)])

    def get_tar_path(self, node):
        'Resolve symbolic links to avoid broken links in tarball.'
        return os.path.realpath(node.abspath())


class DistCheck(Dist, Scripting.DistCheck):
    def execute(self):
        Dist.execute(self)
        self.check()

    def archive(self):
        Dist.archive(self)


def _get_news_entries(ctx):
    """Build a dict of LV2 release news entries from the Turtle metadata."""
    from waflib.extras import autoship

    # Get project-level news entries
    lv2_entries = autoship.read_ttl_news('lv2',
                                         ['lv2/core/meta.ttl',
                                          'lv2/core/people.ttl'],
                                         dist_pattern = dist_pattern)

    release_pattern = r'http://lv2plug.in/spec/lv2-([0-9\.]*).tar.bz2'
    current_version = sorted(lv2_entries.keys(), reverse=True)[0]

    # Add items from every specification
    for specdir in specdirs(ctx.path):
        name = os.path.basename(specdir.abspath())
        files = list(ttl_files(ctx.path, specdir))
        if name == "core":
            # Project-level metadata is already covered by lv2_entries
            files = [f for f in files if (not f.endswith('/meta.ttl') and
                                          not f.endswith('/people.ttl') and
                                          not f.endswith('/manifest.ttl'))]

        entries = autoship.read_ttl_news(name, files)

        def add_items(lv2_version, name, items):
            for item in items:
                lv2_entries[lv2_version]["items"] += ["%s: %s" % (name, item)]

        if entries:
            latest_revision = sorted(entries.keys(), reverse=True)[0]
            for revision, entry in entries.items():
                if "dist" in entry:
                    match = re.match(release_pattern, entry["dist"])
                    if match:
                        # Append news items to corresponding LV2 version
                        version = tuple(map(int, match.group(1).split('.')))
                        add_items(version, name, entry["items"])

                elif revision == latest_revision:
                    # Not-yet-released development version, append to current
                    add_items(current_version, name, entry["items"])

    # Sort news items in each version
    for revision, entry in lv2_entries.items():
        entry["items"].sort()

    return lv2_entries


def posts(ctx):
    "generates news posts in Pelican Markdown format"

    from waflib.extras import autoship

    try:
        os.mkdir(os.path.join(out, 'posts'))
    except Exception:
        pass

    autoship.write_posts(_get_news_entries(ctx),
                         os.path.join(out, 'posts'),
                         {'Author': 'drobilla'})


def news(ctx):
    """write an amalgamated NEWS file to the source directory"""

    from waflib.extras import autoship

    autoship.write_news(_get_news_entries(ctx), 'NEWS')


def dist(ctx):
    """Make a release tarball (after regenerating NEWS)."""
    news(ctx)
    ctx.archive()


def distcheck(ctx):
    """Make and verify a release tarball (after regenerating NEWS)."""
    news(ctx)
    ctx.archive()