1# 2# SConstruct file to build scons packages during development. 3# 4# See the README.rst file for an overview of how SCons is built and tested. 5 6copyright_years = '2001 - 2014' 7 8# This gets inserted into the man pages to reflect the month of release. 9month_year = 'September 2014' 10 11# 12# __COPYRIGHT__ 13# 14# Permission is hereby granted, free of charge, to any person obtaining 15# a copy of this software and associated documentation files (the 16# "Software"), to deal in the Software without restriction, including 17# without limitation the rights to use, copy, modify, merge, publish, 18# distribute, sublicense, and/or sell copies of the Software, and to 19# permit persons to whom the Software is furnished to do so, subject to 20# the following conditions: 21# 22# The above copyright notice and this permission notice shall be included 23# in all copies or substantial portions of the Software. 24# 25# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY 26# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE 27# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 28# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE 29# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 30# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION 31# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 32# 33 34import distutils.util 35import fnmatch 36import os 37import os.path 38import re 39import stat 40import sys 41import tempfile 42 43import bootstrap 44 45project = 'scons' 46default_version = '2.3.4' 47copyright = "Copyright (c) %s The SCons Foundation" % copyright_years 48 49platform = distutils.util.get_platform() 50 51SConsignFile() 52 53# 54# An internal "whereis" routine to figure out if a given program 55# is available on this system. 
#
def whereis(file):
    """Return the path of the first executable named *file* found on
    os.environ['PATH'], or None if there is none.

    On win32 a '.exe' suffix is also tried.  This is Python 2 code:
    0111 is the old-style octal literal for the three execute bits.
    """
    exts = ['']
    if platform == "win32":
        exts += ['.exe']
    for dir in os.environ['PATH'].split(os.pathsep):
        f = os.path.join(dir, file)
        for ext in exts:
            f_ext = f + ext
            if os.path.isfile(f_ext):
                try:
                    st = os.stat(f_ext)
                except:
                    # NOTE(review): bare except; presumably guards the race
                    # where the file disappears between isfile() and stat().
                    continue
                if stat.S_IMODE(st[stat.ST_MODE]) & 0111:
                    return f_ext
    return None

#
# We let the presence or absence of various utilities determine whether
# or not we bother to build certain pieces of things.  This should allow
# people to still do SCons packaging work even if they don't have all
# of the utilities installed (e.g. RPM).
#
dh_builddeb = whereis('dh_builddeb')
fakeroot = whereis('fakeroot')
gzip = whereis('gzip')
rpmbuild = whereis('rpmbuild')
# Only bother locating the VCS tools when the matching checkout
# metadata directory is actually present.
hg = os.path.exists('.hg') and whereis('hg')
svn = os.path.exists('.svn') and whereis('svn')
unzip = whereis('unzip')
# NOTE(review): shadows the 'zip' builtin; this holds the zip(1) path
# (or None) and is reused/reassigned in the packaging loop below.
zip = whereis('zip')

#
# Now grab the information that we "build" into the files.
#
date = ARGUMENTS.get('DATE')
if not date:
    import time
    date = time.strftime("%Y/%m/%d %H:%M:%S", time.localtime(time.time()))

developer = ARGUMENTS.get('DEVELOPER')
if not developer:
    # Fall back to the first login-name-ish environment variable set.
    for variable in ['USERNAME', 'LOGNAME', 'USER']:
        developer = os.environ.get(variable)
        if developer:
            break

build_system = ARGUMENTS.get('BUILD_SYSTEM')
if not build_system:
    import socket
    # Unqualified host name (strip the domain).
    build_system = socket.gethostname().split('.')[0]

version = ARGUMENTS.get('VERSION', '')
if not version:
    version = default_version

hg_status_lines = []
svn_status_lines = []

# Capture working-copy status up front; used by the generate_build_id()
# variants below to detect local modifications.
if hg:
    cmd = "%s status --all 2> /dev/null" % hg
    hg_status_lines = os.popen(cmd, "r").readlines()

if svn:
    cmd = "%s status --verbose 2> /dev/null" % svn
    svn_status_lines = os.popen(cmd, "r").readlines()

revision = ARGUMENTS.get('REVISION', '')
def generate_build_id(revision):
    # Default: an explicitly supplied REVISION= is used verbatim.
    # Redefined below when the revision is derived from hg or svn.
    return revision

if not revision and hg:
    hg_heads = os.popen("%s heads 2> /dev/null" % hg, "r").read()
    cs = re.search('changeset:\s+(\S+)', hg_heads)
    if cs:
        revision = cs.group(1)
        b = re.search('branch:\s+(\S+)', hg_heads)
        if b:
            # Prefix the branch name: "branch:changeset".
            revision = b.group(1) + ':' + revision
        def generate_build_id(revision):
            # Tag the id when the hg working copy has local changes
            # (added/modified/removed/missing files).
            result = revision
            if [l for l in hg_status_lines if l[0] in 'AMR!']:
                result = result + '[MODIFIED]'
            return result

if not revision and svn:
    svn_info = os.popen("%s info 2> /dev/null" % svn, "r").read()
    m = re.search('Revision: (\d+)', svn_info)
    if m:
        revision = m.group(1)
        def generate_build_id(revision):
            # Subversion convention: 'r' + revision number, tagged when
            # the working copy has local changes.
            result = 'r' + revision
            if [l for l in svn_status_lines if l[0] in 'ACDMR']:
                result = result + '[MODIFIED]'
            return result

checkpoint = ARGUMENTS.get('CHECKPOINT', '')
if checkpoint:
    if checkpoint == 'd':
        # 'd' => date-stamped checkpoint (YYYYMMDD).
        import time
        checkpoint = time.strftime('%Y%m%d', time.localtime(time.time()))
    elif checkpoint == 'r':
        # 'r' => revision-stamped checkpoint.
        checkpoint = 'r' + revision
    version = version + '.beta.' + checkpoint

build_id = ARGUMENTS.get('BUILD_ID')
if build_id is None:
    if revision:
        build_id = generate_build_id(revision)
    else:
        build_id = ''

# Short Python version string, e.g. '2.7'.
python_ver = sys.version[0:3]

#
# Adding some paths to sys.path, this is mainly needed
# for the doc toolchain.
#
addpaths = [os.path.abspath(os.path.join(os.getcwd(), 'bin')),
            os.path.abspath(os.path.join(os.getcwd(), 'QMTest'))]
for a in addpaths:
    if a not in sys.path:
        sys.path.append(a)


# Re-exporting LD_LIBRARY_PATH is necessary if the Python version was
# built with the --enable-shared option.

# Minimal external environment for spawned commands; re-export only the
# variables the packaging tools actually need.
ENV = { 'PATH' : os.environ['PATH'] }
for key in ['LOGNAME', 'PYTHONPATH', 'LD_LIBRARY_PATH']:
    if key in os.environ:
        ENV[key] = os.environ[key]

build_dir = ARGUMENTS.get('BUILDDIR', 'build')
if not os.path.isabs(build_dir):
    build_dir = os.path.normpath(os.path.join(os.getcwd(), build_dir))

# (variable-name, help-text) pairs rendered into the -h output below.
command_line_variables = [
    ("BUILDDIR=", "The directory in which to build the packages. " +
                  "The default is the './build' subdirectory."),

    ("BUILD_ID=", "An identifier for the specific build." +
                  "The default is the Subversion revision number."),

    ("BUILD_SYSTEM=", "The system on which the packages were built. " +
                      "The default is whatever hostname is returned " +
                      "by socket.gethostname()."),

    ("CHECKPOINT=", "The specific checkpoint release being packaged, " +
                    "which will be appended to the VERSION string. " +
                    "A value of CHECKPOINT=d will generate a string " +
                    "of 'd' plus today's date in the format YYYMMDD. " +
                    "A value of CHECKPOINT=r will generate a " +
                    "string of 'r' plus the Subversion revision " +
                    "number. Any other CHECKPOINT= string will be " +
                    "used as is. There is no default value."),

    ("DATE=", "The date string representing when the packaging " +
              "build occurred. The default is the day and time " +
              "the SConstruct file was invoked, in the format " +
              "YYYY/MM/DD HH:MM:SS."),

    ("DEVELOPER=", "The developer who created the packages. " +
                   "The default is the first set environment " +
                   "variable from the list $USERNAME, $LOGNAME, $USER."),

    ("REVISION=", "The revision number of the source being built. " +
                  "The default is the Subversion revision returned " +
                  "'svn info', with an appended string of " +
                  "'[MODIFIED]' if there are any changes in the " +
                  "working copy."),

    ("VERSION=", "The SCons version being packaged. The default " +
                 "is the hard-coded value '%s' " % default_version +
                 "from this SConstruct file."),
]

Default('.', build_dir)

# (alias, help-text) pairs; each flavor also has a build/test-* directory.
packaging_flavors = [
    ('deb', "A .deb package. (This is currently not supported.)"),

    ('rpm', "A RedHat Package Manager file."),

    ('tar-gz', "The normal .tar.gz file for end-user installation."),

    ('src-tar-gz', "A .tar.gz file containing all the source " +
                   "(including tests and documentation)."),

    ('local-tar-gz', "A .tar.gz file for dropping into other software " +
                     "for local use."),

    ('zip', "The normal .zip file for end-user installation."),

    ('src-zip', "A .zip file containing all the source " +
                "(including tests and documentation)."),

    ('local-zip', "A .zip file for dropping into other software " +
                  "for local use."),
]

# Per-flavor directories into which packages are unpacked/installed so
# runtest.py -p can test the packaged bits.
test_deb_dir = os.path.join(build_dir, "test-deb")
test_rpm_dir = os.path.join(build_dir, "test-rpm")
test_tar_gz_dir = os.path.join(build_dir, "test-tar-gz")
test_src_tar_gz_dir = os.path.join(build_dir, "test-src-tar-gz")
test_local_tar_gz_dir = os.path.join(build_dir, "test-local-tar-gz")
test_zip_dir = os.path.join(build_dir, "test-zip")
test_src_zip_dir = os.path.join(build_dir, "test-src-zip")
test_local_zip_dir = os.path.join(build_dir, "test-local-zip")

unpack_tar_gz_dir = os.path.join(build_dir, "unpack-tar-gz")
unpack_zip_dir = os.path.join(build_dir, "unpack-zip")

if platform == "win32":
    tar_hflag = ''
    python_project_subinst_dir = os.path.join("Lib", "site-packages", project)
    project_script_subinst_dir = 'Scripts'
else:
    # 'h' tells tar to dereference symlinks when archiving.
    tar_hflag = 'h'
    python_project_subinst_dir = os.path.join("lib", project)
    project_script_subinst_dir = 'bin'



import textwrap

# Column layout for the Help() tables below.
indent_fmt = ' %-26s '

Help("""\
The following aliases build packages of various types, and unpack the
contents into build/test-$PACKAGE subdirectories, which can be used by the
runtest.py -p option to run tests against what's been actually packaged:

""")

aliases = sorted(packaging_flavors + [('doc', 'The SCons documentation.')])

for alias, help_text in aliases:
    tw = textwrap.TextWrapper(
        width = 78,
        initial_indent = indent_fmt % alias,
        subsequent_indent = indent_fmt % '' + ' ',
    )
    Help(tw.fill(help_text) + '\n')

Help("""
The following command-line variables can be set:

""")

for variable, help_text in command_line_variables:
    tw = textwrap.TextWrapper(
        width = 78,
        initial_indent = indent_fmt % variable,
        subsequent_indent = indent_fmt % '' + ' ',
    )
    Help(tw.fill(help_text) + '\n')



zcat = 'gzip -d -c'

#
# Figure out if we can handle .zip files.
#
# If the zipfile module imports, zipit/unzipit are Python callbacks;
# otherwise they fall back to command-line strings using zip(1)/unzip(1)
# (or stay None, which disables .zip packaging below).
#
zipit = None
unzipit = None
try:
    import zipfile

    def zipit(env, target, source):
        """Action: zip the $PSV directory tree (relative to $CD) into
        target[0]."""
        print "Zipping %s:" % str(target[0])
        def visit(arg, dirname, names):
            # os.path.walk() visitor: add each regular file to the archive.
            for name in names:
                path = os.path.join(dirname, name)
                if os.path.isfile(path):
                    arg.write(path)
        zf = zipfile.ZipFile(str(target[0]), 'w')
        # chdir so archive members get paths relative to $CD.
        olddir = os.getcwd()
        os.chdir(env['CD'])
        try: os.path.walk(env['PSV'], visit, zf)
        finally: os.chdir(olddir)
        zf.close()

    def unzipit(env, target, source):
        """Action: extract source[0] under $UNPACK_ZIP_DIR."""
        print "Unzipping %s:" % str(source[0])
        zf = zipfile.ZipFile(str(source[0]), 'r')
        for name in zf.namelist():
            dest = os.path.join(env['UNPACK_ZIP_DIR'], name)
            dir = os.path.dirname(dest)
            try:
                os.makedirs(dir)
            except:
                # Directory already exists (or cannot be made); carry on and
                # let the file write below report any real problem.
                pass
            print dest,name
            # if the file exists, then delete it before writing
            # to it so that we don't end up trying to write to a symlink:
            if os.path.isfile(dest) or os.path.islink(dest):
                os.unlink(dest)
            if not os.path.isdir(dest):
                open(dest, 'wb').write(zf.read(name))

except:
    # No usable zipfile module; fall back to external zip/unzip commands
    # if both were found on PATH.
    if unzip and zip:
        zipit = "cd $CD && $ZIP $ZIPFLAGS $( ${TARGET.abspath} $) $PSV"
        unzipit = "$UNZIP $UNZIPFLAGS $SOURCES"

def SCons_revision(target, source, env):
    """Interpolate specific values from the environment into a file.

    This is used to copy files into a tree that gets packaged up
    into the source file package.

    Replaces the __BUILD__, __BUILDSYS__, __COPYRIGHT__, __DATE__,
    __DEVELOPER__, __FILE__, __MONTH_YEAR__, __REVISION__, __VERSION__
    and __NULL__ placeholders and copies the source file's permission
    bits onto the target.
    """
    t = str(target[0])
    s = source[0].rstr()
    contents = open(s, 'rb').read()
    # Note: We construct the __*__ substitution strings here
    # so that they don't get replaced when this file gets
    # copied into the tree for packaging.
    contents = contents.replace('__BUILD' + '__', env['BUILD'])
    contents = contents.replace('__BUILDSYS' + '__', env['BUILDSYS'])
    contents = contents.replace('__COPYRIGHT' + '__', env['COPYRIGHT'])
    contents = contents.replace('__DATE' + '__', env['DATE'])
    contents = contents.replace('__DEVELOPER' + '__', env['DEVELOPER'])
    contents = contents.replace('__FILE' + '__', str(source[0]).replace('\\', '/'))
    contents = contents.replace('__MONTH_YEAR'+ '__', env['MONTH_YEAR'])
    contents = contents.replace('__REVISION' + '__', env['REVISION'])
    contents = contents.replace('__VERSION' + '__', env['VERSION'])
    contents = contents.replace('__NULL' + '__', '')
    open(t, 'wb').write(contents)
    # Preserve the source's mode bits (os.stat()[0] is st_mode).
    os.chmod(t, os.stat(s)[0])

revaction = SCons_revision
# Rebuild when COPYRIGHT or VERSION change, not just when the source does.
revbuilder = Builder(action = Action(SCons_revision,
                                     varlist=['COPYRIGHT', 'VERSION']))

def soelim(target, source, env):
    """
    Interpolate files included in [gnt]roff source files using the
    .so directive.

    This behaves somewhat like the soelim(1) wrapper around groff, but
    makes us independent of whether the actual underlying implementation
    includes an soelim() command or the corresponding command-line option
    to groff(1).  The key behavioral difference is that this doesn't
    recursively include .so files from the include file.  Not yet, anyway.
    """
    t = str(target[0])
    s = str(source[0])
    dir, f = os.path.split(s)
    tfp = open(t, 'w')
    sfp = open(s, 'r')
    for line in sfp.readlines():
        if line[:4] in ['.so ', "'so "]:
            # Inline the referenced file (path resolved relative to the
            # source file's directory; line[4:-1] strips the newline).
            sofile = os.path.join(dir, line[4:-1])
            tfp.write(open(sofile, 'r').read())
        else:
            tfp.write(line)
    sfp.close()
    tfp.close()

def soscan(node, env, path):
    """Scanner: return the file names referenced by .so/'so directives."""
    c = node.get_text_contents()
    return re.compile(r"^[\.']so\s+(\S+)", re.M).findall(c)

soelimbuilder = Builder(action = Action(soelim),
                        source_scanner = Scanner(soscan))

# When copying local files from a Repository (Aegis),
# just make copies, don't symlink them.
SetOption('duplicate', 'copy')

env = Environment(
    ENV = ENV,

    BUILD = build_id,
    BUILDDIR = build_dir,
    BUILDSYS = build_system,
    COPYRIGHT = copyright,
    DATE = date,
    DEVELOPER = developer,
    DISTDIR = os.path.join(build_dir, 'dist'),
    MONTH_YEAR = month_year,
    REVISION = revision,
    VERSION = version,
    DH_COMPAT = 2,

    TAR_HFLAG = tar_hflag,

    ZIP = zip,
    ZIPFLAGS = '-r',
    UNZIP = unzip,
    UNZIPFLAGS = '-o -d $UNPACK_ZIP_DIR',

    ZCAT = zcat,

    RPMBUILD = rpmbuild,
    RPM2CPIO = 'rpm2cpio',

    TEST_DEB_DIR = test_deb_dir,
    TEST_RPM_DIR = test_rpm_dir,
    TEST_SRC_TAR_GZ_DIR = test_src_tar_gz_dir,
    TEST_SRC_ZIP_DIR = test_src_zip_dir,
    TEST_TAR_GZ_DIR = test_tar_gz_dir,
    TEST_ZIP_DIR = test_zip_dir,

    UNPACK_TAR_GZ_DIR = unpack_tar_gz_dir,
    UNPACK_ZIP_DIR = unpack_zip_dir,

    BUILDERS = { 'SCons_revision' : revbuilder,
                 'SOElim' : soelimbuilder },

    PYTHON = '"%s"' % sys.executable,
    PYTHONFLAGS = '-tt',
    )

# Dependencies that force a rebuild when the version or build id change.
Version_values = [Value(version), Value(build_id)]

#
# Define SCons packages.
477# 478# In the original, more complicated packaging scheme, we were going 479# to have separate packages for: 480# 481# python-scons only the build engine 482# scons-script only the script 483# scons the script plus the build engine 484# 485# We're now only delivering a single "scons" package, but this is still 486# "built" as two sub-packages (the build engine and the script), so 487# the definitions remain here, even though we're not using them for 488# separate packages. 489# 490 491python_scons = { 492 'pkg' : 'python-' + project, 493 'src_subdir' : 'engine', 494 'inst_subdir' : os.path.join('lib', 'python1.5', 'site-packages'), 495 'rpm_dir' : '/usr/lib/scons', 496 497 'debian_deps' : [ 498 'debian/changelog', 499 'debian/control', 500 'debian/copyright', 501 'debian/dirs', 502 'debian/docs', 503 'debian/postinst', 504 'debian/prerm', 505 'debian/rules', 506 ], 507 508 'files' : [ 'LICENSE.txt', 509 'README.txt', 510 'setup.cfg', 511 'setup.py', 512 ], 513 514 'filemap' : { 515 'LICENSE.txt' : '../LICENSE.txt' 516 }, 517 518 'buildermap' : {}, 519 520 'extra_rpm_files' : [], 521 522 'explicit_deps' : { 523 'SCons/__init__.py' : Version_values, 524 }, 525} 526 527# Figure out the name of a .egg-info file that might be generated 528# as part of the RPM package. There are two complicating factors. 529# 530# First, the RPM spec file we generate will just execute "python", not 531# necessarily the one in sys.executable. If *that* version of python has 532# a distutils that knows about Python eggs, then setup.py will generate a 533# .egg-info file, so we have to execute any distutils logic in a subshell. 534# 535# Second, we can't just have the subshell check for the existence of the 536# distutils.command.install_egg_info module and generate the expected 537# file name by hand, the way we used to, because different systems can 538# have slightly different .egg-info naming conventions. 
(Specifically, 539# Ubuntu overrides the default behavior to remove the Python version 540# string from the .egg-info file name.) The right way to do this is to 541# actually call into the install_egg_info() class to have it generate 542# the expected name for us. 543# 544# This is all complicated enough that we do it by writing an in-line 545# script to a temporary file and then feeding it to a separate invocation 546# of "python" to tell us the actual name of the generated .egg-info file. 547 548print_egg_info_name = """ 549try: 550 from distutils.dist import Distribution 551 from distutils.command.install_egg_info import install_egg_info 552except ImportError: 553 pass 554else: 555 dist = Distribution({'name' : "scons", 'version' : '%s'}) 556 i = install_egg_info(dist) 557 i.finalize_options() 558 import os.path 559 print os.path.split(i.outputs[0])[1] 560""" % version 561 562try: 563 fd, tfname = tempfile.mkstemp() 564 tfp = os.fdopen(fd, "w") 565 tfp.write(print_egg_info_name) 566 tfp.close() 567 egg_info_file = os.popen("python %s" % tfname).read()[:-1] 568 if egg_info_file: 569 python_scons['extra_rpm_files'].append(egg_info_file) 570finally: 571 try: 572 os.unlink(tfname) 573 except EnvironmentError: 574 pass 575 576# 577# The original packaging scheme would have have required us to push 578# the Python version number into the package name (python1.5-scons, 579# python2.0-scons, etc.), which would have required a definition 580# like the following. Leave this here in case we ever decide to do 581# this in the future, but note that this would require some modification 582# to src/engine/setup.py before it would really work. 
583# 584#python2_scons = { 585# 'pkg' : 'python2-' + project, 586# 'src_subdir' : 'engine', 587# 'inst_subdir' : os.path.join('lib', 'python2.2', 'site-packages'), 588# 589# 'debian_deps' : [ 590# 'debian/changelog', 591# 'debian/control', 592# 'debian/copyright', 593# 'debian/dirs', 594# 'debian/docs', 595# 'debian/postinst', 596# 'debian/prerm', 597# 'debian/rules', 598# ], 599# 600# 'files' : [ 601# 'LICENSE.txt', 602# 'README.txt', 603# 'setup.cfg', 604# 'setup.py', 605# ], 606# 'filemap' : { 607# 'LICENSE.txt' : '../LICENSE.txt', 608# }, 609# 'buildermap' : {}, 610#} 611# 612 613scons_script = { 614 'pkg' : project + '-script', 615 'src_subdir' : 'script', 616 'inst_subdir' : 'bin', 617 'rpm_dir' : '/usr/bin', 618 619 'debian_deps' : [ 620 'debian/changelog', 621 'debian/control', 622 'debian/copyright', 623 'debian/dirs', 624 'debian/docs', 625 'debian/postinst', 626 'debian/prerm', 627 'debian/rules', 628 ], 629 630 'files' : [ 631 'LICENSE.txt', 632 'README.txt', 633 'setup.cfg', 634 'setup.py', 635 ], 636 637 'filemap' : { 638 'LICENSE.txt' : '../LICENSE.txt', 639 'scons' : 'scons.py', 640 'sconsign' : 'sconsign.py', 641 'scons-time' : 'scons-time.py', 642 }, 643 644 'buildermap' : {}, 645 646 'extra_rpm_files' : [ 647 'scons-' + version, 648 'sconsign-' + version, 649 'scons-time-' + version, 650 ], 651 652 'explicit_deps' : { 653 'scons' : Version_values, 654 'sconsign' : Version_values, 655 }, 656} 657 658scons = { 659 'pkg' : project, 660 661 'debian_deps' : [ 662 'debian/changelog', 663 'debian/control', 664 'debian/copyright', 665 'debian/dirs', 666 'debian/docs', 667 'debian/postinst', 668 'debian/prerm', 669 'debian/rules', 670 ], 671 672 'files' : [ 673 'CHANGES.txt', 674 'LICENSE.txt', 675 'README.txt', 676 'RELEASE.txt', 677 'scons.1', 678 'sconsign.1', 679 'scons-time.1', 680 'script/scons.bat', 681 #'script/scons-post-install.py', 682 'setup.cfg', 683 'setup.py', 684 ], 685 686 'filemap' : { 687 'scons.1' : '$BUILDDIR/doc/man/scons.1', 688 
'sconsign.1' : '$BUILDDIR/doc/man/sconsign.1', 689 'scons-time.1' : '$BUILDDIR/doc/man/scons-time.1', 690 }, 691 692 'buildermap' : { 693 'scons.1' : env.SOElim, 694 'sconsign.1' : env.SOElim, 695 'scons-time.1' : env.SOElim, 696 }, 697 698 'subpkgs' : [ python_scons, scons_script ], 699 700 'subinst_dirs' : { 701 'python-' + project : python_project_subinst_dir, 702 project + '-script' : project_script_subinst_dir, 703 }, 704} 705 706scripts = ['scons', 'sconsign', 'scons-time'] 707 708src_deps = [] 709src_files = [] 710 711for p in [ scons ]: 712 # 713 # Initialize variables with the right directories for this package. 714 # 715 pkg = p['pkg'] 716 pkg_version = "%s-%s" % (pkg, version) 717 718 src = 'src' 719 if 'src_subdir' in p: 720 src = os.path.join(src, p['src_subdir']) 721 722 build = os.path.join(build_dir, pkg) 723 724 tar_gz = os.path.join(build, 'dist', "%s.tar.gz" % pkg_version) 725 platform_tar_gz = os.path.join(build, 726 'dist', 727 "%s.%s.tar.gz" % (pkg_version, platform)) 728 zip = os.path.join(build, 'dist', "%s.zip" % pkg_version) 729 platform_zip = os.path.join(build, 730 'dist', 731 "%s.%s.zip" % (pkg_version, platform)) 732 if platform == "win-amd64": 733 win32_exe = os.path.join(build, 'dist', "%s.win-amd64.exe" % pkg_version) 734 else: 735 win32_exe = os.path.join(build, 'dist', "%s.win32.exe" % pkg_version) 736 737 # 738 # Update the environment with the relevant information 739 # for this package. 740 # 741 # We can get away with calling setup.py using a directory path 742 # like this because we put a preamble in it that will chdir() 743 # to the directory in which setup.py exists. 744 # 745 setup_py = os.path.join(build, 'setup.py') 746 env.Replace(PKG = pkg, 747 PKG_VERSION = pkg_version, 748 SETUP_PY = '"%s"' % setup_py) 749 Local(setup_py) 750 751 # 752 # Read up the list of source files from our MANIFEST.in. 753 # This list should *not* include LICENSE.txt, MANIFEST, 754 # README.txt, or setup.py. 
Make a copy of the list for the 755 # destination files. 756 # 757 manifest_in = File(os.path.join(src, 'MANIFEST.in')).rstr() 758 src_files = bootstrap.parseManifestLines(src, open(manifest_in).readlines()) 759 raw_files = src_files[:] 760 dst_files = src_files[:] 761 rpm_files = [] 762 763 MANIFEST_in_list = [] 764 765 if 'subpkgs' in p: 766 # 767 # This package includes some sub-packages. Read up their 768 # MANIFEST.in files, and add them to our source and destination 769 # file lists, modifying them as appropriate to add the 770 # specified subdirs. 771 # 772 for sp in p['subpkgs']: 773 ssubdir = sp['src_subdir'] 774 isubdir = p['subinst_dirs'][sp['pkg']] 775 MANIFEST_in = File(os.path.join(src, ssubdir, 'MANIFEST.in')).rstr() 776 MANIFEST_in_list.append(MANIFEST_in) 777 files = bootstrap.parseManifestLines(os.path.join(src, ssubdir), open(MANIFEST_in).readlines()) 778 raw_files.extend(files) 779 src_files.extend([os.path.join(ssubdir, x) for x in files]) 780 for f in files: 781 r = os.path.join(sp['rpm_dir'], f) 782 rpm_files.append(r) 783 if f[-3:] == ".py": 784 rpm_files.append(r + 'c') 785 for f in sp.get('extra_rpm_files', []): 786 r = os.path.join(sp['rpm_dir'], f) 787 rpm_files.append(r) 788 files = [os.path.join(isubdir, x) for x in files] 789 dst_files.extend(files) 790 for k, f in sp['filemap'].items(): 791 if f: 792 k = os.path.join(ssubdir, k) 793 p['filemap'][k] = os.path.join(ssubdir, f) 794 for f, deps in sp['explicit_deps'].items(): 795 f = os.path.join(build, ssubdir, f) 796 env.Depends(f, deps) 797 798 # 799 # Now that we have the "normal" source files, add those files 800 # that are standard for each distribution. Note that we don't 801 # add these to dst_files, because they don't get installed. 802 # And we still have the MANIFEST to add. 803 # 804 src_files.extend(p['files']) 805 806 # 807 # Now run everything in src_file through the sed command we 808 # concocted to expand __FILE__, __VERSION__, etc. 
    #
    for b in src_files:
        # Map each file through filemap (if any) and prefix with the
        # source dir unless it's an SCons variable ($...) or absolute.
        s = p['filemap'].get(b, b)
        if not s[0] == '$' and not os.path.isabs(s):
            s = os.path.join(src, s)
        builder = p['buildermap'].get(b, env.SCons_revision)
        x = builder(os.path.join(build, b), s)
        Local(x)

    #
    # NOW, finally, we can create the MANIFEST, which we do
    # by having Python spit out the contents of the src_files
    # array we've carefully created.  After we've added
    # MANIFEST itself to the array, of course.
    #
    src_files.append("MANIFEST")
    MANIFEST_in_list.append(os.path.join(src, 'MANIFEST.in'))

    def write_src_files(target, source, **kw):
        """Action: write the sorted src_files list as the MANIFEST."""
        global src_files
        src_files.sort()
        f = open(str(target[0]), 'wb')
        for file in src_files:
            f.write(file + "\n")
        f.close()
        return 0
    env.Command(os.path.join(build, 'MANIFEST'),
                MANIFEST_in_list,
                write_src_files)

    #
    # Now go through and arrange to create whatever packages we can.
    #
    build_src_files = [os.path.join(build, x) for x in src_files]
    Local(*build_src_files)

    distutils_formats = []

    distutils_targets = [ win32_exe ]

    dist_distutils_targets = env.Install('$DISTDIR', distutils_targets)
    Local(dist_distutils_targets)
    AddPostAction(dist_distutils_targets, Chmod(dist_distutils_targets, 0644))

    if not gzip:
        print "gzip not found in %s; skipping .tar.gz package for %s." % (os.environ['PATH'], pkg)
    else:

        distutils_formats.append('gztar')

        src_deps.append(tar_gz)

        distutils_targets.extend([ tar_gz, platform_tar_gz ])

        dist_tar_gz = env.Install('$DISTDIR', tar_gz)
        dist_platform_tar_gz = env.Install('$DISTDIR', platform_tar_gz)
        Local(dist_tar_gz, dist_platform_tar_gz)
        AddPostAction(dist_tar_gz, Chmod(dist_tar_gz, 0644))
        AddPostAction(dist_platform_tar_gz, Chmod(dist_platform_tar_gz, 0644))

        #
        # Unpack the tar.gz archive created by the distutils into
        # build/unpack-tar-gz/scons-{version}.
        #
        # We'd like to replace the last three lines with the following:
        #
        #       tar zxf $SOURCES -C $UNPACK_TAR_GZ_DIR
        #
        # but that gives heartburn to Cygwin's tar, so work around it
        # with separate zcat-tar-rm commands.
        #
        unpack_tar_gz_files = [os.path.join(unpack_tar_gz_dir, pkg_version, x)
                               for x in src_files]
        env.Command(unpack_tar_gz_files, dist_tar_gz, [
            Delete(os.path.join(unpack_tar_gz_dir, pkg_version)),
            "$ZCAT $SOURCES > .temp",
            "tar xf .temp -C $UNPACK_TAR_GZ_DIR",
            Delete(".temp"),
        ])

        #
        # Run setup.py in the unpacked subdirectory to "install" everything
        # into our build/test subdirectory.  The runtest.py script will set
        # PYTHONPATH so that the tests only look under build/test-{package},
        # and under QMTest (for the testing modules TestCmd.py, TestSCons.py,
        # etc.).  This makes sure that our tests pass with what
        # we really packaged, not because of something hanging around in
        # the development directory.
        #
        # We can get away with calling setup.py using a directory path
        # like this because we put a preamble in it that will chdir()
        # to the directory in which setup.py exists.
        #
        dfiles = [os.path.join(test_tar_gz_dir, x) for x in dst_files]
        env.Command(dfiles, unpack_tar_gz_files, [
            Delete(os.path.join(unpack_tar_gz_dir, pkg_version, 'build')),
            Delete("$TEST_TAR_GZ_DIR"),
            '$PYTHON $PYTHONFLAGS "%s" install "--prefix=$TEST_TAR_GZ_DIR" --standalone-lib' % \
                os.path.join(unpack_tar_gz_dir, pkg_version, 'setup.py'),
        ])

        #
        # Generate portage files for submission to Gentoo Linux.
        #
        gentoo = os.path.join(build, 'gentoo')
        ebuild = os.path.join(gentoo, 'scons-%s.ebuild' % version)
        digest = os.path.join(gentoo, 'files', 'digest-scons-%s' % version)
        env.Command(ebuild, os.path.join('gentoo', 'scons.ebuild.in'), SCons_revision)
        def Digestify(target, source, env):
            """Action: write a Gentoo-style 'MD5 <sig> <name> <size>' digest
            line for the source archive.
            NOTE(review): uses the Python 2-only 'md5' module (removed in
            Python 3 in favor of hashlib)."""
            import md5
            src = source[0].rfile()
            contents = open(str(src)).read()
            sig = md5.new(contents).hexdigest()
            # os.stat()[6] is st_size.
            bytes = os.stat(str(src))[6]
            open(str(target[0]), 'w').write("MD5 %s %s %d\n" % (sig,
                                                                src.name,
                                                                bytes))
        env.Command(digest, tar_gz, Digestify)

    if not zipit:
        print "zip not found; skipping .zip package for %s." % pkg
    else:

        distutils_formats.append('zip')

        src_deps.append(zip)

        distutils_targets.extend([ zip, platform_zip ])

        dist_zip = env.Install('$DISTDIR', zip)
        dist_platform_zip = env.Install('$DISTDIR', platform_zip)
        Local(dist_zip, dist_platform_zip)
        AddPostAction(dist_zip, Chmod(dist_zip, 0644))
        AddPostAction(dist_platform_zip, Chmod(dist_platform_zip, 0644))

        #
        # Unpack the zip archive created by the distutils into
        # build/unpack-zip/scons-{version}.
        #
        unpack_zip_files = [os.path.join(unpack_zip_dir, pkg_version, x)
                            for x in src_files]

        env.Command(unpack_zip_files, dist_zip, [
            Delete(os.path.join(unpack_zip_dir, pkg_version)),
            unzipit,
        ])

        #
        # Run setup.py in the unpacked subdirectory to "install" everything
        # into our build/test subdirectory.
        # The runtest.py script will set
        # PYTHONPATH so that the tests only look under build/test-{package},
        # and under QMTest (for the testing modules TestCmd.py, TestSCons.py,
        # etc.).  This makes sure that our tests pass with what
        # we really packaged, not because of something hanging around in
        # the development directory.
        #
        # We can get away with calling setup.py using a directory path
        # like this because we put a preamble in it that will chdir()
        # to the directory in which setup.py exists.
        #
        dfiles = [os.path.join(test_zip_dir, x) for x in dst_files]
        env.Command(dfiles, unpack_zip_files, [
            Delete(os.path.join(unpack_zip_dir, pkg_version, 'build')),
            Delete("$TEST_ZIP_DIR"),
            '$PYTHON $PYTHONFLAGS "%s" install "--prefix=$TEST_ZIP_DIR" --standalone-lib' % \
                os.path.join(unpack_zip_dir, pkg_version, 'setup.py'),
        ])

    if not rpmbuild:
        msg = "@echo \"Warning: Can not build 'rpm': no rpmbuild utility found\""
        AlwaysBuild(Alias('rpm', [], msg))
    else:
        # Standard rpmbuild directory layout under the distutils bdist dir.
        topdir = os.path.join(build, 'build',
                              'bdist.' + platform, 'rpm')

        buildroot = os.path.join(build_dir, 'rpm-buildroot')

        BUILDdir = os.path.join(topdir, 'BUILD', pkg + '-' + version)
        RPMSdir = os.path.join(topdir, 'RPMS', 'noarch')
        SOURCESdir = os.path.join(topdir, 'SOURCES')
        SPECSdir = os.path.join(topdir, 'SPECS')
        SRPMSdir = os.path.join(topdir, 'SRPMS')

        specfile_in = os.path.join('rpm', "%s.spec.in" % pkg)
        specfile = os.path.join(SPECSdir, "%s-1.spec" % pkg_version)
        sourcefile = os.path.join(SOURCESdir, "%s.tar.gz" % pkg_version);
        noarch_rpm = os.path.join(RPMSdir, "%s-1.noarch.rpm" % pkg_version)
        src_rpm = os.path.join(SRPMSdir, "%s-1.src.rpm" % pkg_version)

        def spec_function(target, source, env):
            """Generate the RPM .spec file from the template file.

            This fills in the %files portion of the .spec file with a
            list generated from our MANIFEST(s), so we don't have to
            maintain multiple lists.
            """
            c = open(str(source[0]), 'rb').read()
            c = c.replace('__VERSION' + '__', env['VERSION'])
            c = c.replace('__RPM_FILES' + '__', env['RPM_FILES'])
            open(str(target[0]), 'wb').write(c)

        rpm_files.sort()
        rpm_files_str = "\n".join(rpm_files) + "\n"
        # Clone so the RPM_FILES variable doesn't leak into other builds.
        rpm_spec_env = env.Clone(RPM_FILES = rpm_files_str)
        rpm_spec_action = Action(spec_function, varlist=['RPM_FILES'])
        rpm_spec_env.Command(specfile, specfile_in, rpm_spec_action)

        env.InstallAs(sourcefile, tar_gz)
        Local(sourcefile)

        targets = [ noarch_rpm, src_rpm ]
        # $(...$) excludes the path from SCons command signatures so a
        # relocated build tree doesn't force a rebuild.
        cmd = "$RPMBUILD --define '_topdir $(%s$)' --buildroot %s -ba $SOURCES" % (topdir, buildroot)
        if not os.path.isdir(BUILDdir):
            cmd = ("$( mkdir -p %s; $)" % BUILDdir) + cmd
        t = env.Command(targets, specfile, cmd)
        env.Depends(t, sourcefile)

        dist_noarch_rpm = env.Install('$DISTDIR', noarch_rpm)
        dist_src_rpm = env.Install('$DISTDIR', src_rpm)
        Local(dist_noarch_rpm, dist_src_rpm)
        AddPostAction(dist_noarch_rpm, Chmod(dist_noarch_rpm, 0644))
        AddPostAction(dist_src_rpm, Chmod(dist_src_rpm, 0644))

        # Unpack the built RPM into the test-rpm tree for runtest.py -p.
        dfiles = [os.path.join(test_rpm_dir, 'usr', x) for x in dst_files]
        env.Command(dfiles,
                    dist_noarch_rpm,
                    "$RPM2CPIO $SOURCES | (cd $TEST_RPM_DIR && cpio -id)")

    if dh_builddeb and fakeroot:
        # Our Debian packaging builds directly into build/dist,
        # so we don't need to Install() the .debs.
        # The built deb is called just x.y.z, not x.y.z.final.0 so strip those off:
        deb_version = '.'.join(version.split('.')[0:3])
        deb = os.path.join(build_dir, 'dist', "%s_%s_all.deb" % (pkg, deb_version))
        # print "Building deb into %s (version=%s)"%(deb, deb_version)
        for d in p['debian_deps']:
            b = env.SCons_revision(os.path.join(build, d), d)
            env.Depends(deb, b)
            Local(b)
        env.Command(deb, build_src_files, [
            "cd %s && fakeroot make -f debian/rules PYTHON=$PYTHON BUILDDEB_OPTIONS=--destdir=../../build/dist binary" % build,
        ])

        # Map the generic 'lib/scons/' install prefix to the
        # Debian-installed 'lib/pythonX.Y/site-packages/' location.
        old = os.path.join('lib', 'scons', '')
        new = os.path.join('lib', 'python' + python_ver, 'site-packages', '')
        def xxx(s, old=old, new=new):
            # Rewrite one dst_files path to its on-disk .deb location.
            if s[:len(old)] == old:
                s = new + s[len(old):]
            return os.path.join('usr', s)
        dfiles = [os.path.join(test_deb_dir, xxx(x)) for x in dst_files]
        env.Command(dfiles,
                    deb,
                    "dpkg --fsys-tarfile $SOURCES | (cd $TEST_DEB_DIR && tar -xf -)")


    #
    # Use the Python distutils to generate the appropriate packages.
    #
    commands = [
        Delete(os.path.join(build, 'build', 'lib')),
        Delete(os.path.join(build, 'build', 'scripts')),
    ]

    if distutils_formats:
        commands.append(Delete(os.path.join(build,
                                            'build',
                                            'bdist.' + platform,
                                            'dumb')))
        for format in distutils_formats:
            commands.append("$PYTHON $PYTHONFLAGS $SETUP_PY bdist_dumb -f %s" % format)

        commands.append("$PYTHON $PYTHONFLAGS $SETUP_PY sdist --formats=%s" % \
                        ','.join(distutils_formats))

    commands.append("$PYTHON $PYTHONFLAGS $SETUP_PY bdist_wininst --plat-name win32 --user-access-control auto")

    env.Command(distutils_targets, build_src_files, commands)

    #
    # Now create local packages for people who want to let people
    # build their SCons-buildable packages without having to
    # install SCons.
    #
    s_l_v = '%s-local-%s' % (pkg, version)

    local = pkg + '-local'
    build_dir_local = os.path.join(build_dir, local)
    build_dir_local_slv = os.path.join(build_dir, local, s_l_v)

    dist_local_tar_gz = os.path.join("$DISTDIR/%s.tar.gz" % s_l_v)
    dist_local_zip = os.path.join("$DISTDIR/%s.zip" % s_l_v)
    AddPostAction(dist_local_tar_gz, Chmod(dist_local_tar_gz, 0644))
    AddPostAction(dist_local_zip, Chmod(dist_local_zip, 0644))

    commands = [
        Delete(build_dir_local),
        '$PYTHON $PYTHONFLAGS $SETUP_PY install "--install-script=%s" "--install-lib=%s" --no-install-man --no-compile --standalone-lib --no-version-script' % \
            (build_dir_local, build_dir_local_slv),
    ]

    for script in scripts:
        # add .py extension for scons-local scripts on non-windows platforms
        if platform == "win32":
            break
        local_script = os.path.join(build_dir_local, script)
        commands.append(Move(local_script + '.py', local_script))

    # Everything except the scripts lives under the s_l_v subdirectory;
    # the scripts themselves sit at the top with a .py extension.
    rf = [x for x in raw_files if not x in scripts]
    rf = [os.path.join(s_l_v, x) for x in rf]
    for script in scripts:
        rf.append("%s.py" % script)
    local_targets = [os.path.join(build_dir_local, x) for x in rf]

    env.Command(local_targets, build_src_files, commands)

    scons_LICENSE = os.path.join(build_dir_local, 'scons-LICENSE')
    l = env.SCons_revision(scons_LICENSE, 'LICENSE-local')
    local_targets.append(l)
    Local(l)

    scons_README = os.path.join(build_dir_local, 'scons-README')
    l = env.SCons_revision(scons_README, 'README-local')
    local_targets.append(l)
    Local(l)

    if gzip:
        if platform == "win32":
            # avoid problem with tar interpreting c:/ as a remote machine
            tar_cargs = '-cz --force-local -f'
        else:
            tar_cargs = '-czf'
        env.Command(dist_local_tar_gz,
                    local_targets,
                    "cd %s && tar %s $( ${TARGET.abspath} $) *" % (build_dir_local, tar_cargs))

unpack_targets = [os.path.join(test_local_tar_gz_dir, x) for x in rf] 1145 commands = [Delete(test_local_tar_gz_dir), 1146 Mkdir(test_local_tar_gz_dir), 1147 "cd %s && tar xzf $( ${SOURCE.abspath} $)" % test_local_tar_gz_dir] 1148 1149 env.Command(unpack_targets, dist_local_tar_gz, commands) 1150 1151 if zipit: 1152 env.Command(dist_local_zip, local_targets, zipit, 1153 CD = build_dir_local, PSV = '.') 1154 1155 unpack_targets = [os.path.join(test_local_zip_dir, x) for x in rf] 1156 commands = [Delete(test_local_zip_dir), 1157 Mkdir(test_local_zip_dir), 1158 unzipit] 1159 1160 env.Command(unpack_targets, dist_local_zip, unzipit, 1161 UNPACK_ZIP_DIR = test_local_zip_dir) 1162 1163# 1164# 1165# 1166Export('build_dir', 'env') 1167 1168SConscript('QMTest/SConscript') 1169 1170# 1171# 1172# 1173files = [ 1174 'runtest.py', 1175] 1176 1177def copy(target, source, env): 1178 t = str(target[0]) 1179 s = str(source[0]) 1180 open(t, 'wb').write(open(s, 'rb').read()) 1181 1182for file in files: 1183 # Guarantee that real copies of these files always exist in 1184 # build/. If there's a symlink there, then this is an Aegis 1185 # build and we blow them away now so that they'll get "built" later. 1186 p = os.path.join(build_dir, file) 1187 if os.path.islink(p): 1188 os.unlink(p) 1189 if not os.path.isabs(p): 1190 p = '#' + p 1191 sp = env.Command(p, file, copy) 1192 Local(sp) 1193 1194# 1195# Documentation. 1196# 1197Export('build_dir', 'env', 'whereis', 'revaction') 1198 1199SConscript('doc/SConscript') 1200 1201# 1202# If we're running in a Subversion working directory, pack up a complete 1203# source archive from the project files and files in the change. 
1204# 1205 1206sfiles = None 1207if hg_status_lines: 1208 slines = [l for l in hg_status_lines if l[0] in 'ACM'] 1209 sfiles = [l.split()[-1] for l in slines] 1210elif svn_status_lines: 1211 slines = [l for l in svn_status_lines if l[0] in ' MA'] 1212 sentries = [l.split()[-1] for l in slines] 1213 sfiles = list(filter(os.path.isfile, sentries)) 1214else: 1215 print "Not building in a Mercurial or Subversion tree; skipping building src package." 1216 1217if sfiles: 1218 remove_patterns = [ 1219 '.hgt/*', 1220 '.svnt/*', 1221 '*.aeignore', 1222 '*.hgignore', 1223 'www/*', 1224 ] 1225 1226 for p in remove_patterns: 1227 sfiles = [s for s in sfiles if not fnmatch.fnmatch(s, p)] 1228 1229 if sfiles: 1230 ps = "%s-src" % project 1231 psv = "%s-%s" % (ps, version) 1232 b_ps = os.path.join(build_dir, ps) 1233 b_psv = os.path.join(build_dir, psv) 1234 b_psv_stamp = b_psv + '-stamp' 1235 1236 src_tar_gz = os.path.join(build_dir, 'dist', '%s.tar.gz' % psv) 1237 src_zip = os.path.join(build_dir, 'dist', '%s.zip' % psv) 1238 1239 Local(src_tar_gz, src_zip) 1240 1241 for file in sfiles: 1242 env.SCons_revision(os.path.join(b_ps, file), file) 1243 1244 b_ps_files = [os.path.join(b_ps, x) for x in sfiles] 1245 cmds = [ 1246 Delete(b_psv), 1247 Copy(b_psv, b_ps), 1248 Touch("$TARGET"), 1249 ] 1250 1251 env.Command(b_psv_stamp, src_deps + b_ps_files, cmds) 1252 1253 Local(*b_ps_files) 1254 1255 if gzip: 1256 1257 env.Command(src_tar_gz, b_psv_stamp, 1258 "tar cz${TAR_HFLAG} -f $TARGET -C build %s" % psv) 1259 1260 # 1261 # Unpack the archive into build/unpack/scons-{version}. 1262 # 1263 unpack_tar_gz_files = [os.path.join(unpack_tar_gz_dir, psv, x) 1264 for x in sfiles] 1265 1266 # 1267 # We'd like to replace the last three lines with the following: 1268 # 1269 # tar zxf $SOURCES -C $UNPACK_TAR_GZ_DIR 1270 # 1271 # but that gives heartburn to Cygwin's tar, so work around it 1272 # with separate zcat-tar-rm commands. 
1273 env.Command(unpack_tar_gz_files, src_tar_gz, [ 1274 Delete(os.path.join(unpack_tar_gz_dir, psv)), 1275 "$ZCAT $SOURCES > .temp", 1276 "tar xf .temp -C $UNPACK_TAR_GZ_DIR", 1277 Delete(".temp"), 1278 ]) 1279 1280 # 1281 # Run setup.py in the unpacked subdirectory to "install" everything 1282 # into our build/test subdirectory. The runtest.py script will set 1283 # PYTHONPATH so that the tests only look under build/test-{package}, 1284 # and under QMTest (for the testing modules TestCmd.py, 1285 # TestSCons.py, etc.). This makes sure that our tests pass with 1286 # what we really packaged, not because of something hanging around 1287 # in the development directory. 1288 # 1289 # We can get away with calling setup.py using a directory path 1290 # like this because we put a preamble in it that will chdir() 1291 # to the directory in which setup.py exists. 1292 # 1293 dfiles = [os.path.join(test_src_tar_gz_dir, x) for x in dst_files] 1294 scons_lib_dir = os.path.join(unpack_tar_gz_dir, psv, 'src', 'engine') 1295 ENV = env.Dictionary('ENV').copy() 1296 ENV['SCONS_LIB_DIR'] = scons_lib_dir 1297 ENV['USERNAME'] = developer 1298 env.Command(dfiles, unpack_tar_gz_files, 1299 [ 1300 Delete(os.path.join(unpack_tar_gz_dir, 1301 psv, 1302 'build', 1303 'scons', 1304 'build')), 1305 Delete("$TEST_SRC_TAR_GZ_DIR"), 1306 'cd "%s" && $PYTHON $PYTHONFLAGS "%s" "%s" VERSION="$VERSION"' % \ 1307 (os.path.join(unpack_tar_gz_dir, psv), 1308 os.path.join('src', 'script', 'scons.py'), 1309 os.path.join('build', 'scons')), 1310 '$PYTHON $PYTHONFLAGS "%s" install "--prefix=$TEST_SRC_TAR_GZ_DIR" --standalone-lib' % \ 1311 os.path.join(unpack_tar_gz_dir, 1312 psv, 1313 'build', 1314 'scons', 1315 'setup.py'), 1316 ], 1317 ENV = ENV) 1318 1319 if zipit: 1320 1321 env.Command(src_zip, b_psv_stamp, zipit, CD = 'build', PSV = psv) 1322 1323 # 1324 # Unpack the archive into build/unpack/scons-{version}. 
1325 # 1326 unpack_zip_files = [os.path.join(unpack_zip_dir, psv, x) 1327 for x in sfiles] 1328 1329 env.Command(unpack_zip_files, src_zip, [ 1330 Delete(os.path.join(unpack_zip_dir, psv)), 1331 unzipit 1332 ]) 1333 1334 # 1335 # Run setup.py in the unpacked subdirectory to "install" everything 1336 # into our build/test subdirectory. The runtest.py script will set 1337 # PYTHONPATH so that the tests only look under build/test-{package}, 1338 # and under QMTest (for the testing modules TestCmd.py, 1339 # TestSCons.py, etc.). This makes sure that our tests pass with 1340 # what we really packaged, not because of something hanging 1341 # around in the development directory. 1342 # 1343 # We can get away with calling setup.py using a directory path 1344 # like this because we put a preamble in it that will chdir() 1345 # to the directory in which setup.py exists. 1346 # 1347 dfiles = [os.path.join(test_src_zip_dir, x) for x in dst_files] 1348 scons_lib_dir = os.path.join(unpack_zip_dir, psv, 'src', 'engine') 1349 ENV = env.Dictionary('ENV').copy() 1350 ENV['SCONS_LIB_DIR'] = scons_lib_dir 1351 ENV['USERNAME'] = developer 1352 env.Command(dfiles, unpack_zip_files, 1353 [ 1354 Delete(os.path.join(unpack_zip_dir, 1355 psv, 1356 'build', 1357 'scons', 1358 'build')), 1359 Delete("$TEST_SRC_ZIP_DIR"), 1360 'cd "%s" && $PYTHON $PYTHONFLAGS "%s" "%s" VERSION="$VERSION"' % \ 1361 (os.path.join(unpack_zip_dir, psv), 1362 os.path.join('src', 'script', 'scons.py'), 1363 os.path.join('build', 'scons')), 1364 '$PYTHON $PYTHONFLAGS "%s" install "--prefix=$TEST_SRC_ZIP_DIR" --standalone-lib' % \ 1365 os.path.join(unpack_zip_dir, 1366 psv, 1367 'build', 1368 'scons', 1369 'setup.py'), 1370 ], 1371 ENV = ENV) 1372 1373for pf, help_text in packaging_flavors: 1374 Alias(pf, [ 1375 os.path.join(build_dir, 'test-'+pf), 1376 os.path.join(build_dir, 'QMTest'), 1377 os.path.join(build_dir, 'runtest.py'), 1378 ]) 1379 1380