#! /usr/bin/env python
# encoding: utf-8

"""
waf-powered distributed network builds, with a network cache.

Caching files from a server has advantages over a NFS/Samba shared folder:

- builds are much faster because they use local files
- builds just continue to work in case of a network glitch
- permissions are much simpler to manage
"""

import os, urllib, tarfile, re, shutil, tempfile, sys
from collections import OrderedDict
from waflib import Context, Utils, Logs

# Python 2/3 compatibility shims for the urllib package split
try:
	from urllib.parse import urlencode
except ImportError:
	urlencode = urllib.urlencode

def safe_urlencode(data):
	"""Encode *data* as a POST body: bytes on Python 3, str on Python 2."""
	x = urlencode(data)
	try:
		x = x.encode('utf-8')
	except Exception:
		# Python 2 str: leave the encoded form as-is
		pass
	return x

try:
	from urllib.error import URLError
except ImportError:
	from urllib2 import URLError

try:
	from urllib.request import Request, urlopen
except ImportError:
	from urllib2 import Request, urlopen

DISTNETCACHE = os.environ.get('DISTNETCACHE', '/tmp/distnetcache')
DISTNETSERVER = os.environ.get('DISTNETSERVER', 'http://localhost:8000/cgi-bin/')
TARFORMAT = 'w:bz2'
TIMEOUT = 60
REQUIRES = 'requires.txt'

# strips '#'-style comments from requires files
re_com = re.compile(r'\s*#.*', re.M)

def total_version_order(num):
	"""Return a string key so that dotted version numbers sort by component."""
	lst = num.split('.')
	template = '%10s' * len(lst)
	ret = template % tuple(lst)
	return ret

def get_distnet_cache():
	"""Path of the local package cache (overridable from the project wscript)."""
	return getattr(Context.g_module, 'DISTNETCACHE', DISTNETCACHE)

def get_server_url():
	"""Base URL of the package server (overridable from the project wscript)."""
	return getattr(Context.g_module, 'DISTNETSERVER', DISTNETSERVER)

def get_download_url():
	return '%s/download.py' % get_server_url()

def get_upload_url():
	return '%s/upload.py' % get_server_url()

def get_resolve_url():
	return '%s/resolve.py' % get_server_url()

def send_package_name():
	"""Location of the tarball created by the 'package' command."""
	out = getattr(Context.g_module, 'out', 'build')
	pkgfile = '%s/package_to_upload.tarfile' % out
	return pkgfile

class package(Context.Context):
	"""Create a package archive to upload to the distnet server ('waf package')."""
	fun = 'package'
	cmd = 'package'

	def execute(self):
		try:
			files = self.files
		except AttributeError:
			files = self.files = []

		Context.Context.execute(self)
		pkgfile = send_package_name()
		if pkgfile not in files:
			if REQUIRES not in files:
				files.append(REQUIRES)
			self.make_tarfile(pkgfile, files, add_to_package=False)

	def make_tarfile(self, filename, files, **kw):
		"""Create *filename* containing *files*, normalizing ownership to root.

		:param kw: ``add_to_package`` appends the archive to ``self.files``;
		           ``bare`` stores files without a leading directory.
		"""
		if kw.get('add_to_package', True):
			self.files.append(filename)

		with tarfile.open(filename, TARFORMAT) as tar:
			endname = os.path.split(filename)[-1]
			endname = endname.split('.')[0] + '/'
			for x in files:
				tarinfo = tar.gettarinfo(x, x)
				# normalize ownership so the archive is reproducible
				tarinfo.uid = tarinfo.gid = 0
				tarinfo.uname = tarinfo.gname = 'root'
				tarinfo.size = os.stat(x).st_size

				# TODO - more archive creation options?
				if kw.get('bare', True):
					tarinfo.name = os.path.split(x)[1]
				else:
					tarinfo.name = endname + x # todo, if tuple, then..
				Logs.debug('distnet: adding %r to %s', tarinfo.name, filename)
				with open(x, 'rb') as f:
					tar.addfile(tarinfo, f)
		Logs.info('Created %s', filename)

class publish(Context.Context):
	"""Upload the package archive created by 'waf package' to the server."""
	fun = 'publish'
	cmd = 'publish'

	def execute(self):
		if hasattr(Context.g_module, 'publish'):
			Context.Context.execute(self)
		mod = Context.g_module

		rfile = getattr(self, 'rfile', send_package_name())
		if not os.path.isfile(rfile):
			self.fatal('Create the release file with "waf release" first! %r' % rfile)

		fdata = Utils.readf(rfile, m='rb')
		data = safe_urlencode([('pkgdata', fdata), ('pkgname', mod.APPNAME), ('pkgver', mod.VERSION)])

		req = Request(get_upload_url(), data)
		response = urlopen(req, timeout=TIMEOUT)
		data = response.read().strip()

		if sys.hexversion>0x300000f:
			data = data.decode('utf-8')

		if data != 'ok':
			self.fatal('Could not publish the package %r' % data)

class constraint(object):
	"""One requirement line of the form ``pkgname,version[,key=value...]``."""
	def __init__(self, line=''):
		self.required_line = line
		self.info = []

		line = line.strip()
		if not line:
			return

		lst = line.split(',')
		if lst:
			self.pkgname = lst[0]
			# tolerate a missing version field instead of raising IndexError;
			# check_errors() reports an empty required_version as a conflict
			self.required_version = lst[1] if len(lst) > 1 else ''
			for k in lst:
				a, _, c = k.partition('=')
				if a and c:
					self.info.append((a, c))

	def __str__(self):
		buf = []
		buf.append(self.pkgname)
		buf.append(self.required_version)
		for k in self.info:
			buf.append('%s=%s' % k)
		return ','.join(buf)

	def __repr__(self):
		return "requires %s-%s" % (self.pkgname, self.required_version)

	def human_display(self, pkgname, pkgver):
		"""Readable form of the dependency edge pkgname-pkgver -> this constraint."""
		return '%s-%s requires %s-%s' % (pkgname, pkgver, self.pkgname, self.required_version)

	def why(self):
		"""Return the list of 'reason' annotations attached to this constraint."""
		ret = []
		for x in self.info:
			if x[0] == 'reason':
				ret.append(x[1])
		return ret

	def add_reason(self, reason):
		self.info.append(('reason', reason))

def parse_constraints(text):
	"""Parse a requires-file body into a list of :class:`constraint` objects."""
	assert(text is not None)
	constraints = []
	text = re.sub(re_com, '', text)
	lines = text.splitlines()
	for line in lines:
		line = line.strip()
		if not line:
			continue
		constraints.append(constraint(line))
	return constraints

def list_package_versions(cachedir, pkgname):
	"""Return the cached versions of *pkgname*, newest first ([] if absent)."""
	pkgdir = os.path.join(cachedir, pkgname)
	try:
		versions = os.listdir(pkgdir)
	except OSError:
		return []
	versions.sort(key=total_version_order)
	versions.reverse()
	return versions

class package_reader(Context.Context):
	"""Resolve the project requirements, remotely or from the local cache."""
	cmd = 'solver'
	fun = 'solver'

	def __init__(self, **kw):
		Context.Context.__init__(self, **kw)

		self.myproject = getattr(Context.g_module, 'APPNAME', 'project')
		self.myversion = getattr(Context.g_module, 'VERSION', '1.0')
		# (pkgname, pkgver) -> parsed constraints, to avoid re-reading files
		self.cache_constraints = {}
		self.constraints = []

	def compute_dependencies(self, filename=REQUIRES):
		"""Fill ``self.constraints`` by asking the server, or locally with --offline."""
		text = Utils.readf(filename)
		data = safe_urlencode([('text', text)])

		if '--offline' in sys.argv:
			self.constraints = self.local_resolve(text)
		else:
			req = Request(get_resolve_url(), data)
			try:
				response = urlopen(req, timeout=TIMEOUT)
			except URLError as e:
				# fall back to the local cache when the server is unreachable
				Logs.warn('The package server is down! %r', e)
				self.constraints = self.local_resolve(text)
			else:
				ret = response.read()
				try:
					ret = ret.decode('utf-8')
				except Exception:
					pass
				self.trace(ret)
				self.constraints = parse_constraints(ret)
		self.check_errors()

	def check_errors(self):
		"""Abort the build if any constraint could not be satisfied."""
		errors = False
		for c in self.constraints:
			if not c.required_version:
				errors = True

				reasons = c.why()
				if len(reasons) == 1:
					Logs.error('%s but no matching package could be found in this repository', reasons[0])
				else:
					Logs.error('Conflicts on package %r:', c.pkgname)
					for r in reasons:
						Logs.error(' %s', r)
		if errors:
			self.fatal('The package requirements cannot be satisfied!')

	def load_constraints(self, pkgname, pkgver, requires=REQUIRES):
		"""Read (and cache) the requires file of a cached package version."""
		try:
			return self.cache_constraints[(pkgname, pkgver)]
		except KeyError:
			text = Utils.readf(os.path.join(get_distnet_cache(), pkgname, pkgver, requires))
			ret = parse_constraints(text)
			self.cache_constraints[(pkgname, pkgver)] = ret
			return ret

	def apply_constraint(self, domain, constraint):
		"""Narrow *domain* to the versions matching the constraint ('*' wildcards)."""
		vname = constraint.required_version.replace('*', '.*')
		rev = re.compile(vname, re.M)
		ret = [x for x in domain if rev.match(x)]
		return ret

	def trace(self, *k):
		# debug output, enabled by setting self.debug
		if getattr(self, 'debug', None):
			Logs.error(*k)

	def solve(self, packages_to_versions=None, packages_to_constraints=None, pkgname='', pkgver='', todo=None, done=None):
		"""Propagate constraints breadth-first; return (versions, constraints) maps."""
		# None sentinels instead of mutable default arguments ({} / []),
		# which would be shared between calls
		if packages_to_versions is None:
			packages_to_versions = {}
		if packages_to_constraints is None:
			packages_to_constraints = {}
		if todo is None:
			todo = []
		if done is None:
			done = []

		# breadth first search
		n_packages_to_versions = dict(packages_to_versions)
		n_packages_to_constraints = dict(packages_to_constraints)

		self.trace("calling solve with %r %r %r" % (packages_to_versions, todo, done))
		done = done + [pkgname]

		constraints = self.load_constraints(pkgname, pkgver)
		self.trace("constraints %r" % constraints)

		for k in constraints:
			try:
				domain = n_packages_to_versions[k.pkgname]
			except KeyError:
				domain = list_package_versions(get_distnet_cache(), k.pkgname)


			self.trace("constraints?")
			if not k.pkgname in done:
				todo = todo + [k.pkgname]

			self.trace("domain before %s -> %s, %r" % (pkgname, k.pkgname, domain))

			# apply the constraint
			domain = self.apply_constraint(domain, k)

			self.trace("domain after %s -> %s, %r" % (pkgname, k.pkgname, domain))

			n_packages_to_versions[k.pkgname] = domain

			# then store the constraint applied
			constraints = list(packages_to_constraints.get(k.pkgname, []))
			constraints.append((pkgname, pkgver, k))
			n_packages_to_constraints[k.pkgname] = constraints

			if not domain:
				self.trace("no domain while processing constraint %r from %r %r" % (domain, pkgname, pkgver))
				return (n_packages_to_versions, n_packages_to_constraints)

		# next package on the todo list
		if not todo:
			return (n_packages_to_versions, n_packages_to_constraints)

		n_pkgname = todo[0]
		n_pkgver = n_packages_to_versions[n_pkgname][0]
		tmp = dict(n_packages_to_versions)
		tmp[n_pkgname] = [n_pkgver]

		self.trace("fixed point %s" % n_pkgname)

		return self.solve(tmp, n_packages_to_constraints, n_pkgname, n_pkgver, todo[1:], done)

	def get_results(self):
		"""Render the resolved constraints, one per line."""
		return '\n'.join([str(c) for c in self.constraints])

	def solution_to_constraints(self, versions, constraints):
		"""Convert the solver maps back into a list of :class:`constraint` objects."""
		solution = []
		for p in versions:
			c = constraint()
			solution.append(c)

			c.pkgname = p
			if versions[p]:
				c.required_version = versions[p][0]
			else:
				# empty version marks an unsatisfiable package; see check_errors()
				c.required_version = ''
			for (from_pkgname, from_pkgver, c2) in constraints.get(p, ''):
				c.add_reason(c2.human_display(from_pkgname, from_pkgver))
		return solution

	def local_resolve(self, text):
		"""Resolve the requirements against the local cache only."""
		self.cache_constraints[(self.myproject, self.myversion)] = parse_constraints(text)
		p2v = OrderedDict({self.myproject: [self.myversion]})
		(versions, constraints) = self.solve(p2v, {}, self.myproject, self.myversion, [])
		return self.solution_to_constraints(versions, constraints)

	def download_to_file(self, pkgname, pkgver, subdir, tmp):
		"""Stream one package file from the server into the temporary file *tmp*."""
		data = safe_urlencode([('pkgname', pkgname), ('pkgver', pkgver), ('pkgfile', subdir)])
		req = urlopen(get_download_url(), data, timeout=TIMEOUT)
		with open(tmp, 'wb') as f:
			while True:
				buf = req.read(8192)
				if not buf:
					break
				f.write(buf)

	def extract_tar(self, subdir, pkgdir, tmpfile):
		"""Extract *tmpfile* into a temp dir, then rename it to its final place."""
		# NOTE(review): the archive comes from the network; extractall() on an
		# untrusted tar can write outside the target dir. Consider
		# extractall(temp, filter='data') on Python >= 3.12.
		with tarfile.open(tmpfile) as f:
			temp = tempfile.mkdtemp(dir=pkgdir)
			try:
				f.extractall(temp)
				os.rename(temp, os.path.join(pkgdir, subdir))
			finally:
				try:
					shutil.rmtree(temp)
				except Exception:
					pass

	def get_pkg_dir(self, pkgname, pkgver, subdir):
		"""Return the cache path for (pkgname, pkgver, subdir), downloading it if needed."""
		pkgdir = os.path.join(get_distnet_cache(), pkgname, pkgver)
		if not os.path.isdir(pkgdir):
			os.makedirs(pkgdir)

		target = os.path.join(pkgdir, subdir)

		if os.path.exists(target):
			return target

		(fd, tmp) = tempfile.mkstemp(dir=pkgdir)
		try:
			os.close(fd)
			self.download_to_file(pkgname, pkgver, subdir, tmp)
			if subdir == REQUIRES:
				os.rename(tmp, target)
			else:
				self.extract_tar(subdir, pkgdir, tmp)
		finally:
			try:
				os.remove(tmp)
			except OSError:
				pass

		return target

	def __iter__(self):
		# iterate the resolved dependencies, skipping the project itself
		if not self.constraints:
			self.compute_dependencies()
		for x in self.constraints:
			if x.pkgname == self.myproject:
				continue
			yield x

	def execute(self):
		self.compute_dependencies()

packages = package_reader()

def load_tools(ctx, extra):
	"""Download the dependencies and load their waf_*.py tools into *ctx*."""
	global packages
	for c in packages:
		packages.get_pkg_dir(c.pkgname, c.required_version, extra)
		noarchdir = packages.get_pkg_dir(c.pkgname, c.required_version, 'noarch')
		for x in os.listdir(noarchdir):
			if x.startswith('waf_') and x.endswith('.py'):
				# x.rstrip('.py') would strip any trailing '.', 'p' or 'y'
				# characters (e.g. 'waf_numpy.py' -> 'waf_num'); slice the
				# '.py' extension off instead
				ctx.load([x[:-3]], tooldir=[noarchdir])

def options(opt):
	opt.add_option('--offline', action='store_true')
	packages.execute()
	load_tools(opt, REQUIRES)

def configure(conf):
	load_tools(conf, conf.variant)

def build(bld):
	load_tools(bld, bld.variant)