1import abc 2import argparse 3import importlib 4import json 5import logging 6import multiprocessing 7import os 8import platform 9import signal 10import subprocess 11import sys 12import threading 13import time 14import traceback 15import urllib 16import uuid 17from collections import defaultdict, OrderedDict 18from itertools import chain, product 19from typing import ClassVar, List, Set, Tuple 20 21from localpaths import repo_root # type: ignore 22 23from manifest.sourcefile import read_script_metadata, js_meta_re, parse_variants # type: ignore 24from wptserve import server as wptserve, handlers 25from wptserve import stash 26from wptserve import config 27from wptserve.handlers import filesystem_path, wrap_pipeline 28from wptserve.utils import get_port, HTTPException, http2_compatible 29from mod_pywebsocket import standalone as pywebsocket 30 31 32EDIT_HOSTS_HELP = ("Please ensure all the necessary WPT subdomains " 33 "are mapped to a loopback device in /etc/hosts.\n" 34 "See https://web-platform-tests.org/running-tests/from-local-system.html#system-setup " 35 "for instructions.") 36 37 38def replace_end(s, old, new): 39 """ 40 Given a string `s` that ends with `old`, replace that occurrence of `old` 41 with `new`. 42 """ 43 assert s.endswith(old) 44 return s[:-len(old)] + new 45 46 47def domains_are_distinct(a, b): 48 a_parts = a.split(".") 49 b_parts = b.split(".") 50 min_length = min(len(a_parts), len(b_parts)) 51 slice_index = -1 * min_length 52 53 return a_parts[slice_index:] != b_parts[slice_index:] 54 55 56class WrapperHandler(object): 57 58 __meta__ = abc.ABCMeta 59 60 headers = [] # type: ClassVar[List[Tuple[str, str]]] 61 62 def __init__(self, base_path=None, url_base="/"): 63 self.base_path = base_path 64 self.url_base = url_base 65 self.handler = handlers.handler(self.handle_request) 66 67 def __call__(self, request, response): 68 self.handler(request, response) 69 70 def handle_request(self, request, response): 71 headers = self.headers + handlers.load_headers( 72 request, self._get_filesystem_path(request)) 73 for header_name, header_value in headers: 74 response.headers.set(header_name, header_value) 75 76 self.check_exposure(request) 77 78 path = self._get_path(request.url_parts.path, True) 79 query = request.url_parts.query 80 if query: 81 query = "?" + query 82 meta = "\n".join(self._get_meta(request)) 83 script = "\n".join(self._get_script(request)) 84 response.content = self.wrapper % {"meta": meta, "script": script, "path": path, "query": query} 85 wrap_pipeline(path, request, response) 86 87 def _get_path(self, path, resource_path): 88 """Convert the path from an incoming request into a path corresponding to an "unwrapped" 89 resource e.g. the file on disk that will be loaded in the wrapper. 90 91 :param path: Path from the HTTP request 92 :param resource_path: Boolean used to control whether to get the path for the resource that 93 this wrapper will load or the associated file on disk. 94 Typically these are the same but may differ when there are multiple 95 layers of wrapping e.g. for a .any.worker.html input the underlying disk file is 96 .any.js but the top level html file loads a resource with a 97 .any.worker.js extension, which itself loads the .any.js file. 98 If True return the path to the resource that the wrapper will load, 99 otherwise return the path to the underlying file on disk.""" 100 for item in self.path_replace: 101 if len(item) == 2: 102 src, dest = item 103 else: 104 assert len(item) == 3 105 src = item[0] 106 dest = item[2 if resource_path else 1] 107 if path.endswith(src): 108 path = replace_end(path, src, dest) 109 return path 110 111 def _get_filesystem_path(self, request): 112 """Get the path of the underlying resource file on disk.""" 113 return self._get_path(filesystem_path(self.base_path, request, self.url_base), False) 114 115 def _get_metadata(self, request): 116 """Get an iterator over script metadata based on // META comments in the 117 associated js file. 118 119 :param request: The Request being processed. 120 """ 121 path = self._get_filesystem_path(request) 122 try: 123 with open(path, "rb") as f: 124 for key, value in read_script_metadata(f, js_meta_re): 125 yield key, value 126 except IOError: 127 raise HTTPException(404) 128 129 def _get_meta(self, request): 130 """Get an iterator over strings to inject into the wrapper document 131 based on // META comments in the associated js file. 132 133 :param request: The Request being processed. 134 """ 135 for key, value in self._get_metadata(request): 136 replacement = self._meta_replacement(key, value) 137 if replacement: 138 yield replacement 139 140 def _get_script(self, request): 141 """Get an iterator over strings to inject into the wrapper document 142 based on // META comments in the associated js file. 143 144 :param request: The Request being processed. 145 """ 146 for key, value in self._get_metadata(request): 147 replacement = self._script_replacement(key, value) 148 if replacement: 149 yield replacement 150 151 @abc.abstractproperty 152 def path_replace(self): 153 # A list containing a mix of 2 item tuples with (input suffix, output suffix) 154 # and 3-item tuples with (input suffix, filesystem suffix, resource suffix) 155 # for the case where we want a different path in the generated resource to 156 # the actual path on the filesystem (e.g. when there is another handler 157 # that will wrap the file). 158 return None 159 160 @abc.abstractproperty 161 def wrapper(self): 162 # String template with variables path and meta for wrapper document 163 return None 164 165 @abc.abstractmethod 166 def _meta_replacement(self, key, value): 167 # Get the string to insert into the wrapper document, given 168 # a specific metadata key: value pair. 169 pass 170 171 @abc.abstractmethod 172 def check_exposure(self, request): 173 # Raise an exception if this handler shouldn't be exposed after all. 174 pass 175 176 177class HtmlWrapperHandler(WrapperHandler): 178 global_type = None # type: ClassVar[str] 179 headers = [('Content-Type', 'text/html')] 180 181 def check_exposure(self, request): 182 if self.global_type: 183 globals = u"" 184 for (key, value) in self._get_metadata(request): 185 if key == "global": 186 globals = value 187 break 188 189 if self.global_type not in parse_variants(globals): 190 raise HTTPException(404, "This test cannot be loaded in %s mode" % 191 self.global_type) 192 193 def _meta_replacement(self, key, value): 194 if key == "timeout": 195 if value == "long": 196 return '<meta name="timeout" content="long">' 197 if key == "title": 198 value = value.replace("&", "&").replace("<", "<") 199 return '<title>%s</title>' % value 200 return None 201 202 def _script_replacement(self, key, value): 203 if key == "script": 204 attribute = value.replace("&", "&").replace('"', """) 205 return '<script src="%s"></script>' % attribute 206 return None 207 208 209class WorkersHandler(HtmlWrapperHandler): 210 global_type = "dedicatedworker" 211 path_replace = [(".any.worker.html", ".any.js", ".any.worker.js"), 212 (".worker.html", ".worker.js")] 213 wrapper = """<!doctype html> 214<meta charset=utf-8> 215%(meta)s 216<script src="/resources/testharness.js"></script> 217<script src="/resources/testharnessreport.js"></script> 218<div id=log></div> 219<script> 220fetch_tests_from_worker(new Worker("%(path)s%(query)s")); 221</script> 222""" 223 224 225class WorkerModulesHandler(HtmlWrapperHandler): 226 global_type = "dedicatedworker-module" 227 path_replace = [(".any.worker-module.html", ".any.js", ".any.worker-module.js"), 228 (".worker.html", ".worker.js")] 229 wrapper = """<!doctype html> 230<meta charset=utf-8> 231%(meta)s 232<script src="/resources/testharness.js"></script> 233<script src="/resources/testharnessreport.js"></script> 234<div id=log></div> 235<script> 236fetch_tests_from_worker(new Worker("%(path)s%(query)s", { type: "module" })); 237</script> 238""" 239 240 241class WindowHandler(HtmlWrapperHandler): 242 path_replace = [(".window.html", ".window.js")] 243 wrapper = """<!doctype html> 244<meta charset=utf-8> 245%(meta)s 246<script src="/resources/testharness.js"></script> 247<script src="/resources/testharnessreport.js"></script> 248%(script)s 249<div id=log></div> 250<script src="%(path)s"></script> 251""" 252 253 254class AnyHtmlHandler(HtmlWrapperHandler): 255 global_type = "window" 256 path_replace = [(".any.html", ".any.js")] 257 wrapper = """<!doctype html> 258<meta charset=utf-8> 259%(meta)s 260<script> 261self.GLOBAL = { 262 isWindow: function() { return true; }, 263 isWorker: function() { return false; }, 264}; 265</script> 266<script src="/resources/testharness.js"></script> 267<script src="/resources/testharnessreport.js"></script> 268%(script)s 269<div id=log></div> 270<script src="%(path)s"></script> 271""" 272 273 274class SharedWorkersHandler(HtmlWrapperHandler): 275 global_type = "sharedworker" 276 path_replace = [(".any.sharedworker.html", ".any.js", ".any.worker.js")] 277 wrapper = """<!doctype html> 278<meta charset=utf-8> 279%(meta)s 280<script src="/resources/testharness.js"></script> 281<script src="/resources/testharnessreport.js"></script> 282<div id=log></div> 283<script> 284fetch_tests_from_worker(new SharedWorker("%(path)s%(query)s")); 285</script> 286""" 287 288 289class SharedWorkerModulesHandler(HtmlWrapperHandler): 290 global_type = "sharedworker-module" 291 path_replace = [(".any.sharedworker-module.html", ".any.js", ".any.worker-module.js")] 292 wrapper = """<!doctype html> 293<meta charset=utf-8> 294%(meta)s 295<script src="/resources/testharness.js"></script> 296<script src="/resources/testharnessreport.js"></script> 297<div id=log></div> 298<script> 299fetch_tests_from_worker(new SharedWorker("%(path)s%(query)s", { type: "module" })); 300</script> 301""" 302 303 304class ServiceWorkersHandler(HtmlWrapperHandler): 305 global_type = "serviceworker" 306 path_replace = [(".any.serviceworker.html", ".any.js", ".any.worker.js")] 307 wrapper = """<!doctype html> 308<meta charset=utf-8> 309%(meta)s 310<script src="/resources/testharness.js"></script> 311<script src="/resources/testharnessreport.js"></script> 312<div id=log></div> 313<script> 314(async function() { 315 const scope = 'does/not/exist'; 316 let reg = await navigator.serviceWorker.getRegistration(scope); 317 if (reg) await reg.unregister(); 318 reg = await navigator.serviceWorker.register("%(path)s%(query)s", {scope}); 319 fetch_tests_from_worker(reg.installing); 320})(); 321</script> 322""" 323 324 325class ServiceWorkerModulesHandler(HtmlWrapperHandler): 326 global_type = "serviceworker-module" 327 path_replace = [(".any.serviceworker-module.html", 328 ".any.js", ".any.worker-module.js")] 329 wrapper = """<!doctype html> 330<meta charset=utf-8> 331%(meta)s 332<script src="/resources/testharness.js"></script> 333<script src="/resources/testharnessreport.js"></script> 334<div id=log></div> 335<script> 336(async function() { 337 const scope = 'does/not/exist'; 338 let reg = await navigator.serviceWorker.getRegistration(scope); 339 if (reg) await reg.unregister(); 340 reg = await navigator.serviceWorker.register( 341 "%(path)s%(query)s", 342 { scope, type: 'module' }, 343 ); 344 fetch_tests_from_worker(reg.installing); 345})(); 346</script> 347""" 348 349 350class BaseWorkerHandler(WrapperHandler): 351 headers = [('Content-Type', 'text/javascript')] 352 353 def _meta_replacement(self, key, value): 354 return None 355 356 @abc.abstractmethod 357 def _create_script_import(self, attribute): 358 # Take attribute (a string URL to a JS script) and return JS source to import the script 359 # into the worker. 360 pass 361 362 def _script_replacement(self, key, value): 363 if key == "script": 364 attribute = value.replace("\\", "\\\\").replace('"', '\\"') 365 return self._create_script_import(attribute) 366 if key == "title": 367 value = value.replace("\\", "\\\\").replace('"', '\\"') 368 return 'self.META_TITLE = "%s";' % value 369 return None 370 371 372class ClassicWorkerHandler(BaseWorkerHandler): 373 path_replace = [(".any.worker.js", ".any.js")] 374 wrapper = """%(meta)s 375self.GLOBAL = { 376 isWindow: function() { return false; }, 377 isWorker: function() { return true; }, 378}; 379importScripts("/resources/testharness.js"); 380%(script)s 381importScripts("%(path)s"); 382done(); 383""" 384 385 def _create_script_import(self, attribute): 386 return 'importScripts("%s")' % attribute 387 388 389class ModuleWorkerHandler(BaseWorkerHandler): 390 path_replace = [(".any.worker-module.js", ".any.js")] 391 wrapper = """%(meta)s 392self.GLOBAL = { 393 isWindow: function() { return false; }, 394 isWorker: function() { return true; }, 395}; 396import "/resources/testharness.js"; 397%(script)s 398import "%(path)s"; 399done(); 400""" 401 402 def _create_script_import(self, attribute): 403 return 'import "%s";' % attribute 404 405 406rewrites = [("GET", "/resources/WebIDLParser.js", "/resources/webidl2/lib/webidl2.js")] 407 408 409class RoutesBuilder(object): 410 def __init__(self): 411 self.forbidden_override = [("GET", "/tools/runner/*", handlers.file_handler), 412 ("POST", "/tools/runner/update_manifest.py", 413 handlers.python_script_handler)] 414 415 self.forbidden = [("*", "/_certs/*", handlers.ErrorHandler(404)), 416 ("*", "/tools/*", handlers.ErrorHandler(404)), 417 ("*", "{spec}/tools/*", handlers.ErrorHandler(404)), 418 ("*", "/results/", handlers.ErrorHandler(404))] 419 420 self.extra = [] 421 422 self.mountpoint_routes = OrderedDict() 423 424 self.add_mount_point("/", None) 425 426 def get_routes(self): 427 routes = self.forbidden_override + self.forbidden + self.extra 428 # Using reversed here means that mount points that are added later 429 # get higher priority. This makes sense since / is typically added 430 # first. 431 for item in reversed(self.mountpoint_routes.values()): 432 routes.extend(item) 433 return routes 434 435 def add_handler(self, method, route, handler): 436 self.extra.append((str(method), str(route), handler)) 437 438 def add_static(self, path, format_args, content_type, route, headers=None): 439 if headers is None: 440 headers = {} 441 handler = handlers.StaticHandler(path, format_args, content_type, **headers) 442 self.add_handler("GET", str(route), handler) 443 444 def add_mount_point(self, url_base, path): 445 url_base = "/%s/" % url_base.strip("/") if url_base != "/" else "/" 446 447 self.mountpoint_routes[url_base] = [] 448 449 routes = [ 450 ("GET", "*.worker.html", WorkersHandler), 451 ("GET", "*.worker-module.html", WorkerModulesHandler), 452 ("GET", "*.window.html", WindowHandler), 453 ("GET", "*.any.html", AnyHtmlHandler), 454 ("GET", "*.any.sharedworker.html", SharedWorkersHandler), 455 ("GET", "*.any.sharedworker-module.html", SharedWorkerModulesHandler), 456 ("GET", "*.any.serviceworker.html", ServiceWorkersHandler), 457 ("GET", "*.any.serviceworker-module.html", ServiceWorkerModulesHandler), 458 ("GET", "*.any.worker.js", ClassicWorkerHandler), 459 ("GET", "*.any.worker-module.js", ModuleWorkerHandler), 460 ("GET", "*.asis", handlers.AsIsHandler), 461 ("GET", "/.well-known/origin-policy", handlers.PythonScriptHandler), 462 ("*", "*.py", handlers.PythonScriptHandler), 463 ("GET", "*", handlers.FileHandler) 464 ] 465 466 for (method, suffix, handler_cls) in routes: 467 self.mountpoint_routes[url_base].append( 468 (method, 469 "%s%s" % (url_base if url_base != "/" else "", suffix), 470 handler_cls(base_path=path, url_base=url_base))) 471 472 def add_file_mount_point(self, file_url, base_path): 473 assert file_url.startswith("/") 474 url_base = file_url[0:file_url.rfind("/") + 1] 475 self.mountpoint_routes[file_url] = [("GET", file_url, handlers.FileHandler(base_path=base_path, url_base=url_base))] 476 477 478def get_route_builder(logger, aliases, config): 479 builder = RoutesBuilder() 480 for alias in aliases: 481 url = alias["url-path"] 482 directory = alias["local-dir"] 483 if not url.startswith("/") or len(directory) == 0: 484 logger.error("\"url-path\" value must start with '/'.") 485 continue 486 if url.endswith("/"): 487 builder.add_mount_point(url, directory) 488 else: 489 builder.add_file_mount_point(url, directory) 490 return builder 491 492 493class ServerProc(object): 494 def __init__(self, mp_context, scheme=None): 495 self.proc = None 496 self.daemon = None 497 self.mp_context = mp_context 498 self.stop_flag = mp_context.Event() 499 self.scheme = scheme 500 501 def start(self, init_func, host, port, paths, routes, bind_address, config, log_handlers, **kwargs): 502 self.proc = self.mp_context.Process(target=self.create_daemon, 503 args=(init_func, host, port, paths, routes, bind_address, 504 config, log_handlers), 505 name='%s on port %s' % (self.scheme, port), 506 kwargs=kwargs) 507 self.proc.daemon = True 508 self.proc.start() 509 510 def create_daemon(self, init_func, host, port, paths, routes, bind_address, 511 config, log_handlers, **kwargs): 512 # Ensure that when we start this in a new process we have the global lock 513 # in the logging module unlocked 514 importlib.reload(logging) 515 516 logger = get_logger(config.log_level, log_handlers) 517 518 if sys.platform == "darwin": 519 # on Darwin, NOFILE starts with a very low limit (256), so bump it up a little 520 # by way of comparison, Debian starts with a limit of 1024, Windows 512 521 import resource # local, as it only exists on Unix-like systems 522 maxfilesperproc = int(subprocess.check_output( 523 ["sysctl", "-n", "kern.maxfilesperproc"] 524 ).strip()) 525 soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE) 526 # 2048 is somewhat arbitrary, but gives us some headroom for wptrunner --parallel 527 # note that it's expected that 2048 will be the min here 528 new_soft = min(2048, maxfilesperproc, hard) 529 if soft < new_soft: 530 resource.setrlimit(resource.RLIMIT_NOFILE, (new_soft, hard)) 531 try: 532 self.daemon = init_func(logger, host, port, paths, routes, bind_address, config, **kwargs) 533 except OSError: 534 logger.critical("Socket error on port %s" % port, file=sys.stderr) 535 raise 536 except Exception: 537 logger.critical(traceback.format_exc()) 538 raise 539 540 if self.daemon: 541 try: 542 self.daemon.start() 543 try: 544 self.stop_flag.wait() 545 except KeyboardInterrupt: 546 pass 547 finally: 548 self.daemon.stop() 549 except Exception: 550 logger.critical(traceback.format_exc()) 551 raise 552 553 def stop(self, timeout=None): 554 self.stop_flag.set() 555 self.proc.join(timeout) 556 557 def is_alive(self): 558 return self.proc.is_alive() 559 560 561def check_subdomains(logger, config, routes, mp_context, log_handlers): 562 paths = config.paths 563 bind_address = config.bind_address 564 565 host = config.server_host 566 port = get_port() 567 logger.debug("Going to use port %d to check subdomains" % port) 568 569 wrapper = ServerProc(mp_context) 570 wrapper.start(start_http_server, host, port, paths, routes, 571 bind_address, config, log_handlers) 572 573 url = "http://{}:{}/".format(host, port) 574 connected = False 575 for i in range(10): 576 try: 577 urllib.request.urlopen(url) 578 connected = True 579 break 580 except urllib.error.URLError: 581 time.sleep(1) 582 583 if not connected: 584 logger.critical("Failed to connect to test server " 585 "on {}. {}".format(url, EDIT_HOSTS_HELP)) 586 sys.exit(1) 587 588 for domain in config.domains_set: 589 if domain == host: 590 continue 591 592 try: 593 urllib.request.urlopen("http://%s:%d/" % (domain, port)) 594 except Exception: 595 logger.critical("Failed probing domain {}. {}".format(domain, EDIT_HOSTS_HELP)) 596 sys.exit(1) 597 598 wrapper.stop() 599 600 601def make_hosts_file(config, host): 602 rv = [] 603 604 for domain in config.domains_set: 605 rv.append("%s\t%s\n" % (host, domain)) 606 607 # Windows interpets the IP address 0.0.0.0 as non-existent, making it an 608 # appropriate alias for non-existent hosts. However, UNIX-like systems 609 # interpret the same address to mean any IP address, which is inappropraite 610 # for this context. These systems do not reserve any value for this 611 # purpose, so the inavailability of the domains must be taken for granted. 612 # 613 # https://github.com/web-platform-tests/wpt/issues/10560 614 if platform.uname()[0] == "Windows": 615 for not_domain in config.not_domains_set: 616 rv.append("0.0.0.0\t%s\n" % not_domain) 617 618 return "".join(rv) 619 620 621def start_servers(logger, host, ports, paths, routes, bind_address, config, 622 mp_context, log_handlers, **kwargs): 623 servers = defaultdict(list) 624 for scheme, ports in ports.items(): 625 assert len(ports) == {"http": 2, "https": 2}.get(scheme, 1) 626 627 # If trying to start HTTP/2.0 server, check compatibility 628 if scheme == 'h2' and not http2_compatible(): 629 logger.error('Cannot start HTTP/2.0 server as the environment is not compatible. ' + 630 'Requires OpenSSL 1.0.2+') 631 continue 632 633 for port in ports: 634 if port is None: 635 continue 636 637 init_func = { 638 "http": start_http_server, 639 "http-private": start_http_server, 640 "http-public": start_http_server, 641 "https": start_https_server, 642 "https-private": start_https_server, 643 "https-public": start_https_server, 644 "h2": start_http2_server, 645 "ws": start_ws_server, 646 "wss": start_wss_server, 647 "quic-transport": start_quic_transport_server, 648 }[scheme] 649 650 server_proc = ServerProc(mp_context, scheme=scheme) 651 server_proc.start(init_func, host, port, paths, routes, bind_address, 652 config, log_handlers, **kwargs) 653 servers[scheme].append((port, server_proc)) 654 655 return servers 656 657 658def startup_failed(logger): 659 logger.critical(EDIT_HOSTS_HELP) 660 sys.exit(1) 661 662 663def start_http_server(logger, host, port, paths, routes, bind_address, config, **kwargs): 664 try: 665 return wptserve.WebTestHttpd(host=host, 666 port=port, 667 doc_root=paths["doc_root"], 668 routes=routes, 669 rewrites=rewrites, 670 bind_address=bind_address, 671 config=config, 672 use_ssl=False, 673 key_file=None, 674 certificate=None, 675 latency=kwargs.get("latency")) 676 except Exception: 677 startup_failed(logger) 678 679 680def start_https_server(logger, host, port, paths, routes, bind_address, config, **kwargs): 681 try: 682 return wptserve.WebTestHttpd(host=host, 683 port=port, 684 doc_root=paths["doc_root"], 685 routes=routes, 686 rewrites=rewrites, 687 bind_address=bind_address, 688 config=config, 689 use_ssl=True, 690 key_file=config.ssl_config["key_path"], 691 certificate=config.ssl_config["cert_path"], 692 encrypt_after_connect=config.ssl_config["encrypt_after_connect"], 693 latency=kwargs.get("latency")) 694 except Exception: 695 startup_failed(logger) 696 697 698def start_http2_server(logger, host, port, paths, routes, bind_address, config, **kwargs): 699 try: 700 return wptserve.WebTestHttpd(host=host, 701 port=port, 702 handler_cls=wptserve.Http2WebTestRequestHandler, 703 doc_root=paths["doc_root"], 704 ws_doc_root=paths["ws_doc_root"], 705 routes=routes, 706 rewrites=rewrites, 707 bind_address=bind_address, 708 config=config, 709 use_ssl=True, 710 key_file=config.ssl_config["key_path"], 711 certificate=config.ssl_config["cert_path"], 712 encrypt_after_connect=config.ssl_config["encrypt_after_connect"], 713 latency=kwargs.get("latency"), 714 http2=True) 715 except Exception: 716 startup_failed(logger) 717 718 719class WebSocketDaemon(object): 720 def __init__(self, host, port, doc_root, handlers_root, bind_address, ssl_config): 721 logger = logging.getLogger() 722 self.host = host 723 cmd_args = ["-p", port, 724 "-d", doc_root, 725 "-w", handlers_root] 726 727 if ssl_config is not None: 728 cmd_args += ["--tls", 729 "--private-key", ssl_config["key_path"], 730 "--certificate", ssl_config["cert_path"]] 731 732 if (bind_address): 733 cmd_args = ["-H", host] + cmd_args 734 opts, args = pywebsocket._parse_args_and_config(cmd_args) 735 opts.cgi_directories = [] 736 opts.is_executable_method = None 737 self.server = pywebsocket.WebSocketServer(opts) 738 ports = [item[0].getsockname()[1] for item in self.server._sockets] 739 if not ports: 740 # TODO: Fix the logging configuration in WebSockets processes 741 # see https://github.com/web-platform-tests/wpt/issues/22719 742 logger.critical("Failed to start websocket server on port %s, " 743 "is something already using that port?" % port, file=sys.stderr) 744 raise OSError() 745 assert all(item == ports[0] for item in ports) 746 self.port = ports[0] 747 self.started = False 748 self.server_thread = None 749 750 def start(self): 751 self.started = True 752 self.server_thread = threading.Thread(target=self.server.serve_forever) 753 self.server_thread.setDaemon(True) # don't hang on exit 754 self.server_thread.start() 755 756 def stop(self): 757 """ 758 Stops the server. 759 760 If the server is not running, this method has no effect. 761 """ 762 if self.started: 763 try: 764 self.server.shutdown() 765 self.server.server_close() 766 self.server_thread.join() 767 self.server_thread = None 768 except AttributeError: 769 pass 770 self.started = False 771 self.server = None 772 773 774def start_ws_server(logger, host, port, paths, routes, bind_address, config, **kwargs): 775 try: 776 return WebSocketDaemon(host, 777 str(port), 778 repo_root, 779 config.paths["ws_doc_root"], 780 bind_address, 781 ssl_config=None) 782 except Exception: 783 startup_failed(logger) 784 785 786def start_wss_server(logger, host, port, paths, routes, bind_address, config, **kwargs): 787 try: 788 return WebSocketDaemon(host, 789 str(port), 790 repo_root, 791 config.paths["ws_doc_root"], 792 bind_address, 793 config.ssl_config) 794 except Exception: 795 startup_failed(logger) 796 797 798class QuicTransportDaemon(object): 799 def __init__(self, host, port, handlers_path=None, private_key=None, certificate=None, log_level=None): 800 args = ["python3", "wpt", "serve-quic-transport"] 801 if host: 802 args += ["--host", host] 803 if port: 804 args += ["--port", str(port)] 805 if private_key: 806 args += ["--private-key", private_key] 807 if certificate: 808 args += ["--certificate", certificate] 809 if handlers_path: 810 args += ["--handlers-path", handlers_path] 811 if log_level == "debug": 812 args += ["--verbose"] 813 self.command = args 814 self.proc = None 815 816 def start(self): 817 def handle_signal(*_): 818 if self.proc: 819 try: 820 self.proc.terminate() 821 except OSError: 822 # It's fine if the child already exits. 823 pass 824 self.proc.wait() 825 sys.exit(0) 826 827 signal.signal(signal.SIGTERM, handle_signal) 828 signal.signal(signal.SIGINT, handle_signal) 829 830 self.proc = subprocess.Popen(self.command) 831 # Give the server a second to start and then check. 832 time.sleep(1) 833 if self.proc.poll(): 834 sys.exit(1) 835 836 837def start_quic_transport_server(logger, host, port, paths, routes, bind_address, config, **kwargs): 838 try: 839 return QuicTransportDaemon(host, 840 port, 841 private_key=config.ssl_config["key_path"], 842 certificate=config.ssl_config["cert_path"], 843 log_level=config.log_level) 844 except Exception: 845 startup_failed(logger) 846 847 848def start(logger, config, routes, mp_context, log_handlers, **kwargs): 849 host = config["server_host"] 850 ports = config.ports 851 paths = config.paths 852 bind_address = config["bind_address"] 853 854 logger.debug("Using ports: %r" % ports) 855 856 servers = start_servers(logger, host, ports, paths, routes, bind_address, config, mp_context, 857 log_handlers, **kwargs) 858 859 return servers 860 861 862def iter_servers(servers): 863 for servers in servers.values(): 864 for port, server in servers: 865 yield server 866 867 868def _make_subdomains_product(s: Set[str], depth: int = 2) -> Set[str]: 869 return {u".".join(x) for x in chain(*(product(s, repeat=i) for i in range(1, depth+1)))} 870 871 872def _make_origin_policy_subdomains(limit: int) -> Set[str]: 873 return {u"op%d" % x for x in range(1,limit+1)} 874 875 876_subdomains = {u"www", 877 u"www1", 878 u"www2", 879 u"天気の良い日", 880 u"élève"} 881 882_not_subdomains = {u"nonexistent"} 883 884_subdomains = _make_subdomains_product(_subdomains) 885 886# Origin policy subdomains need to not be reused by any other tests, since origin policies have 887# origin-wide impacts like installing a CSP or Feature Policy that could interfere with features 888# under test. 889# See https://github.com/web-platform-tests/rfcs/pull/44. 890_subdomains |= _make_origin_policy_subdomains(99) 891 892_not_subdomains = _make_subdomains_product(_not_subdomains) 893 894 895class ConfigBuilder(config.ConfigBuilder): 896 """serve config 897 898 This subclasses wptserve.config.ConfigBuilder to add serve config options. 899 """ 900 901 _default = { 902 "browser_host": "web-platform.test", 903 "alternate_hosts": { 904 "alt": "not-web-platform.test" 905 }, 906 "doc_root": repo_root, 907 "ws_doc_root": os.path.join(repo_root, "websockets", "handlers"), 908 "server_host": None, 909 "ports": { 910 "http": [8000, "auto"], 911 "http-private": ["auto"], 912 "http-public": ["auto"], 913 "https": [8443, 8444], 914 "https-private": ["auto"], 915 "https-public": ["auto"], 916 "ws": ["auto"], 917 "wss": ["auto"], 918 }, 919 "check_subdomains": True, 920 "log_level": "info", 921 "bind_address": True, 922 "ssl": { 923 "type": "pregenerated", 924 "encrypt_after_connect": False, 925 "openssl": { 926 "openssl_binary": "openssl", 927 "base_path": "_certs", 928 "password": "web-platform-tests", 929 "force_regenerate": False, 930 "duration": 30, 931 "base_conf_path": None 932 }, 933 "pregenerated": { 934 "host_key_path": os.path.join(repo_root, "tools", "certs", "web-platform.test.key"), 935 "host_cert_path": os.path.join(repo_root, "tools", "certs", "web-platform.test.pem") 936 }, 937 "none": {} 938 }, 939 "aliases": [] 940 } 941 942 computed_properties = ["ws_doc_root"] + config.ConfigBuilder.computed_properties 943 944 def __init__(self, logger, *args, **kwargs): 945 if "subdomains" not in kwargs: 946 kwargs["subdomains"] = _subdomains 947 if "not_subdomains" not in kwargs: 948 kwargs["not_subdomains"] = _not_subdomains 949 super(ConfigBuilder, self).__init__( 950 logger, 951 *args, 952 **kwargs 953 ) 954 with self as c: 955 browser_host = c.get("browser_host") 956 alternate_host = c.get("alternate_hosts", {}).get("alt") 957 958 if not domains_are_distinct(browser_host, alternate_host): 959 raise ValueError( 960 "Alternate host must be distinct from browser host" 961 ) 962 963 def _get_ws_doc_root(self, data): 964 if data["ws_doc_root"] is not None: 965 return data["ws_doc_root"] 966 else: 967 return os.path.join(data["doc_root"], "websockets", "handlers") 968 969 def _get_paths(self, data): 970 rv = super(ConfigBuilder, self)._get_paths(data) 971 rv["ws_doc_root"] = data["ws_doc_root"] 972 return rv 973 974 975def build_config(logger, override_path=None, config_cls=ConfigBuilder, **kwargs): 976 rv = config_cls(logger) 977 978 enable_http2 = kwargs.get("h2") 979 if enable_http2 is None: 980 enable_http2 = True 981 if enable_http2: 982 rv._default["ports"]["h2"] = [9000] 983 984 if kwargs.get("quic_transport"): 985 rv._default["ports"]["quic-transport"] = [10000] 986 987 if override_path and os.path.exists(override_path): 988 with open(override_path) as f: 989 override_obj = json.load(f) 990 rv.update(override_obj) 991 992 if kwargs.get("config_path"): 993 other_path = os.path.abspath(os.path.expanduser(kwargs.get("config_path"))) 994 if os.path.exists(other_path): 995 with open(other_path) as f: 996 override_obj = json.load(f) 997 rv.update(override_obj) 998 else: 999 raise ValueError("Config path %s does not exist" % other_path) 1000 1001 if kwargs.get("verbose"): 1002 rv.log_level = "debug" 1003 1004 overriding_path_args = [("doc_root", "Document root"), 1005 ("ws_doc_root", "WebSockets document root")] 1006 for key, title in overriding_path_args: 1007 value = kwargs.get(key) 1008 if value is None: 1009 continue 1010 value = os.path.abspath(os.path.expanduser(value)) 1011 if not os.path.exists(value): 1012 raise ValueError("%s path %s does not exist" % (title, value)) 1013 setattr(rv, key, value) 1014 1015 return rv 1016 1017 1018def get_parser(): 1019 parser = argparse.ArgumentParser() 1020 parser.add_argument("--latency", type=int, 1021 help="Artificial latency to add before sending http responses, in ms") 1022 parser.add_argument("--config", action="store", dest="config_path", 1023 help="Path to external config file") 1024 parser.add_argument("--doc_root", action="store", dest="doc_root", 1025 help="Path to document root. Overrides config.") 1026 parser.add_argument("--ws_doc_root", action="store", dest="ws_doc_root", 1027 help="Path to WebSockets document root. Overrides config.") 1028 parser.add_argument("--alias_file", action="store", dest="alias_file", 1029 help="File with entries for aliases/multiple doc roots. In form of `/ALIAS_NAME/, DOC_ROOT\\n`") 1030 parser.add_argument("--h2", action="store_true", dest="h2", default=None, 1031 help=argparse.SUPPRESS) 1032 parser.add_argument("--no-h2", action="store_false", dest="h2", default=None, 1033 help="Disable the HTTP/2.0 server") 1034 parser.add_argument("--quic-transport", action="store_true", help="Enable QUIC server for WebTransport") 1035 parser.add_argument("--exit-after-start", action="store_true", help="Exit after starting servers") 1036 parser.add_argument("--verbose", action="store_true", help="Enable verbose logging") 1037 parser.set_defaults(report=False) 1038 parser.set_defaults(is_wave=False) 1039 return parser 1040 1041 1042class MpContext(object): 1043 def __getattr__(self, name): 1044 return getattr(multiprocessing, name) 1045 1046 1047def get_logger(log_level, log_handlers): 1048 """Get a logger configured to log at level log_level 1049 1050 If the logger has existing handlers the log_handlers argument is ignored. 1051 Otherwise the handlers in log_handlers are added to the logger. If there are 1052 no log_handlers passed and no configured handlers, a stream handler is added 1053 to the logger. 1054 1055 Typically this is called once per process to set up logging in that process. 1056 1057 :param log_level: - A string representing a log level e.g. "info" 1058 :param log_handlers: - Optional list of Handler objects. 1059 """ 1060 logger = logging.getLogger() 1061 logger.setLevel(getattr(logging, log_level.upper())) 1062 if not logger.hasHandlers(): 1063 if log_handlers is not None: 1064 for handler in log_handlers: 1065 logger.addHandler(handler) 1066 else: 1067 handler = logging.StreamHandler(sys.stdout) 1068 formatter = logging.Formatter("[%(asctime)s %(processName)s] %(levelname)s - %(message)s") 1069 handler.setFormatter(formatter) 1070 logger.addHandler(handler) 1071 return logger 1072 1073 1074def run(config_cls=ConfigBuilder, route_builder=None, mp_context=None, log_handlers=None, 1075 **kwargs): 1076 logger = get_logger("INFO", log_handlers) 1077 1078 if mp_context is None: 1079 if hasattr(multiprocessing, "get_context"): 1080 mp_context = multiprocessing.get_context() 1081 else: 1082 mp_context = MpContext() 1083 1084 with build_config(logger, 1085 os.path.join(repo_root, "config.json"), 1086 config_cls=config_cls, 1087 **kwargs) as config: 1088 # This sets the right log level 1089 logger = get_logger(config.log_level, log_handlers) 1090 1091 bind_address = config["bind_address"] 1092 1093 if kwargs.get("alias_file"): 1094 with open(kwargs["alias_file"], 'r') as alias_file: 1095 for line in alias_file: 1096 alias, doc_root = [x.strip() for x in line.split(',')] 1097 config["aliases"].append({ 1098 'url-path': alias, 1099 'local-dir': doc_root, 1100 }) 1101 1102 if route_builder is None: 1103 route_builder = get_route_builder 1104 routes = route_builder(logger, config.aliases, config).get_routes() 1105 1106 if config["check_subdomains"]: 1107 check_subdomains(logger, config, routes, mp_context, log_handlers) 1108 1109 stash_address = None 1110 if bind_address: 1111 stash_address = (config.server_host, get_port("")) 1112 logger.debug("Going to use port %d for stash" % stash_address[1]) 1113 1114 with stash.StashServer(stash_address, authkey=str(uuid.uuid4())): 1115 servers = start(logger, config, routes, mp_context, log_handlers, **kwargs) 1116 1117 if not kwargs["exit_after_start"]: 1118 try: 1119 # Periodically check if all the servers are alive 1120 server_process_exited = False 1121 while not server_process_exited: 1122 for server in iter_servers(servers): 1123 server.proc.join(1) 1124 if not server.proc.is_alive(): 1125 server_process_exited = True 1126 break 1127 except KeyboardInterrupt: 1128 pass 1129 1130 failed_subproc = 0 1131 for server in iter_servers(servers): 1132 subproc = server.proc 1133 if subproc.is_alive(): 1134 logger.info('Status of subprocess "%s": running', subproc.name) 1135 server.stop(timeout=1) 1136 1137 if server.proc.exitcode == 0: 1138 logger.info('Status of subprocess "%s": exited correctly', subproc.name) 1139 else: 1140 logger.warning('Status of subprocess "%s": failed. Exit with non-zero status: %d', 1141 subproc.name, subproc.exitcode) 1142 failed_subproc += 1 1143 return failed_subproc 1144 1145 1146def main(): 1147 kwargs = vars(get_parser().parse_args()) 1148 return run(**kwargs) 1149