/* -*- indent-tabs-mode: nil; js-indent-level: 4 -*- */
/* vim: set shiftwidth=4 tabstop=8 autoindent cindent expandtab: */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";

// Entry points exported to the reftest chrome window that loads this module.
var EXPORTED_SYMBOLS = [
    "OnRefTestLoad",
    "OnRefTestUnload",
    "getTestPlugin"
];

Cu.import("resource://gre/modules/FileUtils.jsm");
Cu.import("chrome://reftest/content/globals.jsm", this);
Cu.import("chrome://reftest/content/httpd.jsm", this);
Cu.import("chrome://reftest/content/manifest.jsm", this);
Cu.import("chrome://reftest/content/StructuredLog.jsm", this);
Cu.import("resource://gre/modules/Services.jsm");
Cu.import("resource://gre/modules/NetUtil.jsm");
Cu.import('resource://gre/modules/XPCOMUtils.jsm');

// OS.File is only needed when reading/dumping serialized test lists
// (see ReadTests), so load it lazily.
XPCOMUtils.defineLazyGetter(this, "OS", function() {
    const { OS } = Cu.import("resource://gre/modules/osfile.jsm");
    return OS;
});

// pdf.js main/worker builds, loaded lazily. NOTE(review): presumably used
// by comparePdfs() for print-reftest comparison — that call site is not in
// this chunk; confirm before relying on it.
XPCOMUtils.defineLazyGetter(this, "PDFJS", function() {
    const { require } = Cu.import("resource://gre/modules/commonjs/toolkit/require.js", {});
    return {
        main: require('resource://pdf.js/build/pdf.js'),
        worker: require('resource://pdf.js/build/pdf.worker.js')
    };
});

// True if any unexpected-outcome counter is non-zero. Used (with
// g.runUntilFailure) to decide whether to stop the run early.
function HasUnexpectedResult()
{
    return g.testResults.Exception > 0 ||
        g.testResults.FailedLoad > 0 ||
        g.testResults.UnexpectedFail > 0 ||
        g.testResults.UnexpectedPass > 0 ||
        g.testResults.AssertionUnexpected > 0 ||
        g.testResults.AssertionUnexpectedFixed > 0;
}

// By default we just log to stdout
// (and mirror into g.logFile when one was opened — see InitAndStartRefTests).
var gDumpFn = function(line) {
    dump(line);
    if (g.logFile) {
        g.logFile.write(line, line.length);
    }
}

// Writer handed to StructuredLogger: emits one JSON record per line to
// stdout, mirrored into g.logFile when set.
var gDumpRawLog = function(record) {
    // Dump JSON representation of data on a single line
    var line = "\n" + JSON.stringify(record) + "\n";
    dump(line);

    if (g.logFile) {
        g.logFile.write(line, line.length);
    }
}
g.logger = new StructuredLogger('reftest', gDumpRawLog);
var logger = g.logger;

// Record a per-test debug message: emitted immediately at debug level and
// buffered so FlushTestBuffer can replay it on failure.
function TestBuffer(str)
{
    logger.debug(str);
    g.testLog.push(str);
}

// Replay (at info level) and clear the buffered per-test log.
function FlushTestBuffer()
{
    // In debug mode, we've dumped all these messages already.
    if (g.logLevel !== 'debug') {
        for (var i = 0; i < g.testLog.length; ++i) {
            logger.info("Saved log: " + g.testLog[i]);
        }
    }
    g.testLog = [];
}

// Explain (as errors) why DRAWWINDOW_USE_WIDGET_LAYERS had to be disabled;
// called from DoDrawWindow and again at the end of the run from DoneTests.
function LogWidgetLayersFailure()
{
    logger.error(
        "Screen resolution is too low - USE_WIDGET_LAYERS was disabled. " +
        (g.browserIsRemote ?
            "Since E10s is enabled, there is no fallback rendering path!" :
            "The fallback rendering path is not reliably consistent with on-screen rendering."));

    logger.error(
        "If you cannot increase your screen resolution you can try reducing " +
        "gecko's pixel scaling by adding something like '--setpref " +
        "layout.css.devPixelsPerPx=1.0' to your './mach reftest' command " +
        "(possibly as an alias in ~/.mozbuild/machrc). Note that this is " +
        "inconsistent with CI testing, and may interfere with HighDPI/" +
        "reftest-zoom tests.");
}

// Return a snapshot canvas sized to the browser element, reusing a
// recycled one when available (see ReleaseCanvas).
function AllocateCanvas()
{
    if (g.recycledCanvases.length > 0) {
        return g.recycledCanvases.shift();
    }

    var canvas = g.containingWindow.document.createElementNS(XHTML_NS, "canvas");
    var r = g.browser.getBoundingClientRect();
    canvas.setAttribute("width", Math.ceil(r.width));
    canvas.setAttribute("height", Math.ceil(r.height));

    return canvas;
}

// Return a canvas to the recycling pool.
function ReleaseCanvas(canvas)
{
    // store a maximum of 2 canvases, if we're not caching
    if (!g.noCanvasCache || g.recycledCanvases.length < 2) {
        g.recycledCanvases.push(canvas);
    }
}

// Best-effort string id for an event's target, for log messages.
function IDForEventTarget(event)
{
    try {
        return "'" + event.target.getAttribute('id') + "'";
    } catch (ex) {
        return "<unknown>";
    }
}

// Look up a plugin tag by name via nsIPluginHost; returns null (after a
// warning) when no plugin with that name is registered.
function getTestPlugin(aName) {
    var ph = Cc["@mozilla.org/plugin/host;1"].getService(Ci.nsIPluginHost);
    var tags = ph.getPluginTags();

    // Find the test plugin
    for (var i = 0; i < tags.length; i++) {
        if (tags[i].name == aName)
            return tags[i];
    }

    logger.warning("Failed to find the test-plugin.");
    return null;
}

// Chrome-window load hook: resolves crash-dump directories, reads basic
// prefs, builds the content <browser>/<iframe> element, enables the test
// plugins, and registers the content-script message listeners. The content
// script signals back after the initial onload.
function OnRefTestLoad(win)
{
    g.crashDumpDir = Cc[NS_DIRECTORY_SERVICE_CONTRACTID]
                     .getService(Ci.nsIProperties)
                     .get("ProfD", Ci.nsIFile);
    g.crashDumpDir.append("minidumps");

    g.pendingCrashDumpDir = Cc[NS_DIRECTORY_SERVICE_CONTRACTID]
                     .getService(Ci.nsIProperties)
                     .get("UAppData", Ci.nsIFile);
    g.pendingCrashDumpDir.append("Crash Reports");
    g.pendingCrashDumpDir.append("pending");

    // NOTE(review): env is obtained but not used within this function's
    // visible body.
    var env = Cc["@mozilla.org/process/environment;1"].
              getService(Ci.nsIEnvironment);

    var prefs = Cc["@mozilla.org/preferences-service;1"].
                getService(Ci.nsIPrefBranch);
    g.browserIsRemote = prefs.getBoolPref("browser.tabs.remote.autostart", false);

    g.browserIsIframe = prefs.getBoolPref("reftest.browser.iframe.enabled", false);

    g.logLevel = prefs.getCharPref("reftest.logLevel", "info");

    if (win === undefined || win == null) {
        win = window;
    }
    // Only adopt the first window we're given as the containing window.
    if (g.containingWindow == null && win != null) {
        g.containingWindow = win;
    }

    if (g.browserIsIframe) {
        g.browser = g.containingWindow.document.createElementNS(XHTML_NS, "iframe");
        g.browser.setAttribute("mozbrowser", "");
    } else {
        g.browser = g.containingWindow.document.createElementNS(XUL_NS, "xul:browser");
        g.browser.setAttribute("class", "lightweight");
    }
    g.browser.setAttribute("id", "browser");
    g.browser.setAttribute("type", "content");
    g.browser.setAttribute("primary", "true");
    g.browser.setAttribute("remote", g.browserIsRemote ? "true" : "false");
    // Make sure the browser element is exactly 800x1000, no matter
    // what size our window is
    g.browser.setAttribute("style", "padding: 0px; margin: 0px; border:none; min-width: 800px; min-height: 1000px; max-width: 800px; max-height: 1000px");

    if (Services.appinfo.OS == "Android") {
        // On Android, replace the entire main-window contents with the browser.
        let doc = g.containingWindow.document.getElementById('main-window');
        while (doc.hasChildNodes()) {
            doc.firstChild.remove();
        }
        doc.appendChild(g.browser);
    } else {
        document.getElementById("reftest-window").appendChild(g.browser);
    }

    // reftests should have the test plugins enabled, not click-to-play
    let plugin1 = getTestPlugin("Test Plug-in");
    let plugin2 = getTestPlugin("Second Test Plug-in");
    if (plugin1 && plugin2) {
        // Save prior states so OnRefTestUnload can restore them.
        g.testPluginEnabledStates = [plugin1.enabledState, plugin2.enabledState];
        plugin1.enabledState = Ci.nsIPluginTag.STATE_ENABLED;
        plugin2.enabledState = Ci.nsIPluginTag.STATE_ENABLED;
    } else {
        logger.warning("Could not get test plugin tags.");
    }

    g.browserMessageManager = g.browser.frameLoader.messageManager;
    // The content script waits for the initial onload, then notifies
    // us.
    RegisterMessageListenersAndLoadContentScript();
}

// Read the harness configuration from prefs, start the in-process HTTP
// server (unless running remote), then hand off to ReadTests — directly, or
// after focusing the browser when focus-sensitive tests are included.
function InitAndStartRefTests()
{
    /* These prefs are optional, so we don't need to spit an error to the log */
    try {
        var prefs = Cc["@mozilla.org/preferences-service;1"].
                    getService(Ci.nsIPrefBranch);
    } catch(e) {
        logger.error("EXCEPTION: " + e);
    }

    try {
        prefs.setBoolPref("android.widget_paints_background", false);
    } catch (e) {}

    /* set the g.loadTimeout */
    try {
        g.loadTimeout = prefs.getIntPref("reftest.timeout");
    } catch(e) {
        g.loadTimeout = 5 * 60 * 1000; //5 minutes as per bug 479518
    }

    /* Get the logfile for android tests */
    try {
        var logFile = prefs.getCharPref("reftest.logFile");
        if (logFile) {
            var f = FileUtils.File(logFile);
            g.logFile = FileUtils.openFileOutputStream(f, FileUtils.MODE_WRONLY | FileUtils.MODE_CREATE);
        }
    } catch(e) {}

    g.remote = prefs.getBoolPref("reftest.remote", false);

    g.ignoreWindowSize = prefs.getBoolPref("reftest.ignoreWindowSize", false);

    /* Support for running a chunk (subset) of tests. In separate try as this is optional */
    try {
        g.totalChunks = prefs.getIntPref("reftest.totalChunks");
        g.thisChunk = prefs.getIntPref("reftest.thisChunk");
    }
    catch(e) {
        g.totalChunks = 0;
        g.thisChunk = 0;
    }

    try {
        g.focusFilterMode = prefs.getCharPref("reftest.focusFilterMode");
    } catch(e) {}

    try {
        g.compareRetainedDisplayLists = prefs.getBoolPref("reftest.compareRetainedDisplayLists");
    } catch (e) {}
#ifdef MOZ_STYLO
    try {
        g.compareStyloToGecko = prefs.getBoolPref("reftest.compareStyloToGecko");
    } catch(e) {}
#endif

#ifdef MOZ_ENABLE_SKIA_PDF
    try {
        // We have to disable printing via parent or else silent print operations
        // (the type that we use here) would be treated as non-silent -- in other
        // words, a print dialog would appear for each print operation, which
        // would interrupt the test run.
        // See http://searchfox.org/mozilla-central/rev/bd39b6170f04afeefc751a23bb04e18bbd10352b/layout/printing/nsPrintEngine.cpp#617
        prefs.setBoolPref("print.print_via_parent", false);
    } catch (e) {
        /* uh oh, print reftests may not work... */
    }
#endif

    g.windowUtils = g.containingWindow.QueryInterface(Ci.nsIInterfaceRequestor).getInterface(Ci.nsIDOMWindowUtils);
    // NOTE(review): "inteface" typo below is in a thrown runtime string and
    // is preserved as-is.
    if (!g.windowUtils || !g.windowUtils.compareCanvases)
        throw "nsIDOMWindowUtils inteface missing";

    g.ioService = Cc[IO_SERVICE_CONTRACTID].getService(Ci.nsIIOService);
    g.debug = Cc[DEBUG_CONTRACTID].getService(Ci.nsIDebug2);

    RegisterProcessCrashObservers();

    // In remote mode the python harness serves the tests; otherwise run
    // our own HTTP server.
    if (g.remote) {
        g.server = null;
    } else {
        g.server = new HttpServer();
    }
    try {
        if (g.server)
            StartHTTPServer();
    } catch (ex) {
        //g.browser.loadURI('data:text/plain,' + ex);
        ++g.testResults.Exception;
        logger.error("EXCEPTION: " + ex);
        DoneTests();
    }

    // Focus the content browser.
    if (g.focusFilterMode != FOCUS_FILTER_NON_NEEDS_FOCUS_TESTS) {
        var fm = Cc["@mozilla.org/focus-manager;1"].getService(Ci.nsIFocusManager);
        if (fm.activeWindow != g.containingWindow) {
            Focus();
        }
        // Defer ReadTests until the browser actually receives focus.
        g.browser.addEventListener("focus", ReadTests, true);
        g.browser.focus();
    } else {
        ReadTests();
    }
}

// Start the local HTTP server on an OS-assigned port and record that port.
function StartHTTPServer()
{
    g.server.registerContentType("sjs", "sjs");
    g.server.start(-1);
    g.httpServerPort = g.server.identity.primaryPort;
}

// Perform a Fisher-Yates shuffle of the array.
function Shuffle(array)
{
    for (var i = array.length - 1; i > 0; i--) {
        var j = Math.floor(Math.random() * (i + 1));
        var temp = array[i];
        array[i] = array[j];
        array[j] = temp;
    }
}

// Populate g.urls from either parsed manifests or a serialized test-list
// file, then start the run (or dump the parsed tests and exit).
function ReadTests() {
    try {
        // Undo the one-shot focus listener added by InitAndStartRefTests.
        if (g.focusFilterMode != FOCUS_FILTER_NON_NEEDS_FOCUS_TESTS) {
            g.browser.removeEventListener("focus", ReadTests, true);
        }

        g.urls = [];
        var prefs = Cc["@mozilla.org/preferences-service;1"].
                    getService(Ci.nsIPrefBranch);

        /* There are three modes implemented here:
         * 1) reftest.manifests
         * 2) reftest.manifests and reftest.manifests.dumpTests
         * 3) reftest.tests
         *
         * The first will parse the specified manifests, then immediately
         * run the tests. The second will parse the manifests, save the test
         * objects to a file and exit. The third will load a file of test
         * objects and run them.
         *
         * The latter two modes are used to pass test data back and forth
         * with python harness.
         */
        let manifests = prefs.getCharPref("reftest.manifests", null);
        let dumpTests = prefs.getCharPref("reftest.manifests.dumpTests", null);
        let testList = prefs.getCharPref("reftest.tests", null);

        if ((testList && manifests) || !(testList || manifests)) {
            // NOTE(review): one value is logged at debug level, the other at
            // error level — looks inconsistent; confirm intent.
            logger.error("Exactly one of reftest.manifests or reftest.tests must be specified.");
            logger.debug("reftest.manifests is: " + manifests);
            logger.error("reftest.tests is: " + testList);
            DoneTests();
        }

        if (testList) {
            logger.debug("Reading test objects from: " + testList);
            let promise = OS.File.read(testList).then(function onSuccess(array) {
                let decoder = new TextDecoder();
                g.urls = JSON.parse(decoder.decode(array)).map(CreateUrls);
                StartTests();
            }).catch(function onFailure(e) {
                logger.error("Failed to load test objects: " + e);
                DoneTests();
            });
        } else if (manifests) {
            // Parse reftest manifests
            // XXX There is a race condition in the manifest parsing code which
            // sometimes shows up on Android jsreftests (bug 1416125). It seems
            // adding/removing log statements can change its frequency.
            // NOTE(review): at this point `manifests` is still the raw JSON
            // string, so `.length` is a character count, not a manifest count.
            logger.debug("Reading " + manifests.length + " manifests");
            manifests = JSON.parse(manifests);
            // NOTE(review): manifests[null] relies on key coercion to the
            // string "null"; confirm the harness actually emits that key.
            g.urlsFilterRegex = manifests[null];

            var globalFilter = manifests.hasOwnProperty("") ? new RegExp(manifests[""]) : null;
            var manifestURLs = Object.keys(manifests);

            // Ensure we read manifests from higher up the directory tree first so that we
            // process includes before reading the included manifest again
            manifestURLs.sort(function(a,b) {return a.length - b.length})
            manifestURLs.forEach(function(manifestURL) {
                logger.info("Reading manifest " + manifestURL);
                var filter = manifests[manifestURL] ? new RegExp(manifests[manifestURL]) : null;
                ReadTopManifest(manifestURL, [globalFilter, filter, false]);
            });

            if (dumpTests) {
                // Mode 2: serialize parsed tests for the python harness, then quit.
                logger.debug("Dumping test objects to file: " + dumpTests);
                let encoder = new TextEncoder();
                let tests = encoder.encode(JSON.stringify(g.urls));
                OS.File.writeAtomic(dumpTests, tests, {flush: true}).then(
                    function onSuccess() {
                        DoneTests();
                    },
                    function onFailure(reason) {
                        logger.error("failed to write test data: " + reason);
                        DoneTests();
                    }
                )
            } else {
                // Mode 1: run the parsed tests directly.
                logger.debug("Running " + g.urls.length + " test objects");
                g.manageSuite = true;
                g.urls = g.urls.map(CreateUrls);
                StartTests();
            }
        }
    } catch(e) {
        ++g.testResults.Exception;
        logger.error("EXCEPTION: " + e);
    }
}

// Read the run-mode prefs, compute chunking over the active (non-skipped)
// tests, optionally shuffle, emit suiteStart, and kick off the first test.
// Re-entered by StartCurrentTest when g.repeat > 0.
function StartTests()
{
    /* These prefs are optional, so we don't need to spit an error to the log */
    try {
        var prefs = Cc["@mozilla.org/preferences-service;1"].
                    getService(Ci.nsIPrefBranch);
    } catch(e) {
        logger.error("EXCEPTION: " + e);
    }

    // NOTE(review): getIntPref with a boolean default — the value ends up
    // used as a truthy flag; confirm this is intentional.
    g.noCanvasCache = prefs.getIntPref("reftest.nocache", false);

    g.shuffle = prefs.getBoolPref("reftest.shuffle", false);

    g.runUntilFailure = prefs.getBoolPref("reftest.runUntilFailure", false);

    g.verify = prefs.getBoolPref("reftest.verify", false);

    g.cleanupPendingCrashes = prefs.getBoolPref("reftest.cleanupPendingCrashes", false);

    // Check if there are any crash dump files from the startup procedure, before
    // we start running the first test. Otherwise the first test might get
    // blamed for producing a crash dump file when that was not the case.
    CleanUpCrashDumpFiles();

    // When we repeat this function is called again, so really only want to set
    // g.repeat once.
    if (g.repeat == null) {
        g.repeat = prefs.getIntPref("reftest.repeat", 0);
    }

    // NOTE(review): getIntPref with a boolean default again; also the pref is
    // named "skipslowtests" but the variable means "run slow tests" — verify.
    g.runSlowTests = prefs.getIntPref("reftest.skipslowtests", false);

    if (g.shuffle) {
        g.noCanvasCache = true;
    }

    try {
        BuildUseCounts();

        // Filter tests which will be skipped to get a more even distribution when chunking
        // tURLs is a temporary array containing all active tests
        var tURLs = new Array();
        for (var i = 0; i < g.urls.length; ++i) {
            if (g.urls[i].expected == EXPECTED_DEATH)
                continue;

            if (g.urls[i].needsFocus && !Focus())
                continue;

            if (g.urls[i].slow && !g.runSlowTests)
                continue;

            tURLs.push(g.urls[i]);
        }

        var numActiveTests = tURLs.length;

        if (g.totalChunks > 0 && g.thisChunk > 0) {
            // Calculate start and end indices of this chunk if tURLs array were
            // divided evenly
            var testsPerChunk = tURLs.length / g.totalChunks;
            var start = Math.round((g.thisChunk-1) * testsPerChunk);
            var end = Math.round(g.thisChunk * testsPerChunk);
            numActiveTests = end - start;

            // Map these indices onto the g.urls array. This avoids modifying the
            // g.urls array which prevents skipped tests from showing up in the log
            start = g.thisChunk == 1 ? 0 : g.urls.indexOf(tURLs[start]);
            end = g.thisChunk == g.totalChunks ? g.urls.length : g.urls.indexOf(tURLs[end + 1]) - 1;

            logger.info("Running chunk " + g.thisChunk + " out of " + g.totalChunks + " chunks. " +
                        "tests " + (start+1) + "-" + end + "/" + g.urls.length);

            g.urls = g.urls.slice(start, end);
        }

        if (g.manageSuite && !g.suiteStarted) {
            var ids = g.urls.map(function(obj) {
                return obj.identifier;
            });
            var suite = prefs.getCharPref('reftest.suite', 'reftest');
            logger.suiteStart(ids, suite, {"skipped": g.urls.length - numActiveTests});
            g.suiteStarted = true
        }

        if (g.shuffle) {
            Shuffle(g.urls);
        }

        g.totalTests = g.urls.length;
        if (!g.totalTests && !g.verify)
            throw "No tests to run";

        g.uriCanvases = {};
        StartCurrentTest();
    } catch (ex) {
        //g.browser.loadURI('data:text/plain,' + ex);
        ++g.testResults.Exception;
        logger.error("EXCEPTION: " + ex);
        DoneTests();
    }
}

// Chrome-window unload hook: restore the plugin enabled-states saved by
// OnRefTestLoad.
function OnRefTestUnload()
{
    let plugin1 = getTestPlugin("Test Plug-in");
    let plugin2 = getTestPlugin("Second Test Plug-in");
    if (plugin1 && plugin2) {
        plugin1.enabledState = g.testPluginEnabledStates[0];
        plugin2.enabledState = g.testPluginEnabledStates[1];
    } else {
        logger.warning("Failed to get test plugin tags.");
    }
}

// Increment the use count for a URI (keyed by spec) in g.uriUseCounts.
function AddURIUseCount(uri)
{
    if (uri == null)
        return;

    var spec = uri.spec;
    if (spec in g.uriUseCounts) {
        g.uriUseCounts[spec]++;
    } else {
        g.uriUseCounts[spec] = 1;
    }
}

// Count how many == / != comparisons reference each URI so snapshot
// canvases can be cached until their last use (see UpdateCanvasCache).
// Only URLs with no per-test pref settings are cacheable.
function BuildUseCounts()
{
    if (g.noCanvasCache) {
        return;
    }

    g.uriUseCounts = {};
    for (var i = 0; i < g.urls.length; ++i) {
        var url = g.urls[i];
        if (url.expected != EXPECTED_DEATH &&
            (url.type == TYPE_REFTEST_EQUAL ||
             url.type == TYPE_REFTEST_NOTEQUAL)) {
            if (url.prefSettings1.length == 0) {
                AddURIUseCount(g.urls[i].url1);
            }
            if (url.prefSettings2.length == 0) {
                AddURIUseCount(g.urls[i].url2);
            }
        }
    }
}

// Return true iff this window is focused when this function returns.
function Focus()
{
    var fm = Cc["@mozilla.org/focus-manager;1"].getService(Ci.nsIFocusManager);
    fm.focusedWindow = g.containingWindow;
#ifdef XP_MACOSX
    // On macOS also bring the application itself to the foreground;
    // best-effort — failures are ignored.
    try {
        var dock = Cc["@mozilla.org/widget/macdocksupport;1"].getService(Ci.nsIMacDockSupport);
        dock.activateApplication(true);
    } catch(ex) {
    }
#endif // XP_MACOSX
    return true;
}

// Deliberately drop focus before running a test that did not request it.
function Blur()
{
    // On non-remote reftests, this will transfer focus to the dummy window
    // we created to hold focus for non-needs-focus tests. Buggy tests
    // (ones which require focus but don't request needs-focus) will then
    // fail.
    g.containingWindow.blur();
}

// Pop skipped tests off the front of g.urls, then either finish the run,
// restart it for a repeat pass, or launch the next test document.
function StartCurrentTest()
{
    g.testLog = [];

    // make sure we don't run tests that are expected to kill the browser
    while (g.urls.length > 0) {
        var test = g.urls[0];
        logger.testStart(test.identifier);
        if (test.expected == EXPECTED_DEATH) {
            ++g.testResults.Skip;
            logger.testEnd(test.identifier, "SKIP");
            g.urls.shift();
        } else if (test.needsFocus && !Focus()) {
            // FIXME: Marking this as a known fail is dangerous! What
            // if it starts failing all the time?
            ++g.testResults.Skip;
            logger.testEnd(test.identifier, "SKIP", null, "(COULDN'T GET FOCUS)");
            g.urls.shift();
        } else if (test.slow && !g.runSlowTests) {
            ++g.testResults.Slow;
            logger.testEnd(test.identifier, "SKIP", null, "(SLOW)");
            g.urls.shift();
        } else {
            break;
        }
    }

    if ((g.urls.length == 0 && g.repeat == 0) ||
        (g.runUntilFailure && HasUnexpectedResult())) {
        // Run is over (queue empty, or stop-on-first-failure triggered).
        RestoreChangedPreferences();
        DoneTests();
    } else if (g.urls.length == 0 && g.repeat > 0) {
        // Repeat
        g.repeat--;
        StartTests();
    } else {
        if (g.urls[0].chaosMode) {
            g.windowUtils.enterChaosMode();
        }
        if (!g.urls[0].needsFocus) {
            Blur();
        }
        // Show run progress in the window title.
        var currentTest = g.totalTests - g.urls.length;
        g.containingWindow.document.title = "reftest: " + currentTest + " / " + g.totalTests +
            " (" + Math.floor(100 * (currentTest / g.totalTests)) + "%)";
        StartCurrentURI(URL_TARGET_TYPE_TEST);
    }
}

// Load one side (test or reference) of the current comparison: apply that
// side's pref settings (recording prior values for RestoreChangedPreferences),
// then either reuse a cached snapshot canvas or ask the content process to
// load the URL.
function StartCurrentURI(aURLTargetType)
{
    const isStartingRef = (aURLTargetType == URL_TARGET_TYPE_REFERENCE);

    g.currentURL = g.urls[0][isStartingRef ? "url2" : "url1"].spec;
    g.currentURLTargetType = aURLTargetType;

    // Undo any prefs the previous side set before applying this side's.
    RestoreChangedPreferences();

    var prefs = Cc["@mozilla.org/preferences-service;1"].
        getService(Ci.nsIPrefBranch);

    const prefSettings =
        g.urls[0][isStartingRef ? "prefSettings2" : "prefSettings1"];

    if (prefSettings.length > 0) {
        var badPref = undefined;
        try {
            prefSettings.forEach(function(ps) {
                // Read the current value with the type-specific getter; a
                // throw here means the pref is unknown or the wrong type.
                var oldVal;
                if (ps.type == PREF_BOOLEAN) {
                    try {
                        oldVal = prefs.getBoolPref(ps.name);
                    } catch (e) {
                        badPref = "boolean preference '" + ps.name + "'";
                        throw "bad pref";
                    }
                } else if (ps.type == PREF_STRING) {
                    try {
                        oldVal = prefs.getCharPref(ps.name);
                    } catch (e) {
                        badPref = "string preference '" + ps.name + "'";
                        throw "bad pref";
                    }
                } else if (ps.type == PREF_INTEGER) {
                    try {
                        oldVal = prefs.getIntPref(ps.name);
                    } catch (e) {
                        badPref = "integer preference '" + ps.name + "'";
                        throw "bad pref";
                    }
                } else {
                    throw "internal error - unknown preference type";
                }
                if (oldVal != ps.value) {
                    // Remember the old value so it can be restored later.
                    g.prefsToRestore.push( { name: ps.name,
                                             type: ps.type,
                                             value: oldVal } );
                    var value = ps.value;
                    if (ps.type == PREF_BOOLEAN) {
                        prefs.setBoolPref(ps.name, value);
                    } else if (ps.type == PREF_STRING) {
                        prefs.setCharPref(ps.name, value);
                        value = '"' + value + '"';
                    } else if (ps.type == PREF_INTEGER) {
                        prefs.setIntPref(ps.name, value);
                    }
                    logger.info("SET PREFERENCE pref(" + ps.name + "," + value + ")");
                }
            });
        } catch (e) {
            if (e == "bad pref") {
                // A pref in the manifest was unknown or mistyped: report it
                // and skip this test.
                var test = g.urls[0];
                if (test.expected == EXPECTED_FAIL) {
                    logger.testEnd(test.identifier, "FAIL", "FAIL",
                                   "(SKIPPED; " + badPref + " not known or wrong type)");
                    ++g.testResults.Skip;
                } else {
                    logger.testEnd(test.identifier, "FAIL", "PASS",
                                   badPref + " not known or wrong type");
                    ++g.testResults.UnexpectedFail;
                }

                // skip the test that had a bad preference
                g.urls.shift();
                StartCurrentTest();
                return;
            } else {
                throw e;
            }
        }
    }

    if (prefSettings.length == 0 &&
        g.uriCanvases[g.currentURL] &&
        (g.urls[0].type == TYPE_REFTEST_EQUAL ||
         g.urls[0].type == TYPE_REFTEST_NOTEQUAL) &&
        g.urls[0].maxAsserts == 0) {
        // Pretend the document loaded --- RecordResult will notice
        // there's already a canvas for this URL
        g.containingWindow.setTimeout(RecordResult, 0);
    } else {
        var currentTest = g.totalTests - g.urls.length;
        // Log this to preserve the same overall log format,
        // should be removed if the format is updated
        gDumpFn("REFTEST TEST-LOAD | " + g.currentURL + " | " + currentTest + " / " + g.totalTests +
                " (" + Math.floor(100 * (currentTest / g.totalTests)) + "%)\n");
        TestBuffer("START " + g.currentURL);
        // Dispatch to the content script by test type.
        var type = g.urls[0].type
        if (TYPE_SCRIPT == type) {
            SendLoadScriptTest(g.currentURL, g.loadTimeout);
        } else if (TYPE_PRINT == type) {
            SendLoadPrintTest(g.currentURL, g.loadTimeout);
        } else {
            SendLoadTest(type, g.currentURL, g.currentURLTargetType, g.loadTimeout);
        }
    }
}

// Emit the final suite results, stop the HTTP server if one is running,
// and force-quit the application.
function DoneTests()
{
    if (g.manageSuite) {
        g.suiteStarted = false
        logger.suiteEnd({'results': g.testResults});
    } else {
        // Suite lifecycle is managed by the python harness; just emit the
        // raw results record.
        logger._logData('results', {results: g.testResults});
    }
    logger.info("Slowest test took " + g.slowestTestTime + "ms (" + g.slowestTestURL + ")");
    logger.info("Total canvas count = " + g.recycledCanvases.length);
    if (g.failedUseWidgetLayers) {
        LogWidgetLayersFailure();
    }

    function onStopped() {
        let appStartup = Cc["@mozilla.org/toolkit/app-startup;1"].getService(Ci.nsIAppStartup);
        appStartup.quit(Ci.nsIAppStartup.eForceQuit);
    }
    if (g.server) {
        g.server.stop(onStopped);
    }
    else {
        onStopped();
    }
}

// Decrement a URI's use count (built by BuildUseCounts). While uses remain,
// keep the snapshot canvas cached; on the last use, recycle it.
function UpdateCanvasCache(url, canvas)
{
    var spec = url.spec;

    --g.uriUseCounts[spec];

    if (g.uriUseCounts[spec] == 0) {
        ReleaseCanvas(canvas);
        delete g.uriCanvases[spec];
    } else if (g.uriUseCounts[spec] > 0) {
        g.uriCanvases[spec] = canvas;
    } else {
        throw "Use counts were computed incorrectly";
    }
}

// Recompute drawWindow flags for every drawWindow operation.
// We have to do this every time since our window can be
// asynchronously resized (e.g. by the window manager, to make
// it fit on screen) at unpredictable times.
// Fortunately this is pretty cheap.
function DoDrawWindow(ctx, x, y, w, h)
{
    var flags = ctx.DRAWWINDOW_DRAW_CARET | ctx.DRAWWINDOW_DRAW_VIEW;
    var testRect = g.browser.getBoundingClientRect();
    if (g.ignoreWindowSize ||
        (0 <= testRect.left &&
         0 <= testRect.top &&
         g.containingWindow.innerWidth >= testRect.right &&
         g.containingWindow.innerHeight >= testRect.bottom)) {
        // We can use the window's retained layer manager
        // because the window is big enough to display the entire
        // browser element
        flags |= ctx.DRAWWINDOW_USE_WIDGET_LAYERS;
    } else if (g.browserIsRemote) {
        // Remote content cannot be drawn without widget layers.
        logger.error(g.currentURL + " | can't drawWindow remote content");
        ++g.testResults.Exception;
    }

    if (g.drawWindowFlags != flags) {
        // Every time the flags change, dump the new state.
        g.drawWindowFlags = flags;
        var flagsStr = "DRAWWINDOW_DRAW_CARET | DRAWWINDOW_DRAW_VIEW";
        if (flags & ctx.DRAWWINDOW_USE_WIDGET_LAYERS) {
            flagsStr += " | DRAWWINDOW_USE_WIDGET_LAYERS";
        } else {
            // Output a special warning because we need to be able to detect
            // this whenever it happens.
            LogWidgetLayersFailure();
            g.failedUseWidgetLayers = true;
        }
        logger.info("drawWindow flags = " + flagsStr +
                    "; window size = " + g.containingWindow.innerWidth + "," + g.containingWindow.innerHeight +
                    "; test browser size = " + testRect.width + "," + testRect.height);
    }

    TestBuffer("DoDrawWindow " + x + "," + y + "," + w + "," + h);
    ctx.drawWindow(g.containingWindow, x, y, w, h, "rgb(255,255,255)",
                   g.drawWindowFlags);
}

// Snapshot the full window into g.currentCanvas (allocating one if needed).
// Returns false for test types that are never snapshotted.
function InitCurrentCanvasWithSnapshot()
{
    TestBuffer("Initializing canvas snapshot");

    if (g.urls[0].type == TYPE_LOAD || g.urls[0].type == TYPE_SCRIPT || g.urls[0].type == TYPE_PRINT) {
        // We don't want to snapshot this kind of test
        return false;
    }

    if (!g.currentCanvas) {
        g.currentCanvas = AllocateCanvas();
    }

    var ctx = g.currentCanvas.getContext("2d");
    DoDrawWindow(ctx, 0, 0, g.currentCanvas.width, g.currentCanvas.height);
    return true;
}

// Redraw only the invalidated rects (clamped to the canvas bounds) into
// g.currentCanvas. No-op when there is no current canvas.
function UpdateCurrentCanvasForInvalidation(rects)
{
    TestBuffer("Updating canvas for invalidation");

    if (!g.currentCanvas) {
        return;
    }

    var ctx = g.currentCanvas.getContext("2d");
    for (var i = 0; i < rects.length; ++i) {
        var r = rects[i];
        // Set left/top/right/bottom to pixel boundaries
        var left = Math.floor(r.left);
        var top = Math.floor(r.top);
        var right = Math.ceil(r.right);
        var bottom = Math.ceil(r.bottom);

        // Clamp the values to the canvas size
        left = Math.max(0, Math.min(left, g.currentCanvas.width));
        top = Math.max(0, Math.min(top, g.currentCanvas.height));
        right = Math.max(0, Math.min(right, g.currentCanvas.width));
        bottom = Math.max(0, Math.min(bottom, g.currentCanvas.height));

        ctx.save();
        // Translate so the drawn window region lands at the rect's position.
        ctx.translate(left, top);
        DoDrawWindow(ctx, left, top, right - left, bottom - top);
        ctx.restore();
    }
}

// Redraw the whole window into g.currentCanvas after an invalidation.
// No-op when there is no current canvas.
function UpdateWholeCurrentCanvasForInvalidation()
{
    TestBuffer("Updating entire canvas for invalidation");

    if (!g.currentCanvas) {
        return;
    }

    var ctx = g.currentCanvas.getContext("2d");
    DoDrawWindow(ctx, 0, 0, g.currentCanvas.width, g.currentCanvas.height);
}

// Record the outcome of the current document load / script run / print and
// advance the state machine. (NOTE(review): this function continues beyond
// the end of this chunk.)
function RecordResult(testRunTime, errorMsg, typeSpecificResults)
{
    TestBuffer("RecordResult fired");

    // Keep track of which test was slowest, and how long it took.
    if (testRunTime > g.slowestTestTime) {
        g.slowestTestTime = testRunTime;
        g.slowestTestURL = g.currentURL;
    }

    // Maps expectation -> pass/fail -> {status strings, result-counter name}.
    // Not 'const ...' because of 'EXPECTED_*' value dependency.
    var outputs = {};
    outputs[EXPECTED_PASS] = {
        true: {s: ["PASS", "PASS"], n: "Pass"},
        false: {s: ["FAIL", "PASS"], n: "UnexpectedFail"}
    };
    outputs[EXPECTED_FAIL] = {
        true: {s: ["PASS", "FAIL"], n: "UnexpectedPass"},
        false: {s: ["FAIL", "FAIL"], n: "KnownFail"}
    };
    outputs[EXPECTED_RANDOM] = {
        true: {s: ["PASS", "PASS"], n: "Random"},
        false: {s: ["FAIL", "FAIL"], n: "Random"}
    };
    // for EXPECTED_FUZZY we need special handling because we can have
    // Pass, UnexpectedPass, or UnexpectedFail

    var output;
    var extra;

    if (g.urls[0].type == TYPE_LOAD) {
        // Load-only test: reaching here means it passed.
        ++g.testResults.LoadOnly;
        logger.testStatus(g.urls[0].identifier, "(LOAD ONLY)", "PASS", "PASS");
        g.currentCanvas = null;
        FinishTestItem();
        return;
    }
    if (g.urls[0].type == TYPE_PRINT) {
        switch (g.currentURLTargetType) {
        case URL_TARGET_TYPE_TEST:
            // First document has been loaded.
            g.testPrintOutput = typeSpecificResults;
            // Proceed to load the second document.
956 CleanUpCrashDumpFiles(); 957 StartCurrentURI(URL_TARGET_TYPE_REFERENCE); 958 break; 959 case URL_TARGET_TYPE_REFERENCE: 960 let pathToTestPdf = g.testPrintOutput; 961 let pathToRefPdf = typeSpecificResults; 962 comparePdfs(pathToTestPdf, pathToRefPdf, function(error, results) { 963 let expected = g.urls[0].expected; 964 // TODO: We should complain here if results is empty! 965 // (If it's empty, we'll spuriously succeed, regardless of 966 // our expectations) 967 if (error) { 968 output = outputs[expected][false]; 969 extra = { status_msg: output.n }; 970 ++g.testResults[output.n]; 971 logger.testEnd(g.urls[0].identifier, output.s[0], output.s[1], 972 error.message, null, extra); 973 } else { 974 let outputPair = outputs[expected]; 975 if (expected === EXPECTED_FAIL) { 976 let failureResults = results.filter(function (result) { return !result.passed }); 977 if (failureResults.length > 0) { 978 // We got an expected failure. Let's get rid of the 979 // passes from the results so we don't trigger 980 // TEST_UNEXPECTED_PASS logging for those. 981 results = failureResults; 982 } 983 // (else, we expected a failure but got none! 984 // Leave results untouched so we can log them.) 985 } 986 results.forEach(function(result) { 987 output = outputPair[result.passed]; 988 let extra = { status_msg: output.n }; 989 ++g.testResults[output.n]; 990 logger.testEnd(g.urls[0].identifier, output.s[0], output.s[1], 991 result.description, null, extra); 992 }); 993 } 994 FinishTestItem(); 995 }); 996 break; 997 default: 998 throw "Unexpected state."; 999 } 1000 return; 1001 } 1002 if (g.urls[0].type == TYPE_SCRIPT) { 1003 var expected = g.urls[0].expected; 1004 1005 if (errorMsg) { 1006 // Force an unexpected failure to alert the test author to fix the test. 1007 expected = EXPECTED_PASS; 1008 } else if (typeSpecificResults.length == 0) { 1009 // This failure may be due to a JavaScript Engine bug causing 1010 // early termination of the test. 
If we do not allow silent 1011 // failure, report an error. 1012 if (!g.urls[0].allowSilentFail) 1013 errorMsg = "No test results reported. (SCRIPT)\n"; 1014 else 1015 logger.info("An expected silent failure occurred"); 1016 } 1017 1018 if (errorMsg) { 1019 output = outputs[expected][false]; 1020 extra = { status_msg: output.n }; 1021 ++g.testResults[output.n]; 1022 logger.testStatus(g.urls[0].identifier, errorMsg, output.s[0], output.s[1], null, null, extra); 1023 FinishTestItem(); 1024 return; 1025 } 1026 1027 var anyFailed = typeSpecificResults.some(function(result) { return !result.passed; }); 1028 var outputPair; 1029 if (anyFailed && expected == EXPECTED_FAIL) { 1030 // If we're marked as expected to fail, and some (but not all) tests 1031 // passed, treat those tests as though they were marked random 1032 // (since we can't tell whether they were really intended to be 1033 // marked failing or not). 1034 outputPair = { true: outputs[EXPECTED_RANDOM][true], 1035 false: outputs[expected][false] }; 1036 } else { 1037 outputPair = outputs[expected]; 1038 } 1039 var index = 0; 1040 typeSpecificResults.forEach(function(result) { 1041 var output = outputPair[result.passed]; 1042 var extra = { status_msg: output.n }; 1043 1044 ++g.testResults[output.n]; 1045 logger.testStatus(g.urls[0].identifier, result.description + " item " + (++index), 1046 output.s[0], output.s[1], null, null, extra); 1047 }); 1048 1049 if (anyFailed && expected == EXPECTED_PASS) { 1050 FlushTestBuffer(); 1051 } 1052 1053 FinishTestItem(); 1054 return; 1055 } 1056 1057 const isRecordingRef = 1058 (g.currentURLTargetType == URL_TARGET_TYPE_REFERENCE); 1059 const prefSettings = 1060 g.urls[0][isRecordingRef ? 
"prefSettings2" : "prefSettings1"]; 1061 1062 if (prefSettings.length == 0 && g.uriCanvases[g.currentURL]) { 1063 g.currentCanvas = g.uriCanvases[g.currentURL]; 1064 } 1065 if (g.currentCanvas == null) { 1066 logger.error(g.currentURL, "program error managing snapshots"); 1067 ++g.testResults.Exception; 1068 } 1069 g[isRecordingRef ? "canvas2" : "canvas1"] = g.currentCanvas; 1070 g.currentCanvas = null; 1071 1072 ResetRenderingState(); 1073 1074 switch (g.currentURLTargetType) { 1075 case URL_TARGET_TYPE_TEST: 1076 // First document has been loaded. 1077 // Proceed to load the second document. 1078 1079 CleanUpCrashDumpFiles(); 1080 StartCurrentURI(URL_TARGET_TYPE_REFERENCE); 1081 break; 1082 case URL_TARGET_TYPE_REFERENCE: 1083 // Both documents have been loaded. Compare the renderings and see 1084 // if the comparison result matches the expected result specified 1085 // in the manifest. 1086 1087 // number of different pixels 1088 var differences; 1089 // whether the two renderings match: 1090 var equal; 1091 var maxDifference = {}; 1092 // whether the allowed fuzziness from the annotations is exceeded 1093 // by the actual comparison results 1094 var fuzz_exceeded = false; 1095 1096 differences = g.windowUtils.compareCanvases(g.canvas1, g.canvas2, maxDifference); 1097 equal = (differences == 0); 1098 1099 if (maxDifference.value > 0 && equal) { 1100 throw "Inconsistent result from compareCanvases."; 1101 } 1102 1103 // what is expected on this platform (PASS, FAIL, or RANDOM) 1104 var expected = g.urls[0].expected; 1105 1106 if (expected == EXPECTED_FUZZY) { 1107 logger.info(`REFTEST fuzzy test ` + 1108 `(${g.urls[0].fuzzyMinDelta}, ${g.urls[0].fuzzyMinPixels}) <= ` + 1109 `(${maxDifference.value}, ${differences}) <= ` + 1110 `(${g.urls[0].fuzzyMaxDelta}, ${g.urls[0].fuzzyMaxPixels})`); 1111 fuzz_exceeded = maxDifference.value > g.urls[0].fuzzyMaxDelta || 1112 differences > g.urls[0].fuzzyMaxPixels; 1113 equal = !fuzz_exceeded && 1114 maxDifference.value >= 
g.urls[0].fuzzyMinDelta && 1115 differences >= g.urls[0].fuzzyMinPixels; 1116 } 1117 1118 var failedExtraCheck = g.failedNoPaint || g.failedNoDisplayList || g.failedDisplayList || g.failedOpaqueLayer || g.failedAssignedLayer; 1119 1120 // whether the comparison result matches what is in the manifest 1121 var test_passed = (equal == (g.urls[0].type == TYPE_REFTEST_EQUAL)) && !failedExtraCheck; 1122 1123 if (expected != EXPECTED_FUZZY) { 1124 output = outputs[expected][test_passed]; 1125 } else if (test_passed) { 1126 output = {s: ["PASS", "PASS"], n: "Pass"}; 1127 } else if (g.urls[0].type == TYPE_REFTEST_EQUAL && 1128 !failedExtraCheck && 1129 !fuzz_exceeded) { 1130 // If we get here, that means we had an '==' type test where 1131 // at least one of the actual difference values was below the 1132 // allowed range, but nothing else was wrong. So let's produce 1133 // UNEXPECTED-PASS in this scenario. Also, if we enter this 1134 // branch, 'equal' must be false so let's assert that to guard 1135 // against logic errors. 1136 if (equal) { 1137 throw "Logic error in reftest.jsm fuzzy test handling!"; 1138 } 1139 output = {s: ["PASS", "FAIL"], n: "UnexpectedPass"}; 1140 } else { 1141 // In all other cases we fail the test 1142 output = {s: ["FAIL", "PASS"], n: "UnexpectedFail"}; 1143 } 1144 extra = { status_msg: output.n }; 1145 1146 ++g.testResults[output.n]; 1147 1148 // It's possible that we failed both an "extra check" and the normal comparison, but we don't 1149 // have a way to annotate these separately, so just print an error for the extra check failures. 
1150 if (failedExtraCheck) { 1151 var failures = []; 1152 if (g.failedNoPaint) { 1153 failures.push("failed reftest-no-paint"); 1154 } 1155 if (g.failedNoDisplayList) { 1156 failures.push("failed reftest-no-display-list"); 1157 } 1158 if (g.failedDisplayList) { 1159 failures.push("failed reftest-display-list"); 1160 } 1161 // The g.failed*Messages arrays will contain messages from both the test and the reference. 1162 if (g.failedOpaqueLayer) { 1163 failures.push("failed reftest-opaque-layer: " + g.failedOpaqueLayerMessages.join(", ")); 1164 } 1165 if (g.failedAssignedLayer) { 1166 failures.push("failed reftest-assigned-layer: " + g.failedAssignedLayerMessages.join(", ")); 1167 } 1168 var failureString = failures.join(", "); 1169 logger.testStatus(g.urls[0].identifier, failureString, output.s[0], output.s[1], null, null, extra); 1170 } else { 1171 var message = "image comparison, max difference: " + maxDifference.value + 1172 ", number of differing pixels: " + differences; 1173 if (!test_passed && expected == EXPECTED_PASS || 1174 !test_passed && expected == EXPECTED_FUZZY || 1175 test_passed && expected == EXPECTED_FAIL) { 1176 if (!equal) { 1177 extra.max_difference = maxDifference.value; 1178 extra.differences = differences; 1179 var image1 = g.canvas1.toDataURL(); 1180 var image2 = g.canvas2.toDataURL(); 1181 extra.reftest_screenshots = [ 1182 {url:g.urls[0].identifier[0], 1183 screenshot: image1.slice(image1.indexOf(",") + 1)}, 1184 g.urls[0].identifier[1], 1185 {url:g.urls[0].identifier[2], 1186 screenshot: image2.slice(image2.indexOf(",") + 1)} 1187 ]; 1188 extra.image1 = image1; 1189 extra.image2 = image2; 1190 } else { 1191 var image1 = g.canvas1.toDataURL(); 1192 extra.reftest_screenshots = [ 1193 {url:g.urls[0].identifier[0], 1194 screenshot: image1.slice(image1.indexOf(",") + 1)} 1195 ]; 1196 extra.image1 = image1; 1197 } 1198 } 1199 logger.testStatus(g.urls[0].identifier, message, output.s[0], output.s[1], null, null, extra); 1200 1201 if 
(g.noCanvasCache) { 1202 ReleaseCanvas(g.canvas1); 1203 ReleaseCanvas(g.canvas2); 1204 } else { 1205 if (g.urls[0].prefSettings1.length == 0) { 1206 UpdateCanvasCache(g.urls[0].url1, g.canvas1); 1207 } 1208 if (g.urls[0].prefSettings2.length == 0) { 1209 UpdateCanvasCache(g.urls[0].url2, g.canvas2); 1210 } 1211 } 1212 } 1213 1214 if ((!test_passed && expected == EXPECTED_PASS) || (test_passed && expected == EXPECTED_FAIL)) { 1215 FlushTestBuffer(); 1216 } 1217 1218 CleanUpCrashDumpFiles(); 1219 FinishTestItem(); 1220 break; 1221 default: 1222 throw "Unexpected state."; 1223 } 1224} 1225 1226function LoadFailed(why) 1227{ 1228 ++g.testResults.FailedLoad; 1229 if (!why) { 1230 // reftest-content.js sets an initial reason before it sets the 1231 // timeout that will call us with the currently set reason, so we 1232 // should never get here. If we do then there's a logic error 1233 // somewhere. Perhaps tests are somehow running overlapped and the 1234 // timeout for one test is not being cleared before the timeout for 1235 // another is set? Maybe there's some sort of race? 
1236 logger.error("load failed with unknown reason (we should always have a reason!)"); 1237 } 1238 logger.testStatus(g.urls[0].identifier, "load failed: " + why, "FAIL", "PASS"); 1239 FlushTestBuffer(); 1240 FinishTestItem(); 1241} 1242 1243function RemoveExpectedCrashDumpFiles() 1244{ 1245 if (g.expectingProcessCrash) { 1246 for (let crashFilename of g.expectedCrashDumpFiles) { 1247 let file = g.crashDumpDir.clone(); 1248 file.append(crashFilename); 1249 if (file.exists()) { 1250 file.remove(false); 1251 } 1252 } 1253 } 1254 g.expectedCrashDumpFiles.length = 0; 1255} 1256 1257function FindUnexpectedCrashDumpFiles() 1258{ 1259 if (!g.crashDumpDir.exists()) { 1260 return; 1261 } 1262 1263 let entries = g.crashDumpDir.directoryEntries; 1264 if (!entries) { 1265 return; 1266 } 1267 1268 let foundCrashDumpFile = false; 1269 while (entries.hasMoreElements()) { 1270 let file = entries.getNext().QueryInterface(Ci.nsIFile); 1271 let path = String(file.path); 1272 if (path.match(/\.(dmp|extra)$/) && !g.unexpectedCrashDumpFiles[path]) { 1273 if (!foundCrashDumpFile) { 1274 ++g.testResults.UnexpectedFail; 1275 foundCrashDumpFile = true; 1276 if (g.currentURL) { 1277 logger.testStatus(g.urls[0].identifier, "crash-check", "FAIL", "PASS", "This test left crash dumps behind, but we weren't expecting it to!"); 1278 } else { 1279 logger.error("Harness startup left crash dumps behind, but we weren't expecting it to!"); 1280 } 1281 } 1282 logger.info("Found unexpected crash dump file " + path); 1283 g.unexpectedCrashDumpFiles[path] = true; 1284 } 1285 } 1286} 1287 1288function RemovePendingCrashDumpFiles() 1289{ 1290 if (!g.pendingCrashDumpDir.exists()) { 1291 return; 1292 } 1293 1294 let entries = g.pendingCrashDumpDir.directoryEntries; 1295 while (entries.hasMoreElements()) { 1296 let file = entries.getNext().QueryInterface(Ci.nsIFile); 1297 if (file.isFile()) { 1298 file.remove(false); 1299 logger.info("This test left pending crash dumps; deleted "+file.path); 1300 } 1301 } 1302} 

// Per-test crash-dump bookkeeping: remove dumps we expected, flag any we
// did not, optionally purge pending dumps, and reset the expectation flag.
function CleanUpCrashDumpFiles()
{
    RemoveExpectedCrashDumpFiles();
    FindUnexpectedCrashDumpFiles();
    if (g.cleanupPendingCrashes) {
        RemovePendingCrashDumpFiles();
    }
    g.expectingProcessCrash = false;
}

// Close out the current test item: emit the testEnd record, ask content to
// load a blank page, and reset the per-test extra-check failure flags.
function FinishTestItem()
{
    logger.testEnd(g.urls[0].identifier, "OK");

    // Replace document with BLANK_URL_FOR_CLEARING in case there are
    // assertions when unloading.
    logger.debug("Loading a blank page");
    // After clearing, content will notify us of the assertion count
    // and tests will continue.
    SendClear();
    g.failedNoPaint = false;
    g.failedNoDisplayList = false;
    g.failedDisplayList = false;
    g.failedOpaqueLayer = false;
    g.failedOpaqueLayerMessages = [];
    g.failedAssignedLayer = false;
    g.failedAssignedLayerMessages = [];
}

// Compare the number of NS_ASSERTIONs hit during the test against the
// manifest's [min, max] expectation (debug builds only), leave chaos mode
// if the test enabled it, then advance to the next test in the queue.
// numAsserts - assertion count reported by the content process.
function DoAssertionCheck(numAsserts)
{
    if (g.debug.isDebugBuild) {
        if (g.browserIsRemote) {
            // Count chrome-process asserts too when content is out of
            // process.
            var newAssertionCount = g.debug.assertionCount;
            var numLocalAsserts = newAssertionCount - g.assertionCount;
            g.assertionCount = newAssertionCount;

            numAsserts += numLocalAsserts;
        }

        var minAsserts = g.urls[0].minAsserts;
        var maxAsserts = g.urls[0].maxAsserts;

        logger.assertionCount(g.urls[0].identifier, numAsserts, minAsserts, maxAsserts);
    }

    if (g.urls[0].chaosMode) {
        g.windowUtils.leaveChaosMode();
    }

    // And start the next test.
    g.urls.shift();
    StartCurrentTest();
}

// Ask the content process to reset its rendering state between documents.
function ResetRenderingState()
{
    SendResetRenderingState();
    // We would want to clear any viewconfig here, if we add support for it
}

// Restore all preferences changed for the previous test, in reverse order
// of application, logging each restoration. Clears g.prefsToRestore.
function RestoreChangedPreferences()
{
    if (g.prefsToRestore.length > 0) {
        var prefs = Cc["@mozilla.org/preferences-service;1"].
                    getService(Ci.nsIPrefBranch);
        // Reverse so prefs are unwound in the opposite order they were set.
        g.prefsToRestore.reverse();
        g.prefsToRestore.forEach(function(ps) {
            var value = ps.value;
            if (ps.type == PREF_BOOLEAN) {
                prefs.setBoolPref(ps.name, value);
            } else if (ps.type == PREF_STRING) {
                prefs.setCharPref(ps.name, value);
                // Quote the value purely for the log line below.
                value = '"' + value + '"';
            } else if (ps.type == PREF_INTEGER) {
                prefs.setIntPref(ps.name, value);
            }
            logger.info("RESTORE PREFERENCE pref(" + ps.name + "," + value + ")");
        });
        g.prefsToRestore = [];
    }
}

// Wire up every "reftest:*" message from the content process to its Recv*
// handler, then inject the content-side script into the browser.
function RegisterMessageListenersAndLoadContentScript()
{
    g.browserMessageManager.addMessageListener(
        "reftest:AssertionCount",
        function (m) { RecvAssertionCount(m.json.count); }
    );
    g.browserMessageManager.addMessageListener(
        "reftest:ContentReady",
        function (m) { return RecvContentReady(m.data); }
    );
    g.browserMessageManager.addMessageListener(
        "reftest:Exception",
        function (m) { RecvException(m.json.what) }
    );
    g.browserMessageManager.addMessageListener(
        "reftest:FailedLoad",
        function (m) { RecvFailedLoad(m.json.why); }
    );
    g.browserMessageManager.addMessageListener(
        "reftest:FailedNoPaint",
        function (m) { RecvFailedNoPaint(); }
    );
    g.browserMessageManager.addMessageListener(
        "reftest:FailedNoDisplayList",
        function (m) { RecvFailedNoDisplayList(); }
    );
    g.browserMessageManager.addMessageListener(
        "reftest:FailedDisplayList",
        function (m) { RecvFailedDisplayList(); }
    );
    g.browserMessageManager.addMessageListener(
        "reftest:FailedOpaqueLayer",
        function (m) { RecvFailedOpaqueLayer(m.json.why); }
    );
    g.browserMessageManager.addMessageListener(
        "reftest:FailedAssignedLayer",
        function (m) { RecvFailedAssignedLayer(m.json.why); }
    );
    g.browserMessageManager.addMessageListener(
        "reftest:InitCanvasWithSnapshot",
        function (m) { return RecvInitCanvasWithSnapshot(); }
    );
    g.browserMessageManager.addMessageListener(
        "reftest:Log",
        function (m) { RecvLog(m.json.type, m.json.msg); }
    );
    g.browserMessageManager.addMessageListener(
        "reftest:ScriptResults",
        function (m) { RecvScriptResults(m.json.runtimeMs, m.json.error, m.json.results); }
    );
    g.browserMessageManager.addMessageListener(
        "reftest:PrintResult",
        function (m) { RecvPrintResult(m.json.runtimeMs, m.json.status, m.json.fileName); }
    );
    g.browserMessageManager.addMessageListener(
        "reftest:TestDone",
        function (m) { RecvTestDone(m.json.runtimeMs); }
    );
    g.browserMessageManager.addMessageListener(
        "reftest:UpdateCanvasForInvalidation",
        function (m) { RecvUpdateCanvasForInvalidation(m.json.rects); }
    );
    g.browserMessageManager.addMessageListener(
        "reftest:UpdateWholeCanvasForInvalidation",
        function (m) { RecvUpdateWholeCanvasForInvalidation(); }
    );
    g.browserMessageManager.addMessageListener(
        "reftest:ExpectProcessCrash",
        function (m) { RecvExpectProcessCrash(); }
    );

    g.browserMessageManager.loadFrameScript("chrome://reftest/content/reftest-content.js", true, true);
}

// Content reported its assertion count; run the end-of-test assertion check.
function RecvAssertionCount(count)
{
    DoAssertionCheck(count);
}

// Content script is ready: stash its graphics info, kick off the test run,
// and tell content whether the browser is running remote (e10s).
function RecvContentReady(info)
{
    g.contentGfxInfo = info.gfx;
    InitAndStartRefTests();
    return { remote: g.browserIsRemote };
}

// Content hit an exception; log it against the current URL and count it.
function RecvException(what)
{
    logger.error(g.currentURL + " | " + what);
    ++g.testResults.Exception;
}

// Content could not load the document.
function RecvFailedLoad(why)
{
    LoadFailed(why);
}

// The reftest-no-paint check failed for the current test.
function RecvFailedNoPaint()
{
    g.failedNoPaint = true;
}

// The reftest-no-display-list check failed for the current test.
function RecvFailedNoDisplayList()
{
    g.failedNoDisplayList = true;
}

// The reftest-display-list check failed for the current test.
function RecvFailedDisplayList()
{
    g.failedDisplayList = true;
}

// The reftest-opaque-layer check failed; record why.
function RecvFailedOpaqueLayer(why) {
    g.failedOpaqueLayer = true;
    g.failedOpaqueLayerMessages.push(why);
}

// The reftest-assigned-layer check failed; record why.
function RecvFailedAssignedLayer(why) {
    g.failedAssignedLayer = true;
    g.failedAssignedLayerMessages.push(why);
}

// Take a snapshot into the current canvas and tell content whether a
// paint actually happened (sync message reply).
function RecvInitCanvasWithSnapshot()
{
    var painted = InitCurrentCanvasWithSnapshot();
    return { painted: painted };
}

// Route a log message from content: "info" goes to the per-test buffer,
// "warning" to the logger; anything else is a harness error.
function RecvLog(type, msg)
{
    msg = "[CONTENT] " + msg;
    if (type == "info") {
        TestBuffer(msg);
    } else if (type == "warning") {
        logger.warning(msg);
    } else {
        logger.error("REFTEST TEST-UNEXPECTED-FAIL | " + g.currentURL + " | unknown log type " + type + "\n");
        ++g.testResults.Exception;
    }
}

// A script test finished; record its sub-test results.
function RecvScriptResults(runtimeMs, error, results)
{
    RecordResult(runtimeMs, error, results);
}

// A print test finished; status is an nsresult from the print job and
// fileName the path of the generated PDF.
function RecvPrintResult(runtimeMs, status, fileName)
{
    if (!Components.isSuccessCode(status)) {
        logger.error("REFTEST TEST-UNEXPECTED-FAIL | " + g.currentURL + " | error during printing\n");
        ++g.testResults.Exception;
    }
    RecordResult(runtimeMs, '', fileName);
}

// A load/rendering test finished (no type-specific results to pass).
function RecvTestDone(runtimeMs)
{
    RecordResult(runtimeMs, '', [ ]);
}

// Repaint only the given invalidated rects into the current canvas.
function RecvUpdateCanvasForInvalidation(rects)
{
    UpdateCurrentCanvasForInvalidation(rects);
}

// Repaint the whole current canvas after an invalidation.
function RecvUpdateWholeCanvasForInvalidation()
{
    UpdateWholeCurrentCanvasForInvalidation();
}

// Observer callback for process/plugin crashes: remember the dump ID so
// RemoveExpectedCrashDumpFiles can delete the matching .dmp/.extra files.
function OnProcessCrashed(subject, topic, data)
{
    var id;
    subject = subject.QueryInterface(Ci.nsIPropertyBag2);
    if (topic == "plugin-crashed") {
        id = subject.getPropertyAsAString("pluginDumpID");
    } else if (topic == "ipc:content-shutdown") {
        id = subject.getPropertyAsAString("dumpID");
    }
    if (id) {
        g.expectedCrashDumpFiles.push(id + ".dmp");
        g.expectedCrashDumpFiles.push(id + ".extra");
    }
}

// Subscribe OnProcessCrashed to the crash-related observer topics.
function RegisterProcessCrashObservers()
{
    var os = Cc[NS_OBSERVER_SERVICE_CONTRACTID]
             .getService(Ci.nsIObserverService);
    os.addObserver(OnProcessCrashed, "plugin-crashed");
    os.addObserver(OnProcessCrashed, "ipc:content-shutdown");
}

// The current test announced it will deliberately crash a process.
function RecvExpectProcessCrash()
{
    g.expectingProcessCrash = true;
}

// Ask content to clear the current document (blank page between tests).
function SendClear()
{
    g.browserMessageManager.sendAsyncMessage("reftest:Clear");
}

// Ask content to run a script test for `uri` with the given timeout.
function SendLoadScriptTest(uri, timeout)
{
    g.browserMessageManager.sendAsyncMessage("reftest:LoadScriptTest",
                                             { uri: uri, timeout: timeout });
}

// Ask content to run a print test for `uri` with the given timeout.
function SendLoadPrintTest(uri, timeout)
{
    g.browserMessageManager.sendAsyncMessage("reftest:LoadPrintTest",
                                             { uri: uri, timeout: timeout });
}

// Ask content to load a test or reference document.
// uriTargetType distinguishes test vs. reference (URL_TARGET_TYPE_*).
function SendLoadTest(type, uri, uriTargetType, timeout)
{
    g.browserMessageManager.sendAsyncMessage("reftest:LoadTest",
                                             { type: type, uri: uri,
                                               uriTargetType: uriTargetType,
                                               timeout: timeout }
    );
}

// Ask content to reset its rendering state (see ResetRenderingState).
function SendResetRenderingState()
{
    g.browserMessageManager.sendAsyncMessage("reftest:ResetRenderingState");
}

// Read the file at `path` and parse it with PDF.js on a loopback worker.
// callback(error, pdf) is invoked with exactly one of error/pdf set.
function readPdf(path, callback) {
    OS.File.open(path, { read: true }).then(function (file) {
        file.flush().then(function() {
            file.read().then(function (data) {
                // Run the PDF.js worker in-process over a loopback port so
                // no separate worker thread/process is needed.
                let fakePort = new PDFJS.main.LoopbackPort(true);
                PDFJS.worker.WorkerMessageHandler.initializeFromPort(fakePort);
                let myWorker = new PDFJS.main.PDFWorker("worker", fakePort);
                PDFJS.main.PDFJS.getDocument({
                    worker: myWorker,
                    data: data
                }).then(function (pdf) {
                    callback(null, pdf);
                }, function () {
                    callback(new Error("Couldn't parse " + path));
                });
                return;
            }, function () {
                callback(new Error("Couldn't read PDF"));
            });
        });
    });
}

// Compare two PDFs (test vs. reference) for a print reftest.
// Checks that the page counts match and that each page's text content is
// identical, comparing text items in order. callback(error, results) gets
// either an error or an array of {passed, description}, one entry for the
// page-count check plus one per page.
function comparePdfs(pathToTestPdf, pathToRefPdf, callback) {
    Promise.all([pathToTestPdf, pathToRefPdf].map(function(path) {
        return new Promise(function(resolve, reject) {
            readPdf(path, function(error, pdf) {
                // Resolve or reject outer promise. reject and resolve are
                // passed to the callback function given as first arguments
                // to the Promise constructor.
                if (error) {
                    reject(error);
                } else {
                    resolve(pdf);
                }
            });
        });
    })).then(function(pdfs) {
        // pdfs[0] is the test document, pdfs[1] the reference.
        let numberOfPages = pdfs[1].numPages;
        let sameNumberOfPages = numberOfPages === pdfs[0].numPages;

        let resultPromises = [Promise.resolve({
            passed: sameNumberOfPages,
            description: "Expected number of pages: " + numberOfPages +
                                             ", got " + pdfs[0].numPages
        })];

        if (sameNumberOfPages) {
            for (let i = 0; i < numberOfPages; i++) {
                // PDF.js page numbers are 1-based.
                let pageNum = i + 1;
                let testPagePromise = pdfs[0].getPage(pageNum);
                let refPagePromise = pdfs[1].getPage(pageNum);
                resultPromises.push(new Promise(function(resolve, reject) {
                    Promise.all([testPagePromise, refPagePromise]).then(function(pages) {
                        let testTextPromise = pages[0].getTextContent();
                        let refTextPromise = pages[1].getTextContent();
                        Promise.all([testTextPromise, refTextPromise]).then(function(texts) {
                            let testTextItems = texts[0].items;
                            let refTextItems = texts[1].items;
                            let testText;
                            let refText;
                            // Every reference text item must have a matching
                            // test item at the same index.
                            let passed = refTextItems.every(function(o, i) {
                                refText = o.str;
                                if (!testTextItems[i]) {
                                    return false;
                                }
                                testText = testTextItems[i].str;
                                return testText === refText;
                            });
                            let description;
                            if (passed) {
                                // All reference items matched; the test doc
                                // must not contain extra trailing text either.
                                if (testTextItems.length > refTextItems.length) {
                                    passed = false;
                                    description = "Page " + pages[0].pageNumber +
                                        " contains unexpected text like '" +
                                        testTextItems[refTextItems.length].str + "'";
                                } else {
                                    description = "Page " + pages[0].pageNumber +
                                        " contains same text"
                                }
                            } else {
                                description = "Expected page " + pages[0].pageNumber +
                                    " to contain text '" + refText;
                                if (testText) {
                                    description += "' but found '" + testText +
                                                   "' instead";
                                }
                            }
                            resolve({
                                passed: passed,
                                description: description
                            });
                        }, reject);
                    }, reject);
                }));
            }
        }

        // NOTE(review): this Promise.all has no rejection handler — if any
        // per-page promise rejects (getPage/getTextContent failure), the
        // callback is presumably never invoked; verify intended behavior.
        Promise.all(resultPromises).then(function (results) {
            callback(null, results);
        });
    }, function(error) {
        callback(error);
    });
}