import json
import shutil
from unittest import mock

import pytest

from mozperftest.tests.support import (
    get_running_env,
    EXAMPLE_XPCSHELL_TEST,
    temp_file,
    MOZINFO,
)
from mozperftest.environment import TEST, SYSTEM, METRICS
from mozperftest.test.xpcshell import XPCShellTestError, NoPerfMetricsError
from mozperftest import utils
from mozperftest.test import xpcshell


class XPCShellTests:
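    """Fake replacement for runxpcshelltests.XPCShellTests.

    Emits the structured-log actions a run produces, including two
    "perfMetrics" messages carrying the metrics payloads.
    """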
    def __init__(self, log):
        self.log = log

    def runTests(self, args):
        self.log.suite_start("suite start")
        self.log.test_start("test start")
        self.log.process_output("1234", "line", "command")
        self.log.log_raw({"action": "something"})
        self.log.log_raw({"action": "log", "message": "message"})

        # these are the "perfMetrics" entries emitted by the test scripts
        self.log.log_raw(
            {
                "action": "log",
                "message": '"perfMetrics"',
                "extra": {"metrics1": 1, "metrics2": 2},
            }
        )

        self.log.log_raw(
            {"action": "log", "message": '"perfMetrics"', "extra": {"metrics3": 3}}
        )

        self.log.test_end("test end")
        self.log.suite_end("suite end")
        return True


class XPCShellTestsFail(XPCShellTests):
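    """Fake harness whose run always reports a failure."""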
    def runTests(self, args):
        return False


class XPCShellTestsNoPerfMetrics:
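    """Fake harness that completes successfully but never logs perfMetrics."""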
    def __init__(self, log):
        self.log = log

    def runTests(self, args):
        self.log.suite_start("suite start")
        self.log.test_start("test start")
        self.log.process_output("1234", "line", "command")
        self.log.log_raw({"action": "something"})
        self.log.log_raw({"action": "log", "message": "message"})

        self.log.test_end("test end")
        self.log.suite_end("suite end")
        return True


def running_env(**kw):
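    """Build a running environment configured for the xpcshell flavor."""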
    return get_running_env(flavor="xpcshell", xpcshell_mozinfo=MOZINFO, **kw)


@mock.patch("runxpcshelltests.XPCShellTests", new=XPCShellTests)
def test_xpcshell_metrics(*mocked):
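    """Run the system and test layers and check the collected perfMetrics results."""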
    mach_cmd, metadata, env = running_env(tests=[str(EXAMPLE_XPCSHELL_TEST)])

    sys = env.layers[SYSTEM]
    xpcshell = env.layers[TEST]

    try:
        with sys as s, xpcshell as x:
            x(s(metadata))
    finally:
        shutil.rmtree(mach_cmd._mach_context.state_dir)

    res = metadata.get_results()
    assert len(res) == 1
    assert res[0]["name"] == "test_xpcshell.js"
    results = res[0]["results"]

    assert results[0]["name"] == "metrics1"
    assert results[0]["values"] == [1]


def _test_xpcshell_fail(err, *mocked):
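    """Run the test layer and expect it to raise the given error."""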
    mach_cmd, metadata, env = running_env(tests=[str(EXAMPLE_XPCSHELL_TEST)])
    sys = env.layers[SYSTEM]
    xpcshell = env.layers[TEST]
    try:
        with sys as s, xpcshell as x, pytest.raises(err):
            x(s(metadata))
    finally:
        shutil.rmtree(mach_cmd._mach_context.state_dir)


@mock.patch("runxpcshelltests.XPCShellTests", new=XPCShellTestsFail)
def test_xpcshell_metrics_fail(*mocked):
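    """A failing harness run should raise XPCShellTestError."""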
    return _test_xpcshell_fail(XPCShellTestError, *mocked)


@mock.patch("runxpcshelltests.XPCShellTests", new=XPCShellTestsNoPerfMetrics)
def test_xpcshell_no_perfmetrics(*mocked):
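    """A run that never logs perfMetrics should raise NoPerfMetricsError."""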
    return _test_xpcshell_fail(NoPerfMetricsError, *mocked)


@mock.patch("runxpcshelltests.XPCShellTests", new=XPCShellTests)
def test_xpcshell_perfherder(*mocked):
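    """Check the perfherder output produced by a regular run."""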
    return _test_xpcshell_perfherder(*mocked)


@mock.patch("runxpcshelltests.XPCShellTests", new=XPCShellTests)
def test_xpcshell_perfherder_on_try(*mocked):
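    """Check the perfherder output with the ON_TRY flag inverted."""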
    old = utils.ON_TRY
    utils.ON_TRY = xpcshell.ON_TRY = not utils.ON_TRY

    try:
        return _test_xpcshell_perfherder(*mocked)
    finally:
        utils.ON_TRY = old
        xpcshell.ON_TRY = old


def _test_xpcshell_perfherder(*mocked):
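    """Run the system, test, and metrics layers and validate the perfherder JSON."""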
    mach_cmd, metadata, env = running_env(
        perfherder=True, xpcshell_cycles=10, tests=[str(EXAMPLE_XPCSHELL_TEST)]
    )

    sys = env.layers[SYSTEM]
    xpcshell = env.layers[TEST]
    metrics = env.layers[METRICS]

    with temp_file() as output:
        env.set_arg("output", output)
        try:
            with sys as s, xpcshell as x, metrics as m:
                m(x(s(metadata)))
        finally:
            shutil.rmtree(mach_cmd._mach_context.state_dir)

        output_file = metadata.get_output()
        with open(output_file) as f:
            output = json.load(f)

    # Check some metadata
    assert output["application"]["name"] == "firefox"
    assert output["framework"]["name"] == "mozperftest"

    # Check some numbers in our data
    assert len(output["suites"]) == 1
    assert len(output["suites"][0]["subtests"]) == 3
    assert output["suites"][0]["value"] > 0

    for subtest in output["suites"][0]["subtests"]:
        assert subtest["name"].startswith("metrics")