# coding=utf-8
# Copyright (c) 2014, 2016-2017, 2019-2020 Intel Corporation

# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:

# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.

# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

""" Module providing json backend for piglit """

import collections
import functools
import os
import shutil
import sys

# Prefer simplejson (faster C extension) when available, fall back to the
# stdlib json module.
try:
    import simplejson as json
except ImportError:
    import json

# jsonstreams is an optional dependency that lets us stream the final result
# file instead of building the whole tree in memory.
try:
    import jsonstreams
    _STREAMS = True
except ImportError:
    _STREAMS = False

from framework import status, results, exceptions
from .abstract import FileBackend, write_compressed
from .register import Registry
from . import compression

__all__ = [
    'REGISTRY',
    'JSONBackend',
]

# The current version of the JSON results
CURRENT_JSON_VERSION = 10

# The minimum JSON format supported
MINIMUM_SUPPORTED_VERSION = 7

# The level to indent a final file
INDENT = 4


def piglit_encoder(obj):
    """ Encoder for piglit that can transform additional classes into json

    Adds support for status.Status objects and for set() instances

    """
    if isinstance(obj, status.Status):
        return str(obj)
    elif isinstance(obj, set):
        return list(obj)
    elif hasattr(obj, 'to_json'):
        return obj.to_json()
    return obj


class JSONBackend(FileBackend):
    """ Piglit's native JSON backend

    This writes out to piglit's native json backend. This class uses the python
    json module or the simplejson.

    This class is atomic, writes either completely fail or completely succeed.
    To achieve this it writes individual files for each test and for the
    metadata, and composes them at the end into a single file and removes the
    intermediate files. When it tries to compose these files if it cannot read
    a file it just ignores it, making the result atomic.

    """
    _file_extension = 'json'

    def initialize(self, metadata):
        """ Write boilerplate json code

        This writes all of the json except the actual tests.

        Arguments:
        metadata -- a dictionary of values to be written

        """
        # If metadata is None then this is a loaded result and there is no need
        # to initialize
        metadata['results_version'] = CURRENT_JSON_VERSION

        with open(os.path.join(self._dest, 'metadata.json'), 'w') as f:
            json.dump(metadata, f, default=piglit_encoder)

            # Flush the metadata to the disk, always
            f.flush()
            os.fsync(f.fileno())

        # make the directory for the tests
        try:
            os.mkdir(os.path.join(self._dest, 'tests'))
        except OSError:
            pass

    def finalize(self, metadata=None):
        """ End json serialization and cleanup

        This method is called after all of tests are written, it closes any
        containers that are still open and closes the file

        """
        tests_dir = os.path.join(self._dest, 'tests')
        file_list = sorted(
            (f for f in os.listdir(tests_dir) if f.endswith('.json')),
            key=lambda p: int(os.path.splitext(p)[0]))

        # If jsonstreams is not present then build a complete tree of all of
        # the data and write it with json.dump
        if not _STREAMS:
            # Create a dictionary that is full of data to be written to a
            # single file
            data = collections.OrderedDict()

            # Load the metadata and put it into a dictionary
            with open(os.path.join(self._dest, 'metadata.json'), 'r') as f:
                data.update(json.load(f))

            # If there is more metadata add it the dictionary
            if metadata:
                data.update(metadata)

            # Add the tests to the dictionary
            data['tests'] = collections.OrderedDict()

            for test in file_list:
                test = os.path.join(tests_dir, test)
                if os.path.isfile(test):
                    # Try to open the json snippets. If we fail to open a test
                    # then throw the whole thing out. This gives us atomic
                    # writes, the writing worked and is valid or it didn't
                    # work.
                    try:
                        with open(test, 'r') as f:
                            data['tests'].update(json.load(f))
                    except ValueError:
                        pass

            if not data['tests']:
                raise exceptions.PiglitUserError(
                    'No tests were run, not writing a result file',
                    exitcode=2)

            data = results.TestrunResult.from_dict(data)

            # write out the combined file. Use the compression writer from the
            # FileBackend
            with self._write_final(
                    os.path.join(self._dest, 'results.json')) as f:
                json.dump(data, f, default=piglit_encoder, indent=INDENT)

        # Otherwise use jsonstreams to write the final dictionary. This uses an
        # external library, but is slightly faster and uses considerably less
        # memory that building a complete tree.
        else:
            encoder = functools.partial(
                json.JSONEncoder, default=piglit_encoder)

            with self._write_final(
                    os.path.join(self._dest, 'results.json')) as f:
                with jsonstreams.Stream(jsonstreams.Type.object, fd=f,
                                        indent=4, encoder=encoder,
                                        pretty=True) as s:
                    s.write('__type__', 'TestrunResult')
                    with open(os.path.join(self._dest, 'metadata.json'),
                              'r') as n:
                        s.iterwrite(
                            json.load(
                                n,
                                object_pairs_hook=collections.OrderedDict
                            ).items())

                    if metadata:
                        s.iterwrite(metadata.items())

                    with s.subobject('tests') as t:
                        wrote = False
                        for test in file_list:
                            test = os.path.join(tests_dir, test)
                            if os.path.isfile(test):
                                try:
                                    with open(test, 'r') as f:
                                        a = json.load(f)
                                except ValueError:
                                    continue

                                t.iterwrite(a.items())
                                wrote = True

                        if not wrote:
                            raise exceptions.PiglitUserError(
                                'No tests were run.',
                                exitcode=2)

        # Delete the temporary files
        os.unlink(os.path.join(self._dest, 'metadata.json'))
        shutil.rmtree(os.path.join(self._dest, 'tests'))

    @staticmethod
    def _write(f, name, data):
        # Each per-test snippet is a single-key object so the snippets can be
        # merged into the final 'tests' object with dict.update().
        json.dump({name: data}, f, default=piglit_encoder)


def load_results(filename, compression_):
    """ Loader function for TestrunResult class

    This function takes a single argument of a results file.

    It makes quite a few assumptions, first it assumes that it has been passed
    a folder, if that fails then it looks for a plain text json file called
    "main"

    """
    # This will load any file or file-like thing. That would include pipes and
    # file descriptors
    if not os.path.isdir(filename):
        filepath = filename
    elif (os.path.exists(os.path.join(filename, 'metadata.json')) and
          not os.path.exists(os.path.join(
              filename, 'results.json.' + compression_))):
        # We want to hit this path only if there isn't a
        # results.json.<compressions>, since otherwise we'll continually
        # regenerate values that we don't need to.
        return _resume(filename)
    else:
        # Look for a compressed result first, then a bare result.
        for name in ['results.json.{}'.format(compression_), 'results.json']:
            if os.path.exists(os.path.join(filename, name)):
                filepath = os.path.join(filename, name)
                break
        else:
            raise exceptions.PiglitFatalError(
                'No results found in "{}" (compression: {})'.format(
                    filename, compression_))

    assert compression_ in compression.COMPRESSORS, \
        'unsupported compression type'

    with compression.DECOMPRESSORS[compression_](filepath) as f:
        testrun = _load(f)

    return results.TestrunResult.from_dict(_update_results(testrun, filepath))


def set_meta(results):
    """Set json specific metadata on a TestrunResult."""
    results.results_version = CURRENT_JSON_VERSION


def _load(results_file):
    """Load a json results instance and return a TestrunResult.

    This function converts an existing, fully completed json run.

    """
    try:
        result = json.load(results_file,
                           object_pairs_hook=collections.OrderedDict)
    except ValueError as e:
        raise exceptions.PiglitFatalError(
            'While loading json results file: "{}",\n'
            'the following error occurred:\n{}'.format(results_file.name,
                                                       str(e)))

    return result


def _resume(results_dir):
    """Loads a partially completed json results directory."""
    # Pylint can't infer that the json being loaded is a dict
    # pylint: disable=maybe-no-member
    assert os.path.isdir(results_dir), \
        "TestrunResult.resume() requires a directory"

    # Load the metadata
    with open(os.path.join(results_dir, 'metadata.json'), 'r') as f:
        meta = json.load(f)
    assert meta['results_version'] == CURRENT_JSON_VERSION, \
        "Old results version, resume impossible"

    meta['tests'] = collections.OrderedDict()

    # Load all of the test names and added them to the test list
    tests_dir = os.path.join(results_dir, 'tests')
    file_list = sorted(
        (l for l in os.listdir(tests_dir) if l.endswith('.json')),
        key=lambda p: int(os.path.splitext(p)[0]))

    for file_ in file_list:
        with open(os.path.join(tests_dir, file_), 'r') as f:
            try:
                meta['tests'].update(json.load(f))
            except ValueError:
                continue

    return results.TestrunResult.from_dict(meta)


def _update_results(results, filepath):
    """ Update results to the latest version

    This function is a wrapper for other update_* functions, providing
    incremental updates from one version to another.

    Arguments:
    results -- a TestrunResults instance
    filepath -- the name of the file that the Testrunresults instance was
                created from

    """

    def loop_updates(results):
        """ Helper to select the proper update sequence """
        # Python lacks a switch statement, the workaround is to use a
        # dictionary
        updates = {
            7: _update_seven_to_eight,
            8: _update_eight_to_nine,
            9: _update_nine_to_ten,
        }

        while results['results_version'] < CURRENT_JSON_VERSION:
            results = updates[results['results_version']](results)

        return results

    if results['results_version'] < MINIMUM_SUPPORTED_VERSION:
        raise exceptions.PiglitFatalError(
            'Unsupported version "{}", '
            'minimum supported version is "{}"'.format(
                results['results_version'], MINIMUM_SUPPORTED_VERSION))

    # If the results version is the current version there is no need to
    # update, just return the results
    if results['results_version'] == CURRENT_JSON_VERSION:
        return results

    results = loop_updates(results)

    # Move the old results, and write the current results
    try:
        os.rename(filepath, filepath + '.old')
        _write(results, filepath)
    except OSError:
        print("WARNING: Could not write updated results {}".format(filepath),
              file=sys.stderr)

    return results


def _write(results, file_):
    """Write the values of the results out to a file."""
    with write_compressed(file_) as f:
        json.dump(results, f, default=piglit_encoder, indent=INDENT)


def _update_seven_to_eight(result):
    """Update json results from version 7 to 8.

    This update replaces the time attribute float with a TimeAttribute object,
    which stores a start time and an end time, and provides methods for
    getting total and delta.

    This value is used for both TestResult.time and TestrunResult.time_elapsed.

    """
    for test in result['tests'].values():
        test['time'] = {'start': 0.0, 'end': float(test['time']),
                        '__type__': 'TimeAttribute'}

    result['time_elapsed'] = {'start': 0.0, 'end':
                              float(result['time_elapsed']),
                              '__type__': 'TimeAttribute'}

    result['results_version'] = 8

    return result


def _update_eight_to_nine(result):
    """Update json results from version 8 to 9.

    This changes the PID field of the TestResult object to a list of Integers
    or null rather than a single integer or null.

    """
    for test in result['tests'].values():
        if 'pid' in test:
            test['pid'] = [test['pid']]
        else:
            test['pid'] = []

    result['results_version'] = 9

    return result


def _update_nine_to_ten(result):
    """Update json results from version 9 to 10.

    Moves the various system-information fields (glxinfo, lspci, ...) out of
    the top level of the result and into an 'info' -> 'system' sub-object,
    dropping empty values.

    """
    result['info'] = {}
    result['info']['system'] = {}
    for e in ['glxinfo', 'wglinfo', 'clinfo', 'lspci', 'uname']:
        r = result.pop(e)
        if r:
            result['info']['system'][e] = r

    result['results_version'] = 10

    return result


REGISTRY = Registry(
    extensions=['.json'],
    backend=JSONBackend,
    load=load_results,
    meta=set_meta,
)