# Copyright (C) 2001-2018 Free Software Foundation, Inc.

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GCC; see the file COPYING3.  If not see
# <http://www.gnu.org/licenses/>.
#
# This script was submitted by Janis Johnson <janis187@us.ibm.com>.

# Test the functionality and optionally, performance improvement, of
# programs compiled with profile-directed optimizations.  Compile and
# run a test with profile options, compile it with options using the
# profile feedback, and then run the test again.  Optionally compile
# and run a third time without the profile-directed optimization and
# compare timing results of the program with normal optimization and
# with the profile-directed optimization.  Each test is run using
# multiple sets of optimization and/or code generation options in
# addition to the profiling and feedback options.

# If perf_ext is defined and the performance value for the
# profile-directed test run is nonzero then the performance check will
# be done.

load_lib dg.exp
load_lib gcc-dg.exp

global PROFOPT_OPTIONS perf_delta

# The including .exp file must define these.
global tool profile_option feedback_option prof_ext profile_wrapper
if ![info exists tool] {
    # Fixed typo in the error message ("Tools" -> "Tool"); $tool names
    # the single tool under test (e.g. "gcc", "g++").
    error "Tool is not specified."
}
if ![info exists prof_ext] {
    error "No profile data file extensions specified."
}

# The maximum performance degradation (percent) can be defined in the
# including file.
if ![info exists perf_delta] {
    set perf_delta 4
}

# The default option list can be overridden by
# PROFOPT_OPTIONS="{ { list1 } ... { list2 } }"

if ![info exists PROFOPT_OPTIONS] {
    set PROFOPT_OPTIONS [list \
	{ -g } \
	{ -O0 } \
	{ -O1 } \
	{ -O2 } \
	{ -O3 } \
	{ -O3 -g } \
	{ -Os } ]
}

#
# profopt-cleanup -- remove profiling or performance results files
# from the build machine.
#
# TESTCASE is the name of the test.
# EXTLIST is the list of file extensions to remove.
#
proc profopt-cleanup { testcase extlist } {
    set basename [file tail $testcase]
    set base [file rootname $basename]
    foreach ext $extlist {
	set files [glob -nocomplain $base.$ext]
	if { $files != "" } {
	    eval "remote_file build delete $files"
	}
    }
}

#
# profopt-target-cleanup -- remove profiling result files from the
# target, for the main source file and any dg-additional-sources.
#
# DIR is the name of the directory.
# TESTCASE is the name of the test.
# EXT is the extension of files to remove.
#
proc profopt-target-cleanup { dir testcase ext } {
    global additional_sources_used
    set basename [file tail $testcase]
    set base [file rootname $basename]
    set file "$dir/$base.$ext"
    eval "remote_file target delete $file"

    # Also clean up data files produced for any additional sources.
    if [info exists additional_sources_used] {
	foreach srcfile $additional_sources_used {
	    set basename [file tail $srcfile]
	    set base [file rootname $basename]
	    set file "$dir/$base.$ext"
	    eval "remote_file target delete $file"
	}
    }
}

#
# profopt-perf-value -- get the performance value for a test.
#
# Returns the non-negative TIME value parsed from the result file,
# -1 when the file reports "TIME -1" (no consistent time), or
# -2 when the file is missing or malformed.  A missing file is left
# for the caller to judge; a malformed existing file is FAILed here.
#
# TESTCASE is the name of the test.
# PERF_EXT is the extension of the performance result file.
# OPTSTR is the string of compiler options (for test messages).
#
proc profopt-perf-value { testcase perf_ext optstr } {
    set basename [file tail $testcase]
    set base [file rootname $basename]
    set files [glob -nocomplain $base.$perf_ext]
    # The file doesn't exist; let the caller decide if that's a problem.
    if { $files == "" } {
	return -2
    }
    remote_upload host $base.$perf_ext $base.$perf_ext
    set fd [open $base.$perf_ext r]
    gets $fd line
    set val -2
    if [regexp "TIME" $line] {
	if [regexp "TIME -1" $line] {
	    fail "$testcase perf check: no consistent time available, $optstr"
	    set val -1
	} elseif ![regexp "(\[0-9\]+)" "$line" val] {
	    set val -2
	}
    }
    # Report problems with an existing file.
    if { $val == -2 } {
	fail "$testcase perf check: file $base.$perf_ext has wrong format, $optstr"
    }
    close $fd
    profopt-cleanup $testcase $perf_ext
    return $val
}

#
# dg-final-generate -- process code to run after the profile-generate step.
#
# ARGS is the line number of the directive followed by the commands.
#
proc dg-final-generate { args } {
    global generate_final_code

    if { [llength $args] > 2 } {
	error "[lindex $args 0]: too many arguments"
	return
    }
    append generate_final_code "[lindex $args 1]\n"
}

#
# dg-final-use -- process code to run after the profile-use step.
#
# ARGS is the line number of the directive followed by the commands.
#
proc dg-final-use { args } {
    global use_final_code

    if { [llength $args] > 2 } {
	error "[lindex $args 0]: too many arguments"
	return
    }
    append use_final_code "[lindex $args 1]\n"
}

#
# dg-final-use-not-autofdo -- process code to run after the profile-use step
# but only if not running autofdo.
# ARGS is the line number of the directive followed by the commands.
#
proc dg-final-use-not-autofdo { args } {
    global use_final_code
    global run_autofdo

    if { [llength $args] > 2 } {
	error "[lindex $args 0]: too many arguments"
	return
    }

    # Skip this directive entirely when the autofdo variant is running.
    if { $run_autofdo == 1 } {
	return
    }
    append use_final_code "[lindex $args 1]\n"
}

#
# dg-final-use-autofdo -- process code to run after the profile-use step
# but only if running autofdo.
# ARGS is the line number of the directive followed by the commands.
#

proc dg-final-use-autofdo { args } {
    global use_final_code
    global run_autofdo

    if { [llength $args] > 2 } {
	error "[lindex $args 0]: too many arguments"
	return
    }

    # Only active for the autofdo variant.
    if { $run_autofdo != 1 } {
	return
    }
    append use_final_code "[lindex $args 1]\n"
}

#
# profopt-final-code -- run final code collected by the dg-final-* directives.
#
# WHICH is "generate" or "use".
# FINAL_CODE is the TCL code to run.
# NAME is the name of the test, for error messages.
#
proc profopt-final-code { which final_code name } {
    # This is copied from dg-test in dg.exp of DejaGnu.
    # Unescape \{ and \} that the directive parser escaped, then define
    # a throwaway proc whose body is the collected directive code.
    regsub -all "\\\\(\[{}\])" $final_code "\\1" final_code
    proc profopt-final-proc { args } $final_code
    if [catch "profopt-final-proc $name" errmsg] {
	perror "$name: error executing dg-final-${which}: $errmsg"
	unresolved "$name: Error executing dg-final-${which}: $errmsg"
    }
}

#
# profopt-get-options -- process test directives in the testcase.
#
# SRC is the full pathname of the testcase.
# Returns the extra flags collected from dg-options and friends.
#
proc profopt-get-options { src } {
    # dg-options sets a variable called dg-extra-tool-flags.
    set dg-extra-tool-flags ""

    # dg-require-* sets dg-do-what.
    upvar dg-do-what dg-do-what

    # current_compiler_flags reads tool_flags from the same stack frame
    # as dg-extra-tool-flags
    set tool_flags ""

    set tmp [dg-get-options $src]
    foreach op $tmp {
	set cmd [lindex $op 0]
	# Only the directives in this whitelist are meaningful for
	# profopt tests; each is evaluated in this frame so it can set
	# dg-extra-tool-flags / dg-do-what above.
	if { ![string compare "dg-options" $cmd] \
	     || ![string compare "dg-additional-options" $cmd] \
	     || ![string compare "dg-add-options" $cmd] \
	     || ![string compare "dg-skip-if" $cmd] \
	     || ![string compare "dg-final-generate" $cmd] \
	     || ![string compare "dg-final-use" $cmd] \
	     || ![string compare "dg-final-use-not-autofdo" $cmd] \
	     || ![string compare "dg-final-use-autofdo" $cmd] \
	     || ![string compare "dg-additional-sources" $cmd] \
	     || [string match "dg-require-*" $cmd] } {
	    set status [catch "$op" errmsg]
	    if { $status != 0 } {
		perror "$src: $errmsg for \"$op\"\n"
		unresolved "$src: $errmsg for \"$op\""
		return
	    }
	} else {
	    # Ignore unrecognized dg- commands, but warn about them.
	    warning "profopt.exp does not support $cmd"
	}
    }

    # Return flags to use for compiling the primary source file and for
    # linking.
    return ${dg-extra-tool-flags}
}

# auto-profopt-execute -- Compile for auto profiling and then feedback,
# then normal.  SRC is the full path name of the testcase.
proc auto-profopt-execute { src } {
    global profile_wrapper
    global profile_option
    global feedback_option
    global run_autofdo
    global srcdir

    if { ! [check_profiling_available "-fauto-profile"] } {
	regsub "(?q)$srcdir/" $src "" testcase
	unsupported "$testcase -fauto-profile"
	return
    }
    # Drive profopt-execute in autofdo mode: run under a perf wrapper,
    # compile with -g for the profiling pass and -fauto-profile for the
    # feedback pass.  The globals are unset afterwards so a subsequent
    # non-autofdo caller starts clean.
    set profile_wrapper [profopt-perf-wrapper]
    set profile_option "-g"
    set feedback_option "-fauto-profile"
    set run_autofdo 1
    profopt-execute $src
    unset profile_wrapper
    unset profile_option
    unset feedback_option
    unset run_autofdo
}

#
# profopt-execute -- compile for profiling and then feedback, then normal.
#
# SRC is the full pathname of the testcase.
#
# For each option set in PROFOPT_OPTIONS: (1) compile with
# $profile_option and run to produce profile data, (2) compile with
# $feedback_option and run, (3) optionally (when perf_ext is defined and
# a nonzero time was measured) compile without feedback, run, and
# compare timings, failing if the feedback build is more than
# $perf_delta percent slower.
#
proc profopt-execute { src } {
    global srcdir tmpdir
    global PROFOPT_OPTIONS
    global tool profile_option feedback_option prof_ext perf_ext perf_delta
    global profile_wrapper run_autofdo ld_library_path
    global generate_final_code use_final_code
    global verbose
    global testname_with_flags

    if ![info exists profile_option] {
	error "No profile option specified for first compile."
    }
    if ![info exists feedback_option] {
	error "No feedback option specified for second compile."
    }
    if ![info exists profile_wrapper] {
	set profile_wrapper ""
    }
    if ![info exists run_autofdo] {
	set run_autofdo ""
    }

    # Use the default option list or one defined for a set of tests.
    if ![info exists PROFOPT_OPTIONS] {
	error "PROFOPT_OPTIONS is not defined"
    }
    set prof_option_list $PROFOPT_OPTIONS

    regsub "(?q)$srcdir/" $src "" testcase
    # If we couldn't rip $srcdir out of `src' then just do the best we can.
    # The point is to reduce the unnecessary noise in the logs.  Don't strip
    # out too much because different testcases with the same name can confuse
    # `test-tool'.
    if [string match "/*" $testcase] {
	set testcase "[file tail [file dirname $src]]/[file tail $src]"
    }

    # Several procedures access the name of the test with torture flags,
    # normally defined in dg-test.  Profile optimization tests don't
    # use dg-test, so define it here to make it accessible via
    # testname-for-summary.
    set testname_with_flags $testcase

    set executable $tmpdir/[file tail [file rootname $src].x]
    set basename [file tail $testcase]
    set base [file rootname $basename]

    set count 0
    foreach option $prof_option_list {
	# Three executables per option set: profiled, feedback-optimized,
	# and plain-optimized (for the perf comparison).
	set execname1 "${executable}${count}1"
	set execname2 "${executable}${count}2"
	set execname3 "${executable}${count}3"
	incr count

	remote_file build delete $execname1
	remote_file build delete $execname2
	remote_file build delete $execname3
	verbose "Testing $testcase, $option" 1

	# Remove old performance data files.
	if [info exists perf_ext] {
	    profopt-cleanup $testcase $perf_ext
	}

	# Process test directives.

	set generate_final_code ""
	set use_final_code ""
	set dg-do-what [list "run" "" P]
	set extra_flags [profopt-get-options $src]
	# "N" means a dg-require-* / dg-skip-if directive ruled the test out.
	if { [lindex ${dg-do-what} 1 ] == "N" } {
	    unsupported "$testcase"
	    unset testname_with_flags
	    verbose "$src not supported on this target, skipping it" 3
	    return
	}

	# schedule removal of dump files et al
	# Do this before the call below destroys additional_sources..
	append use_final_code [schedule-cleanups "$option $extra_flags"]
	set extra_options [dg-additional-files-options "" "$src"]

	# Remove old profiling data files.  Make sure additional_sources_used is
	# valid, by running it after dg-additional-files-options.
	# NOTE(review): this loop's variable `ext' deliberately leaks out of
	# the loop; the create_gcov command below depends on that leftover
	# value — confirm before restructuring.
	foreach ext $prof_ext {
	    profopt-target-cleanup $tmpdir $base $ext
	    profopt-target-cleanup $tmpdir $base "perf.data"
	}

	# Tree profiling requires TLS runtime support, which may need
	# additional flags.
	if { [string first "-fprofile-generate" $profile_option] >= 0 } {
	    set extra_flags [add_options_for_tls $extra_flags]
	}

	# Compile for profiling.

	set options "$extra_options"
	lappend options "additional_flags=$option $extra_flags $profile_option"
	set optstr "$option $profile_option"
	set comp_output [${tool}_target_compile "$src" "$execname1" executable $options]
	if ![${tool}_check_compile "$testcase compilation" $optstr $execname1 $comp_output] {
	    # The later stages can't run; mark them all unresolved.
	    unresolved "$testcase execution, $optstr"
	    unresolved "$testcase compilation, $option $feedback_option"
	    unresolved "$testcase execution, $option $feedback_option"
	    continue
	}

	# Run the profiled test.
	if { $run_autofdo == 1 } {
	    # Autofdo: run the binary under the perf wrapper so it writes
	    # $base.perf.data, temporarily extending LD_LIBRARY_PATH.
	    if { ![info exists ld_library_path]} {
		set ld_library_path ""
	    }
	    set orig_ld_library_path "[getenv LD_LIBRARY_PATH]"
	    setenv LD_LIBRARY_PATH "$ld_library_path:$orig_ld_library_path"
	    verbose -log "Running $profile_wrapper -o $tmpdir/$base.perf.data $execname1"
	    set id [remote_spawn "" "$profile_wrapper -o $tmpdir/$base.perf.data $execname1" "readonly"]
	    setenv LD_LIBRARY_PATH $orig_ld_library_path
	    if { $id < 0 } {
		warning "Failed to run profiler"
		set status "fail"
	    } else {
		set result [remote_wait "" 300]
		set status [lindex $result 0]
		verbose "perf result $result"
		if { $status == 0 } {
		    set status "pass"
		} else {
		    set status "fail"
		}
	    }
	} else {
	    set result [${tool}_load $execname1 "" ""]
	    set status [lindex $result 0]
	}

	set missing_file 0
	set bprefix ""
	# Make sure the profile data was generated, and fail if not.
	if { $status == "pass" } {
	    # convert profile
	    if { $run_autofdo == 1 } {
		set bprefix "afdo."
		# NOTE(review): $ext here is the leftover value from the
		# cleanup loop above (the last entry of prof_ext) — fragile;
		# verify if prof_ext ever has more than one entry for autofdo.
		set cmd "create_gcov --binary $execname1 --profile=$tmpdir/$base.perf.data -gcov_version=1 --gcov=$tmpdir/$bprefix$base.$ext"
		verbose "Running $cmd"
		set id [remote_spawn "" $cmd]
		if { $id < 0 } {
		    unsupported "$testcase -fauto-profile: cannot run create_gcov"
		    set status "fail"
		    return
		}
		# NOTE(review): the remote_wait result is immediately
		# overwritten with "pass", so create_gcov failures are not
		# detected here; the missing-file check below is the only
		# safety net — confirm this is intentional.
		set status [remote_wait "" 300]
		set status "pass"
	    }

	    # Verify every expected profile data file actually exists.
	    foreach ext $prof_ext {
		remote_upload target $tmpdir/$bprefix$base.$ext
		set files [glob -nocomplain $bprefix$base.$ext]
		if { $files == "" } {
		    set status "fail"
		    set missing_file 1
		    fail "$testcase execution: file $bprefix$base.$ext does not exist, $option $profile_option"
		}
	    }
	}
	# $status is "pass" or "fail", i.e. the name of the DejaGnu proc
	# to record the result with.
	if { $missing_file == 0 } {
	    $status "$testcase execution, $optstr"
	}

	# If there is dg-final code to execute for the generate step, do it
	# even if it failed; it might clean up temporary files.
	if ![string match $generate_final_code ""] {
	    profopt-final-code "generate" $generate_final_code $testcase
	}

	remote_file build delete $execname1

	# Quit for this round if it failed
	if { $status != "pass" } {
	    unresolved "$testcase compilation, $option $feedback_option"
	    unresolved "$testcase execution, $option $feedback_option"
	    continue
	}

	# Compile with feedback-directed optimizations.

	set options "$extra_options"
	lappend options "additional_flags=$option $extra_flags $feedback_option"
	set optstr "$option $feedback_option"
	# Point -fauto-profile at the converted gcov file produced above.
	if { [string first "-fauto-profile" $options] >= 0} {
	    set options [regsub -- "-fauto-profile" $options "-fauto-profile=$tmpdir/$bprefix$base.$ext"]
	}

	set comp_output [${tool}_target_compile "$src" "$execname2" "executable" $options]

	# Prune warnings we know are unwanted.
	set comp_output [prune_warnings $comp_output]

	if ![${tool}_check_compile "$testcase compilation" $optstr $execname2 $comp_output] {
	    unresolved "$testcase execution, $optstr"
	    continue
	}

	# Run the profile-directed optimized test.

	set result [${tool}_load "$execname2" "" ""]
	set status [lindex $result 0]
	$status "$testcase execution, $optstr"

	# If there is dg-final code to execute for the use step, do it.
	if ![string match $use_final_code ""] {
	    profopt-final-code "use" $use_final_code $testcase
	}

	# Remove the profiling data files.
	foreach ext $prof_ext {
	    profopt-target-cleanup $tmpdir "$bprefix$base" $ext
	    profopt-target-cleanup $tmpdir $base "perf.data"
	    profopt-target-cleanup $tmpdir $base "gcda.imports"
	}

	if { $status != "pass" } {
	    continue
	}

	# If the test is not expected to produce performance data then
	# we're done now.
	if ![info exists perf_ext] {
	    remote_file build delete $execname2
	    continue
	}

	# Get the performance data from the test built with
	# profile-directed optimization.  If the file doesn't exist or if
	# the value is zero, skip the performance comparison.
	set val2 [profopt-perf-value $testcase $perf_ext $optstr]
	if { $val2 <= 0 } {
	    remote_file build delete $execname2
	    continue
	}

	# Compile with normal optimizations.

	set options "$extra_options"
	lappend options "additional_flags=$option"
	set optstr "$option"
	set comp_output [${tool}_target_compile "$src" "$execname3" "executable" $options]
	if ![${tool}_check_compile "$testcase compilation" $optstr $execname3 $comp_output] {
	    unresolved "$testcase execution, $optstr"
	    unresolved "$testcase perf check, $optstr"
	    continue
	}

	# Run the test with normal optimizations.

	set result [${tool}_load "$execname3" "" ""]
	set status [lindex $result 0]
	$status "$testcase execution, $optstr"
	if { $status != "pass" } {
	    unresolved "$testcase perf check, $optstr"
	    continue
	}

	# Get the performance data from the test built with normal
	# optimization.
	set val1 [profopt-perf-value $testcase $perf_ext $optstr]
	if { $val1 < 0 } {
	    if { $val1 == -2 } {
		# The data file existed with the profile-directed
		# optimization so this one should, too.
		fail "$testcase perf check: file $base.$perf_ext does not exist, $optstr"
	    }
	    continue
	}

	# Compare results of the two runs and fail if the time with the
	# profile-directed optimization is significantly more than the time
	# without it.
	set status "pass"
	if { $val2 > $val1 } {
	    # Check for a performance degradation outside of allowable
	    # limits (more than perf_delta percent slower).
	    if { [expr $val2 - $val1] > [expr [expr $val1 * $perf_delta] / 100] } {
		set status "fail"
	    }
	}
	if { $status == "fail" } {
	    fail "$testcase perf check: orig: $val1 new: $val2, $optstr"
	} else {
	    $status "$testcase perf check, $optstr"
	    verbose "$testcase orig: $val1 new: $val2, $optstr" 2
	    remote_file build delete $execname2
	    remote_file build delete $execname3
	}
    }
    unset testname_with_flags
}