| OLD | NEW |
| (Empty) |
| 1 #!/bin/sh | |
| 2 # chromium-runtests.sh [testsuite] | |
| 3 # Script to run a respectable subset of Chromium's test suite | |
| 4 # (excepting parts that run the browser itself, and excepting layout tests). | |
| 5 # Run from parent of src directory. | |
| 6 # By default, runs all test suites. If you specify one testsuite | |
| 7 # (e.g. base_unittests), it only runs that one. | |
| 8 # | |
| 9 # Chromium's test suite uses gtest, so each executable obeys the options | |
| 10 # documented in the wiki at http://code.google.com/p/googletest | |
| 11 # In particular, you can run a single test with --gtest_filter=Foo.Bar, | |
| 12 # and get a full list of tests in each exe with --gtest_list_tests. | |
| 13 # | |
| 14 # Before running the tests, regardless of operating system: | |
| 15 # 1) Make sure your system has at least one printer installed, | |
| 16 # or printing_unittests and unit_tests' PrintJobTest.SimplePrint | |
| 17 # will fail. A fake printer is fine, nothing will be printed. | |
| 18 # 2) Install the test cert as described at | |
| 19 # http://bugs.winehq.org/show_bug.cgi?id=20370 | |
| 20 # or net_unittests' HTTPSRequestTest.*, SSLClientSocketTest.* | |
| 21 # and others may fail. | |
| 22 # | |
| 23 # Chrome doesn't start without the --no-sandbox | |
| 24 # option in wine, so skip test suites that invoke it directly until I | |
| 25 # figure out how to jam that in there. | |
| 26 | |
# The bot that runs this script seemed to ignore stderr, so redirect stderr
# to stdout by default.  Note: a bare "2>&1" on a line of its own only
# redirects an (empty) null command and has no lasting effect; "exec" is
# required to apply the redirection to the rest of the script.
exec 2>&1
| 29 | |
# Print usage information on stdout and exit with status 1.
# (Typo fixes: "epected" -> "expected", "--windebug" -> "--winedebug".)
usage() {
cat <<_EOF_
Usage: sh chromium-runtests.sh [--options] [suite ...]
Runs chromium tests on Windows or Wine.
Options:
  --individual         - run tests individually
  --groups             - run tests grouped by their major gtest name
  --gtest_filter X     - only run the tests matching X
  --target X           - test with Debug or Release binaries, default to Debug
  --just-crashes       - run only tests expected to crash
  --just-fails         - run only tests expected to fail
  --just-flaky         - run only tests expected to fail sometimes
  --just-hangs         - run only tests expected to hang
  --list-failures      - show list of expected failures
  --logfiles           - log to one file per test, in logs subdir, rather than stdout
  --loops N            - run tests N times
  -n                   - dry run, only show what will be done
  --suppression_dir    - directory containing the suppression files
  --timeout N          - let each executable run for N seconds (default varies)
  --used-suppressions  - extract histogram of used valgrind suppressions from current contents of logs directory
  --valgrind           - run the tests under valgrind
  --vnc N              - run the tests inside a vnc server running on display N
  --winedebug chan     - e.g. --winedebug +relay,+seh
Currently supported suites:
 app_unittests base_unittests courgette_unittests googleurl_unittests
 ipc_tests media_unittests net_unittests printing_unittests sbox_unittests
 sbox_validation_tests setup_unittests tcmalloc_unittests unit_tests
Default is to run all suites.  It takes about five minutes to run them all
together, 22 minutes to run them all individually.
_EOF_
exit 1
}
| 62 | |
# Tests, grouped by how long they take to run
# (SUITES_N is very roughly the N*0.1-second bucket; see get_expected_runtime).
# Skip ones that require chrome itself for the moment
# Commented-out lines preserve the full upstream suite lists; the active
# lines are the subsets that currently work under this harness.
SUITES_1="googleurl_unittests printing_unittests remoting_unittests sbox_validation_tests setup_unittests"
#SUITES_10="app_unittests courgette_unittests ipc_tests reliability_tests sbox_integration_tests sbox_unittests tab_switching_test tcmalloc_unittests url_fetch_test"
SUITES_10="app_unittests courgette_unittests ipc_tests sbox_unittests tcmalloc_unittests"
#SUITES_100="automated_ui_tests installer_util_unittests media_unittests nacl_ui_tests net_perftests net_unittests plugin_tests sync_unit_tests"
SUITES_100="media_unittests net_unittests"
#SUITES_1000="base_unittests interactive_ui_tests memory_test page_cycler_tests perf_tests test_shell_tests unit_tests"
SUITES_1000="base_unittests unit_tests"
#SUITES_10000="ui_tests startup_tests"
| 73 | |
# Valgrind invocation used when --valgrind is given.  Suppression files
# (--suppressions=...) are appended later, after --suppression_dir handling.
# Note the hard-coded valgrind build path -- adjust for your machine.
THE_VALGRIND_CMD="/usr/local/valgrind-10880/bin/valgrind \
--gen-suppressions=all \
--leak-check=full \
--num-callers=25 \
--show-possible=no \
--smc-check=all \
--trace-children=yes \
--track-origins=yes \
-v \
--workaround-gcc296-bugs=yes \
"
| 85 | |
# Force the C locale, presumably so tool output and sort order are
# reproducible across machines -- TODO confirm it also needs exporting.
LANG=C

# Egrep pattern matching log lines that indicate trouble: valgrind errors,
# crashes, gtest failures, and timeouts ("Alarm clock" / the message printed
# when the alarm wrapper kills a run).
PATTERN="are definitely|uninitialised|Unhandled exception|Invalid read|Invalid write|Invalid free|Source and desti|Mismatched free|unaddressable byte|vex x86|impossible|Assertion |INTERNAL ERROR|Terminated|Test failed|Alarm clock|Command exited with non-zero status"
| 89 | |
# Trim valgrind's extra "-v" chatter ("--pid--" prefixed lines), keeping
# only the 'used_suppression' summary lines, and strip carriage returns.
reduce_verbosity() {
  awk '/^--/ && !/used_suppression:/ { next } { print }' | tr -d '\015'
}
| 95 | |
# Filter out known failures
# Avoid tests that hung, failed, or crashed on windows in Dan's reference run,
# or which fail in a way we don't care about on Wine,
# or which hang or crash on wine in a way that keeps other tests from running.
# Also lists url of bug report, if any.
# Format with
#  sh chromium-runtests.sh --list-failures | sort | awk '{printf("%-21s %-20s %-52s %s\n", $1, $2, $3, $4);}'

# Each data line is: <suite> <failure-kind> <gtest-name> [bug-url / comment...]
# get_test_filter keys off the first three whitespace-separated fields;
# everything after the test name is free-form commentary.
list_known_failures() {
cat <<_EOF_
app_unittests         crash-valgrind    IconUtilTest.TestCreateSkBitmapFromHICON       http://bugs.winehq.org/show_bug.cgi?id=20634, not a bug, need to figure out how to handle DIB faults
base_unittests        hang              EtwTraceControllerTest.EnableDisable           http://bugs.winehq.org/show_bug.cgi?id=20946, advapi32.ControlTrace() not yet implemented
base_unittests        crash             EtwTraceConsumer*Test.*                        http://bugs.winehq.org/show_bug.cgi?id=20946, advapi32.OpenTrace() unimplemented
base_unittests        crash             EtwTraceProvider*Test.*                        http://bugs.winehq.org/show_bug.cgi?id=20946, advapi32.RegisterTraceGuids() unimplemented
base_unittests        dontcare          BaseWinUtilTest.FormatMessageW
base_unittests        dontcare          FileUtilTest.CountFilesCreatedAfter
base_unittests        dontcare          FileUtilTest.GetFileCreationLocalTime
base_unittests        dontcare          PEImageTest.EnumeratesPE                       Alexandre triaged
base_unittests        dontcare-winfail  TimeTicks.HighResNow                           fails if run individually on windows
base_unittests        dontcare          WMIUtilTest.*
base_unittests        fail              HMACTest.HMACObjectReuse                       http://bugs.winehq.org/show_bug.cgi?id=20340
base_unittests        fail              HMACTest.HmacSafeBrowsingResponseTest          http://bugs.winehq.org/show_bug.cgi?id=20340
base_unittests        fail              HMACTest.RFC2202TestCases                      http://bugs.winehq.org/show_bug.cgi?id=20340
base_unittests        fail_wine_vmware  RSAPrivateKeyUnitTest.ShortIntegers
base_unittests        flaky-dontcare    StatsTableTest.MultipleProcesses               http://bugs.winehq.org/show_bug.cgi?id=20606
base_unittests        hang-dontcare     DirectoryWatcherTest.*
base_unittests        hang-valgrind     JSONReaderTest.Reading                         # not really a hang, takes 400 seconds
base_unittests        hang-valgrind     RSAPrivateKeyUnitTest.InitRandomTest           # not really a hang, takes 300 seconds
base_unittests        flaky-valgrind    TimeTicks.Deltas                               # fails half the time under valgrind, timing issue?
base_unittests        hang-valgrind     TimerTest.RepeatingTimer*
base_unittests        hang-valgrind     TimeTicks.WinRollover                          # not really a hang, takes 1000 seconds
base_unittests        fail-valgrind     ConditionVariableTest.LargeFastTaskTest        # fails under wine + valgrind TODO(thestig): investigate
base_unittests        fail-valgrind     ProcessUtilTest.CalcFreeMemory                 # fails under wine + valgrind TODO(thestig): investigate
base_unittests        fail-valgrind     ProcessUtilTest.KillSlowChild                  # fails under wine + valgrind TODO(thestig): investigate
base_unittests        fail-valgrind     ProcessUtilTest.SpawnChild                     # fails under wine + valgrind TODO(thestig): investigate
base_unittests        flaky-valgrind    StatsTableTest.StatsCounterTimer               # flaky, timing issues? TODO(thestig): investigate
base_unittests        fail-valgrind     StatsTableTest.StatsRate                       # fails under wine + valgrind TODO(thestig): investigate
base_unittests        fail-valgrind     StatsTableTest.StatsScope                      # fails under wine + valgrind TODO(thestig): investigate
ipc_tests             flaky             IPCChannelTest.ChannelTest                     http://bugs.winehq.org/show_bug.cgi?id=20628
ipc_tests             flaky             IPCChannelTest.SendMessageInChannelConnected   http://bugs.winehq.org/show_bug.cgi?id=20628
ipc_tests             hang              IPCSyncChannelTest.*                           http://bugs.winehq.org/show_bug.cgi?id=20390
media_unittests       crash             FFmpegGlueTest.OpenClose
media_unittests       crash             FFmpegGlueTest.Read
media_unittests       crash             FFmpegGlueTest.Seek
media_unittests       crash             FFmpegGlueTest.Write
media_unittests       fail_wine_vmware  WinAudioTest.PCMWaveStreamTripleBuffer
media_unittests       hang-valgrind     WinAudioTest.PCMWaveSlowSource
net_unittests         fail              SSLClientSocketTest.Read_Interrupted           http://bugs.winehq.org/show_bug.cgi?id=20748
net_unittests         fail              HTTPSRequestTest.HTTPSExpiredTest              # https/ssl failing on the bot, bad Wine? TODO(thestig): investigate
net_unittests         fail              HTTPSRequestTest.HTTPSGetTest                  # https/ssl failing on the bot, bad Wine? TODO(thestig): investigate
net_unittests         fail              HTTPSRequestTest.HTTPSMismatchedTest           # https/ssl failing on the bot, bad Wine? TODO(thestig): investigate
net_unittests         fail              SSLClientSocketTest.Connect                    # https/ssl failing on the bot, bad Wine? TODO(thestig): investigate
net_unittests         fail              SSLClientSocketTest.Read                       # https/ssl failing on the bot, bad Wine? TODO(thestig): investigate
net_unittests         fail              SSLClientSocketTest.Read_FullDuplex            # https/ssl failing on the bot, bad Wine? TODO(thestig): investigate
net_unittests         fail              SSLClientSocketTest.Read_SmallChunks           # https/ssl failing on the bot, bad Wine? TODO(thestig): investigate
net_unittests         fail              URLRequestTestHTTP.HTTPSToHTTPRedirectNoRefererTest  # https/ssl failing on the bot, bad Wine? TODO(thestig): investigate
sbox_unittests        fail              JobTest.ProcessInJob
sbox_unittests        fail              JobTest.TestCreation
sbox_unittests        fail              JobTest.TestDetach
sbox_unittests        fail              JobTest.TestExceptions
sbox_unittests        fail              RestrictedTokenTest.AddAllSidToRestrictingSids
sbox_unittests        fail              RestrictedTokenTest.AddMultipleRestrictingSids
sbox_unittests        fail              RestrictedTokenTest.AddRestrictingSid
sbox_unittests        fail              RestrictedTokenTest.AddRestrictingSidCurrentUser
sbox_unittests        fail              RestrictedTokenTest.AddRestrictingSidLogonSession
sbox_unittests        fail              RestrictedTokenTest.DefaultDacl
sbox_unittests        fail              RestrictedTokenTest.DeleteAllPrivileges
sbox_unittests        fail              RestrictedTokenTest.DeleteAllPrivilegesException
sbox_unittests        fail              RestrictedTokenTest.DeletePrivilege
sbox_unittests        fail              RestrictedTokenTest.DenyOwnerSid
sbox_unittests        fail              RestrictedTokenTest.DenySid
sbox_unittests        fail              RestrictedTokenTest.DenySids
sbox_unittests        fail              RestrictedTokenTest.DenySidsException
sbox_unittests        fail              RestrictedTokenTest.ResultToken
sbox_unittests        fail              ServiceResolverTest.PatchesServices
sbox_unittests        flaky             IPCTest.ClientFastServer
sbox_validation_tests fail              ValidationSuite.*
unit_tests            crash             BlacklistManagerTest.*                         http://crbug.com/27726
unit_tests            crash             SafeBrowsingProtocolParsingTest.TestGetHashWithMac   http://bugs.winehq.org/show_bug.cgi?id=20340
unit_tests            crash-valgrind    DnsMasterTest.MassiveConcurrentLookupTest
unit_tests            crash-valgrind    NullModelTableViewTest.*                       http://bugs.winehq.org/show_bug.cgi?id=20553
unit_tests            crash-valgrind    RenderViewTest.OnPrintPageAsBitmap             http://bugs.winehq.org/show_bug.cgi?id=20657 (for wine oom)
unit_tests            crash-valgrind    TableViewTest.*                                http://bugs.winehq.org/show_bug.cgi?id=20553
unit_tests            dontcare          FirefoxImporterTest.Firefox2NSS3Decryptor      # FF2 dlls without symbols cause leaks
unit_tests            dontcare          ImporterTest.Firefox2Importer                  # FF2 dlls without symbols cause leaks
unit_tests            dontcare          SpellCheckTest.SpellCheckText
unit_tests            dontcare-hangwin  UtilityProcessHostTest.ExtensionUnpacker
unit_tests            fail              EncryptorTest.EncryptionDecryption             http://bugs.winehq.org/show_bug.cgi?id=20495
unit_tests            fail              EncryptorTest.String16EncryptionDecryption     http://bugs.winehq.org/show_bug.cgi?id=20495
unit_tests            fail              ImporterTest.IEImporter                        http://bugs.winehq.org/show_bug.cgi?id=20625
unit_tests            fail              RenderViewTest.InsertCharacters                http://bugs.winehq.org/show_bug.cgi?id=20624
unit_tests            fail              SafeBrowsingProtocolParsingTest.TestVerifyChunkMac   http://bugs.winehq.org/show_bug.cgi?id=20340
unit_tests            fail              SafeBrowsingProtocolParsingTest.TestVerifyUpdateMac  http://bugs.winehq.org/show_bug.cgi?id=20340
unit_tests            fail_wine_vmware  RenderProcessTest.TestTransportDIBAllocation
unit_tests            hang-valgrind     ExtensionAPIClientTest.*                       Not really a hang, just takes 30 minutes
unit_tests            hang-valgrind     Win32WifiDataProviderTest.*                    http://crbug.com/33446
_EOF_
}
| 194 | |
# Per-suite timeout, in seconds: roughly twice the slowest valgrind runtime
# observed so far, rounded to the nearest power-of-two multiple of 100.
# An explicit --timeout overrides the whole table.
# TODO: return lower values when --valgrind is not in effect.
get_expected_runtime() {
  case "$timeout_manual" in
  [0-9]*) echo $timeout_manual; return;;
  esac

  # Suites with the same budget share a single case arm.
  case $1 in
  printing_unittests|sbox_unittests|sbox_validation_tests|setup_unittests)
    echo 100;;
  app_unittests|googleurl_unittests|remoting_unittests)
    echo 200;;
  ipc_tests|media_unittests)
    echo 400;;
  base_unittests|courgette_unittests|tcmalloc_unittests)
    echo 1000;;
  net_unittests)
    echo 2000;;
  unit_tests)
    echo 4000;;
  *)
    echo "unknown test $1" >&2; exec false;;
  esac
}
| 221 | |
# Run $2... but kill it if it takes longer than $1 seconds.
# perl sets a SIGALRM for $1 seconds and then exec's the command, so the
# signal terminates the command itself; "time" reports its runtime/status.
alarm() { time perl -e 'alarm shift; exec @ARGV' "$@"; }
| 224 | |
# Prepare the test environment.  On native Windows (WINDIR set) this only
# selects the allocator; under Wine it also sets up a fresh wineprefix,
# verifies network/certificate prerequisites, and optionally starts a VNC
# server for the GUI tests.  Honors $dry_run (set by -n) for the
# destructive/slow steps.
init_runtime() {
  # Use the plain Windows heap allocator; NOTE(review): presumably tcmalloc
  # misbehaves under Wine -- confirm.
  CHROME_ALLOCATOR=winheap
  export CHROME_ALLOCATOR

  if test "$WINDIR" = ""
  then
    WINE=${WINE:-/usr/local/wine/bin/wine}
    export WINE
    WINESERVER=${WINESERVER:-/usr/local/wine/bin/wineserver}
    WINEPREFIX=${WINEPREFIX:-$HOME/.wine-chromium-tests}
    export WINEPREFIX
    WINE_HEAP_REDZONE=16
    export WINE_HEAP_REDZONE

    # net_unittests binds port 1337; bail out early if it is taken.
    if netstat -tlnp | grep :1337
    then
      echo Please kill the server listening on port 1337, or reboot.  The net tests need this port.
      exit 1
    fi
    # The SSL tests need the test root CA installed system-wide; see
    # http://bugs.winehq.org/show_bug.cgi?id=20370
    if test ! -f /usr/share/ca-certificates/root_ca_cert.crt
    then
      echo "You need to do"
      echo "sudo cp src/net/data/ssl/certificates/root_ca_cert.crt /usr/share/ca-certificates/"
      echo "sudo vi /etc/ca-certificates.conf    (and add the line root_ca_cert.crt)"
      echo "sudo update-ca-certificates"
      echo "else ssl tests will fail."
      echo "(Alternately, modify this script to run Juan's importer, http://bugs.winehq.org/show_bug.cgi?id=20370#c4 )"
      exit 1
    fi

    if test -n "$VNC"
    then
      export DISPLAY=":$VNC"
      vncserver -kill "$DISPLAY" || true
      # VNC servers don't clean these up if they get a SIGKILL, and would then
      # refuse to start because these files are there.
      rm -f "/tmp/.X${VNC}-lock" "/tmp/.X11-unix/X${VNC}"
      vncserver "$DISPLAY" -ac -depth 24 -geometry 1024x768
    fi
    # Rebuild the wineprefix from scratch and configure it with winetricks.
    $dry_run rm -rf $WINEPREFIX
    $dry_run test -f winetricks || wget http://kegel.com/wine/winetricks
    $dry_run sh winetricks nocrashdialog corefonts gecko > /dev/null
    $dry_run sleep 1
    # NOTE(review): winemine apparently keeps the wine session warm while
    # tests run -- confirm intent.
    $dry_run $WINE winemine &
  fi
}
| 271 | |
# Tear the environment back down: stop the wineserver, and the VNC server
# if one was requested.  Does nothing on native Windows (WINDIR set).
# Installed as the exit trap, so it must succeed on every path.
shutdown_runtime() {
  case "$WINDIR" in
  "")
    $dry_run $WINESERVER -k
    if test -n "$VNC"
    then
      vncserver -kill "$DISPLAY"
    fi
    ;;
  esac
}
| 282 | |
# Looks up tests from our list of known bad tests.  If $2 is not '.', picks
# only the tests expected to fail in that particular way (e.g. "crash").
# Prints the matching gtest names joined by ':' (gtest filter syntax).
get_test_filter()
{
  mysuite=$1
  myfilter=$2
  # Pass the suite and filter via -v instead of splicing them into the awk
  # program text, so regex metacharacters in $myfilter can't corrupt the
  # script.  (The old "tee tmp.N" debugging taps are gone; they littered
  # the build directory with tmp.1/tmp.2/tmp.3.)
  list_known_failures |
  awk -v suite="$mysuite" -v filt="$myfilter" '$1 == suite && $0 ~ filt {print $3}' |
  tr '\012' : |
  sed 's/:$/\n/'
}
| 293 | |
# Output the logical and of the two gtest filters $1 and $2.
# Handle the case where $1 is empty.
and_gtest_filters()
{
  # FIXME: handle more complex cases
  # Use printf, not echo: "echo -n" is not portable under /bin/sh, and an
  # unquoted "echo $2" would pathname-expand the '*' wildcards that gtest
  # filters usually contain.
  case "$1" in
  "") ;;
  *) printf '%s:' "$1" ;;
  esac
  printf '%s\n' "$2"
}
| 305 | |
# Expands a gtest filter spec to a plain old list of tests separated by whitespace
# by asking the test binary itself for its test list (requires $WINE and the
# suite .exe to be present in the current directory).
expand_test_list()
{
  mysuite=$1  # e.g. base_unittests
  myfilter=$2 # existing gtest_filter specification with wildcard
  # List just the tests matching $myfilter, separated by colons.
  # --gtest_list_tests prints "Case." header lines flush left followed by
  # indented test names; the perl one-liner glues each name to its header
  # to produce one fully-qualified "Case.Test" per line.  FLAKY_ tests are
  # dropped.  tr strips DOS carriage returns from the Windows binary.
  $WINE ./$mysuite.exe --gtest_filter=$myfilter --gtest_list_tests |
  tr -d '\015' |
  grep -v FLAKY |
  perl -e 'while (<STDIN>) { chomp; if (/^[A-Z]/) { $testname=$_; } elsif (/./) { s/\s*//; print "$testname$_\n"} }'
}
| 317 | |
# Parse arguments

announce=true          # prefix command for announcing runs; "echo" under -n
do_individual=no       # no | yes | groups: how to batch the tests
dry_run=               # "true" under -n: prefix that swallows real commands
extra_gtest_filter=    # user-supplied extra --gtest_filter
fail_filter="."        # expected-failure category to select ('.' = all)
loops=1                # how many times to repeat the whole run
logfiles=              # "yes": keep one log file per run in logs/
SUITES=                # suites to run; defaulted below if left empty
suppression_dirs=      # where to look for valgrind suppression files
TARGET=Debug           # build flavor: Debug or Release
timeout_manual=        # --timeout override for get_expected_runtime
VALGRIND_CMD=          # set to $THE_VALGRIND_CMD by --valgrind
VNC=                   # display number when running inside a VNC server
want_fails=no          # "yes": run only the expected failures
winedebug=             # value exported as WINEDEBUG later

while test "$1" != ""
do
  case $1 in
  --individual) do_individual=yes;;
  --groups) do_individual=groups;;
  --gtest_filter) extra_gtest_filter=$2; shift;;
  --just-crashes) fail_filter="crash"; want_fails=yes;;
  --just-fails) fail_filter="fail"; want_fails=yes;;
  --just-flaky) fail_filter="flaky"; want_fails=yes;;
  --just-hangs) fail_filter="hang"; want_fails=yes;;
  --list-failures) list_known_failures; exit 0;;
  --list-failures-html) list_known_failures | sed 's,http://\(.*\),<a href="http://\1">\1</a>,;s/$/<br>/' ; exit 0;;
  --loops) loops=$2; shift;;
  -n) dry_run=true; announce=echo ;;
  --suppression_dir) suppression_dirs="$suppression_dirs $2"; shift;;
  --target) TARGET=$2; shift;;
  --timeout) timeout_manual=$2; shift;;
  --used-suppressions) cd logs; grep used_suppression *.log | sed 's/-1.*--[0-9]*-- used_suppression//'; exit 0;;
  --valgrind) VALGRIND_CMD="$THE_VALGRIND_CMD";;
  --vnc) VNC=$2; shift;;
  --winedebug) winedebug=$2; shift;;
  --logfiles) logfiles=yes;;
  -*) usage; exit 1;;
  *) SUITES="$SUITES $1" ;;    # bare words are suite names
  esac
  shift
done
| 363 | |
# Default to the whole battery of suites (grouped above by rough runtime).
if test "$SUITES" = ""
then
  SUITES="$SUITES_1 $SUITES_10 $SUITES_100 $SUITES_1000"
fi

# When running under valgrind, locate suppression files and append
# --suppressions options to the valgrind command line.
if test "$VALGRIND_CMD" != ""
then
  if test "$suppression_dirs" = ""
  then
    # Default value for winezeug.
    suppression_dirs="../../../ ../../../../../valgrind"
    # Also try the script dir.
    suppression_dirs="$suppression_dirs $(dirname $0)"
  fi
  # Check suppression_dirs for suppression files to create suppression_options
  suppression_options=
  for dir in $suppression_dirs
  do
    for f in valgrind-suppressions chromium-valgrind-suppressions
    do
      if test -f "$dir/$f"
      then
        # Use an absolute path, since we cd into the build tree below.
        dir="`cd $dir; pwd`"
        suppression_options="$suppression_options --suppressions=$dir/$f"
      fi
    done
  done
  VALGRIND_CMD="$VALGRIND_CMD $suppression_options"
fi
| 393 | |
# From here on, abort on any unexpected failure.
set -e

# Make sure the wine/VNC environment is torn down however we exit.
trap shutdown_runtime 0
init_runtime
export WINEDEBUG=$winedebug

# Trace the remaining commands so failures are easy to locate in bot logs.
set -x

mkdir -p logs
# The test executables live in the build output directory.
cd "src/chrome/$TARGET"
| 404 | |
# Main loop: run each requested suite $loops times, batched per
# --individual/--groups, capture logs, and scan them for failure patterns.
errors=
i=1
while test $i -le $loops
do
  for suite in $SUITES
  do
    # Tests known to fail in the selected way; excluded from normal runs,
    # run exclusively under --just-*.
    expected_to_fail="`get_test_filter $suite $fail_filter`"
    case $want_fails in
    no)  filterspec=`and_gtest_filters "${extra_gtest_filter}" -${expected_to_fail}` ;;
    yes) filterspec=`and_gtest_filters "${extra_gtest_filter}" ${expected_to_fail}` ;;
    esac

    case $do_individual in
    no)
      # Run the whole suite as a single process.
      $announce $VALGRIND_CMD $WINE ./$suite.exe --gtest_filter=$filterspec
      LOG=../../../logs/$suite-$i.log
      # Bug fix: this used to read "|| errors=yes true", which only set
      # errors in the environment of "true" and was lost immediately.
      $dry_run alarm `get_expected_runtime $suite` \
          $VALGRIND_CMD $WINE ./$suite.exe --gtest_filter=$filterspec 2>&1 | reduce_verbosity | tee $LOG || errors=yes
      egrep -q "$PATTERN" $LOG && errors=yes
      test "$logfiles" = yes || rm $LOG
      ;;
    yes)
      # Run every matching test in its own process.
      for test in `expand_test_list $suite $filterspec`
      do
        $announce $VALGRIND_CMD $WINE ./$suite.exe --gtest_filter="$test"
        LOG=../../../logs/$suite-$test-$i.log
        $dry_run alarm `get_expected_runtime $suite` \
            $VALGRIND_CMD $WINE ./$suite.exe --gtest_filter="$test" 2>&1 | reduce_verbosity | tee $LOG || errors=yes
        egrep -q "$PATTERN" $LOG && errors=yes
        test "$logfiles" = yes || rm $LOG
      done
      ;;
    groups)
      # Run one process per major gtest group (the part before the '.').
      for test in `expand_test_list $suite $filterspec | sed 's/\..*//' | sort -u`
      do
        $announce $VALGRIND_CMD $WINE ./$suite.exe --gtest_filter="$test.*-${expected_to_fail}"
        LOG=../../../logs/$suite-$test-$i.log
        $dry_run alarm `get_expected_runtime $suite` \
            $VALGRIND_CMD $WINE ./$suite.exe --gtest_filter="$test.*-${expected_to_fail}" 2>&1 | reduce_verbosity | tee $LOG || errors=yes
        # Bug fix: used to grep tmp.log, a file nothing ever writes; scan
        # the log we just captured instead, like the other branches.
        egrep -q "$PATTERN" $LOG && errors=yes
        test "$logfiles" = yes || rm $LOG
      done
      ;;
    esac
  done
  i=`expr $i + 1`
done

case "$errors" in
yes) echo "Errors detected, condition red.  Battle stations!" ; exit 1;;
*) echo "No errors detected." ;;
esac
| 456 | |
| OLD | NEW |