Chromium Code Reviews

Side by Side Diff: tools/valgrind/waterfall.sh

Issue 6057006: Improve waterfall.sh - fetch only reports and failed test names rather than t... (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/src/
Patch Set: '' Created 9 years, 11 months ago
1 #!/bin/bash 1 #!/bin/bash
2 2
3 # Copyright (c) 2010 The Chromium Authors. All rights reserved. 3 # Copyright (c) 2010 The Chromium Authors. All rights reserved.
4 # Use of this source code is governed by a BSD-style license that can be 4 # Use of this source code is governed by a BSD-style license that can be
5 # found in the LICENSE file. 5 # found in the LICENSE file.
6 6
7 # This script can be used by waterfall sheriffs to fetch the status 7 # This script can be used by waterfall sheriffs to fetch the status
8 # of Valgrind bots on the memory waterfall and test if their local 8 # of Valgrind bots on the memory waterfall and test if their local
9 # suppressions match the reports on the waterfall. 9 # suppressions match the reports on the waterfall.
10 10
(...skipping 27 matching lines...)
38 else 38 else
39 echo "Need either curl or wget to download stuff... aborting" 39 echo "Need either curl or wget to download stuff... aborting"
40 exit 1 40 exit 1
41 fi 41 fi
42 # }}} 42 # }}}
43 } 43 }
44 44
45 fetch_logs() { 45 fetch_logs() {
46 # Fetch Valgrind logs from the waterfall {{{1 46 # Fetch Valgrind logs from the waterfall {{{1
47 47
48 # TODO(timurrrr,maruel): use JSON, see
49 # http://build.chromium.org/p/chromium.memory/json/help
50
48 rm -rf "$LOGS_DIR" # Delete old logs 51 rm -rf "$LOGS_DIR" # Delete old logs
49 mkdir "$LOGS_DIR" 52 mkdir "$LOGS_DIR"
50 53
51 echo "Fetching the list of builders..." 54 echo "Fetching the list of builders..."
52 download $WATERFALL_PAGE "$LOGS_DIR/builders" 55 download $WATERFALL_PAGE "$LOGS_DIR/builders"
53 SLAVES=$(grep "<a href=\"builders\/" "$LOGS_DIR/builders" | \ 56 SLAVES=$(grep "<a href=\"builders\/" "$LOGS_DIR/builders" | \
54 sed "s/.*<a href=\"builders\///" | sed "s/\".*//" | \ 57 sed "s/.*<a href=\"builders\///" | sed "s/\".*//" | \
55 sort | uniq) 58 sort | uniq)
56 59
57 for S in $SLAVES 60 for S in $SLAVES
58 do 61 do
59 SLAVE_URL=$WATERFALL_PAGE/$S 62 SLAVE_URL=$WATERFALL_PAGE/$S
60 SLAVE_NAME=$(echo $S | sed -e "s/%20/ /g" -e "s/%28/(/g" -e "s/%29/)/g") 63 SLAVE_NAME=$(echo $S | sed -e "s/%20/ /g" -e "s/%28/(/g" -e "s/%29/)/g")
61 echo -n "Fetching builds by slave '${SLAVE_NAME}'" 64 echo -n "Fetching builds by slave '${SLAVE_NAME}'"
62 download $SLAVE_URL "$LOGS_DIR/slave_${S}" 65 download $SLAVE_URL "$LOGS_DIR/slave_${S}"
63 66
64 # We speed up the 'fetch' step by skipping the builds/tests which succeeded. 67 # We speed up the 'fetch' step by skipping the builds/tests which succeeded.
65 # TODO(timurrrr): OTOH, we won't be able to check 68 # TODO(timurrrr): OTOH, we won't be able to check
66 # if some suppression is not used anymore. 69 # if some suppression is not used anymore.
67 LIST_OF_BUILDS=$(grep "<a href=\"\.\./builders/.*/builds/[0-9]\+.*failed" \ 70 LIST_OF_BUILDS=$(grep "rev.*<a href=\"\.\./builders/.*/builds/[0-9]\+" \
68 "$LOGS_DIR/slave_$S" | grep -v "failed compile" | \ 71 "$LOGS_DIR/slave_$S" | head -n 2 | \
69 sed "s/.*\/builds\///" | sed "s/\".*//" | head -n 2) 72 grep "failed" | grep -v "failed compile" | \
73 sed "s/.*\/builds\///" | sed "s/\".*//")
70 74
71 for BUILD in $LIST_OF_BUILDS 75 for BUILD in $LIST_OF_BUILDS
72 do 76 do
73 BUILD_RESULTS="$LOGS_DIR/slave_${S}_build_${BUILD}" 77 # We'll fetch a few tiny URLs now, let's use a temp file.
74 download $SLAVE_URL/builds/$BUILD "$BUILD_RESULTS" 78 TMPFILE=$(mktemp)
75 LIST_OF_TESTS=$(grep "<a href=\"[0-9]\+/steps/memory.*/logs/stdio\"" \ 79 download $SLAVE_URL/builds/$BUILD "$TMPFILE"
76 "$BUILD_RESULTS" | \ 80
77 sed "s/.*a href=\"//" | sed "s/\".*//") 81 REPORT_FILE="$LOGS_DIR/report_${S}_${BUILD}"
78 for TEST in $LIST_OF_TESTS 82 rm $REPORT_FILE 2>/dev/null || true # make sure it doesn't exist
jochen (gone - plz use gerrit) 2011/01/05 12:25:37 use rm -f instead of "2>/dev/null || true"
Timur Iskhodzhanov 2011/01/17 15:28:26 Done.
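For reference, a minimal standalone sketch of the two forms discussed in this thread ("$REPORT_FILE" as in the patch; this sketch is not part of the change itself):

    rm "$REPORT_FILE" 2>/dev/null || true   # original form: hide the error and force a zero exit
    rm -f "$REPORT_FILE"                    # suggested form: -f succeeds even if the file is missing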
83
84 REPORT_URLS=$(grep -o "[0-9]\+/steps/memory.*/logs/[0-9A-F]\{16\}" \
85 "$TMPFILE" || true) # `true` is to succeed on empty output
jochen (gone - plz use gerrit) 2011/01/05 12:25:37 why does this need to succeed?
Timur Iskhodzhanov 2011/01/17 15:28:26 There's "set -e" and grep returns failure if no lines match.
jochen (gone - plz use gerrit) 2011/01/17 19:54:01 But you don't use the return value, so it doesn't matter?
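A small self-contained illustration of the "set -e" point above (assumed example, not taken from the patch): an assignment takes the exit status of its command substitution, so a grep that matches nothing would abort the script unless "|| true" is appended.

    #!/bin/bash
    set -e
    # grep exits 1 when nothing matches; without "|| true" this assignment
    # would terminate the script right here because of "set -e".
    REPORT_URLS=$(echo "no report links in this build page" | \
                  grep -o "[0-9A-F]\{16\}" || true)
    echo "still running; matches: '${REPORT_URLS}'"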
86 FAILED_TESTS=$(grep -o "[0-9]\+/steps/memory.*/logs/[A-Za-z0-9.]\+" \
87 "$TMPFILE" | grep -v "[0-9A-F]\{16\}" | grep -v "stdio" \
88 || true)
89
90 for REPORT in $REPORT_URLS
79 do 91 do
80 REPORT_FILE=$(echo "report_${S}_$TEST" | sed "s/\/logs\/stdio//" | \ 92 # Add a hash line with the report so test_suppressions sees the hash.
81 sed "s/\/steps//" | sed "s/\//_/g") 93 # This is a workaround for http://crbug.com/68233#c1
82 echo -n "." 94 echo "$REPORT" | sed -e "s/.*\//Suppression (error hash=#/" \
83 download $SLAVE_URL/builds/$TEST "$LOGS_DIR/$REPORT_FILE" 95 -e "s/$/#):/" >> "$REPORT_FILE"
84 echo $SLAVE_URL/builds/$TEST >> "$LOGS_DIR/$REPORT_FILE" 96 download "$SLAVE_URL/builds/$REPORT/text" "$TMPFILE"
97 # `cat -v sed` is a workaround the last "}^M" line getting stripped
98 cat -v "$TMPFILE" | sed "s/\^M$//" >> "$REPORT_FILE"
jochen (gone - plz use gerrit) 2011/01/05 12:25:37 if you want to convert CRLF to unix, you should pi
Timur Iskhodzhanov 2011/01/17 15:28:26 Done.
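For the CRLF point in this thread, a standalone sketch of the usual piping alternatives (input.txt and output.txt are placeholder names, not files from this change):

    tr -d '\r' < input.txt > output.txt    # drop every carriage return byte
    sed 's/\r$//' input.txt > output.txt   # or strip CR only at line ends (GNU sed understands \r)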
99 echo >> "$REPORT_FILE"
85 done 100 done
101
102 for FAILURE in $FAILED_TESTS
103 do
104 download "$SLAVE_URL/builds/$FAILURE/text" "$TMPFILE"
105 # `cat head` is a workaround for http://crbug.com/68233
106 cat "$TMPFILE" | head -n 1 | sed "s/://" | sed "s/^/FAILED:/" \
107 >> "$REPORT_FILE"
108 done
109
110 rm "$TMPFILE"
111 echo $SLAVE_URL/builds/$BUILD >> "$REPORT_FILE"
86 done 112 done
87 echo " DONE" 113 echo " DONE"
88 done 114 done
89 # }}} 115 # }}}
90 } 116 }
91 117
92 match_suppressions() { 118 match_suppressions() {
93 PYTHONPATH=$THISDIR/../python/google \ 119 PYTHONPATH=$THISDIR/../python/google \
94 python "$THISDIR/test_suppressions.py" "$LOGS_DIR/report_"* 120 python "$THISDIR/test_suppressions.py" "$LOGS_DIR/report_"*
95 } 121 }
96 122
97 match_gtest_excludes() { 123 match_gtest_excludes() {
98 for PLATFORM in "Linux" "Chromium%20Mac" "Chromium%20OS" 124 for PLATFORM in "Linux" "Chromium%20Mac" "Chromium%20OS"
99 do 125 do
100 echo 126 echo
101 echo "Test failures on ${PLATFORM}:" | sed "s/%20/ /" 127 echo "Test failures on ${PLATFORM}:" | sed "s/%20/ /"
102 grep "\[ FAILED \] .* ([0-9]\+ ms)" -R "$LOGS_DIR"/*${PLATFORM}* | \ 128 grep -h -o "^FAILED:.*" -R "$LOGS_DIR"/*${PLATFORM}* | \
103 grep -v "FAILS\|FLAKY" | \ 129 grep -v "FAILS\|FLAKY" | \
104 sed -e "s/.*%20//" -e "s/_[1-9]\+:/:/" \ 130 sed -e "s/^FAILED://" -e "s/^/ /"
105 -e "s/\[ FAILED \] //" -e "s/ ([0-9]\+ ms)//" -e "s/^/ /"
106 # Don't put any operators between "grep | sed" and "RESULT=$PIPESTATUS" 131 # Don't put any operators between "grep | sed" and "RESULT=$PIPESTATUS"
107 RESULT=$PIPESTATUS 132 RESULT=$PIPESTATUS
108 133
109 if [ "$RESULT" == 1 ] 134 if [ "$RESULT" == 1 ]
110 then 135 then
111 echo " None!" 136 echo " None!"
112 else 137 else
113 echo 138 echo
114 echo " Note: we don't check for failures already excluded locally yet" 139 echo " Note: we don't check for failures already excluded locally yet"
115 echo " TODO(timurrrr): don't list tests we've already excluded locally" 140 echo " TODO(timurrrr): don't list tests we've already excluded locally"
(...skipping 119 matching lines...)
235 find_blame "$2" "$3" "$4" 260 find_blame "$2" "$3" "$4"
236 else 261 else
237 THISNAME=$(basename "${0}") 262 THISNAME=$(basename "${0}")
238 echo "Usage: $THISNAME fetch|match|blame <builder> <test> <hash>" 263 echo "Usage: $THISNAME fetch|match|blame <builder> <test> <hash>"
239 echo " fetch - Fetch Valgrind logs from the memory waterfall" 264 echo " fetch - Fetch Valgrind logs from the memory waterfall"
240 echo " match - Test the local suppression files against the downloaded logs" 265 echo " match - Test the local suppression files against the downloaded logs"
241 echo " blame - Return the blame list for the revision where the suppression" 266 echo " blame - Return the blame list for the revision where the suppression"
 242 echo " <hash> occurred for the first time in the log for <test> on" 267 echo " <hash> occurred for the first time in the log for <test> on"
243 echo " <builder>" 268 echo " <builder>"
244 fi 269 fi
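Based on the usage text above, a typical sheriff session might look like this (the builder, test, and hash values below are made-up placeholders, not real bot names):

    ./tools/valgrind/waterfall.sh fetch   # download reports and failed test names from the waterfall
    ./tools/valgrind/waterfall.sh match   # compare local suppressions against the downloaded reports
    ./tools/valgrind/waterfall.sh blame "Some Valgrind Builder" unit_tests 0123456789ABCDEF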