// run_chromium_analysis_on_workers is an application that runs the specified
// telemetry benchmark on swarming bots and uploads the results to Google
// Storage. The requester is emailed when the task is done.
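//
// Example invocation (all flag values below are purely illustrative):
//
//	run_chromium_analysis_on_workers --emails=user@example.com \
//		--description="Test run" --gae_task_id=123 --pageset_type=10k \
//		--benchmark_name=rasterize_and_record_micro \
//		--benchmark_extra_args="--output-format=csv-pivot-table" \
//		--run_id=user-20160101120000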
package main

import (
	"flag"
	"fmt"
	"os"
	"path/filepath"
	"strings"
	"time"

	"github.com/skia-dev/glog"
	"go.skia.org/infra/ct/go/frontend"
	"go.skia.org/infra/ct/go/master_scripts/master_common"
	"go.skia.org/infra/ct/go/util"
	"go.skia.org/infra/go/common"
	"go.skia.org/infra/go/email"
	skutil "go.skia.org/infra/go/util"
)

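// MAX_PAGES_PER_SWARMING_BOT is the maximum number of pages assigned to a
// single swarming bot; it is passed to TriggerSwarmingTask when sharding the
// run and to MergeUploadCSVFiles when merging the per-bot CSV output.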
const (
	MAX_PAGES_PER_SWARMING_BOT = 100
)

var (
	emails             = flag.String("emails", "", "The comma separated email addresses to notify when the task is picked up and completes.")
	description        = flag.String("description", "", "The description of the run as entered by the requester.")
	gaeTaskID          = flag.Int64("gae_task_id", -1, "The key of the task. This task will be updated when the task is started and completed.")
	pagesetType        = flag.String("pageset_type", "", "The type of pagesets to use. Eg: 10k, Mobile10k, All.")
	benchmarkName      = flag.String("benchmark_name", "", "The telemetry benchmark to run on the workers.")
	benchmarkExtraArgs = flag.String("benchmark_extra_args", "", "The extra arguments that are passed to the specified benchmark.")
	browserExtraArgs   = flag.String("browser_extra_args", "", "The extra arguments that are passed to the browser while running the benchmark.")
	runID              = flag.String("run_id", "", "The unique run id (typically requester + timestamp).")

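	// Package-level state read by the deferred sendEmail and updateWebappTask
	// calls. The links default to the master logserver link and are overwritten
	// later in main with Google Storage links.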
	taskCompletedSuccessfully = false

	chromiumPatchLink  = util.MASTER_LOGSERVER_LINK
	benchmarkPatchLink = util.MASTER_LOGSERVER_LINK
	outputLink         = util.MASTER_LOGSERVER_LINK
)

func sendEmail(recipients []string) {
	// Send completion email.
	emailSubject := fmt.Sprintf("Cluster telemetry chromium analysis task has completed (%s)", *runID)
	failureHtml := ""
	viewActionMarkup := ""
	var err error

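	// Build the view-action markup that gives recipients a one-click link:
	// to the CSV results on success, or to the master log on failure.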
	if taskCompletedSuccessfully {
		if viewActionMarkup, err = email.GetViewActionMarkup(outputLink, "View Results", "Direct link to the CSV results"); err != nil {
			glog.Errorf("Failed to get view action markup: %s", err)
			return
		}
	} else {
		emailSubject += " with failures"
		failureHtml = util.GetFailureEmailHtml(*runID)
		if viewActionMarkup, err = email.GetViewActionMarkup(util.GetMasterLogLink(*runID), "View Failure", "Direct link to the master log"); err != nil {
			glog.Errorf("Failed to get view action markup: %s", err)
			return
		}
	}
	bodyTemplate := `
	The chromium analysis %s benchmark task on %s pageset has completed.<br/>
	Run description: %s<br/>
	%s
	The CSV output is <a href='%s'>here</a>.<br/>
	The patch(es) you specified are here:
	<a href='%s'>chromium</a>/<a href='%s'>telemetry</a>
	<br/><br/>
	You can schedule more runs <a href='%s'>here</a>.
	<br/><br/>
	Thanks!
	`
	emailBody := fmt.Sprintf(bodyTemplate, *benchmarkName, *pagesetType, *description, failureHtml, outputLink, chromiumPatchLink, benchmarkPatchLink, frontend.ChromiumAnalysisTasksWebapp)
	if err := util.SendEmailWithMarkup(recipients, emailSubject, emailBody, viewActionMarkup); err != nil {
		glog.Errorf("Error while sending email: %s", err)
		return
	}
}

func updateWebappTask() {
	// TODO(rmistry): Update this section when the analysis page is created.
	//vars := chromium_perf.UpdateVars{}
	//vars.Id = *gaeTaskID
	//vars.SetCompleted(taskCompletedSuccessfully)
	//vars.Results = sql.NullString{String: htmlOutputLink, Valid: true}
	//vars.NoPatchRawOutput = sql.NullString{String: noPatchOutputLink, Valid: true}
	//vars.WithPatchRawOutput = sql.NullString{String: withPatchOutputLink, Valid: true}
	//skutil.LogErr(frontend.UpdateWebappTaskV2(&vars))
}

func main() {
	defer common.LogPanic()
	master_common.Init()

	// Send start email.
	emailsArr := util.ParseEmails(*emails)
	emailsArr = append(emailsArr, util.CtAdmins...)
	if len(emailsArr) == 0 {
		glog.Error("At least one email address must be specified")
		return
	}
	// TODO(rmistry): Update the below when the analysis page is created.
	// skutil.LogErr(frontend.UpdateWebappTaskSetStarted(&chromium_perf.UpdateVars{}, *gaeTaskID))
	skutil.LogErr(util.SendTaskStartEmail(emailsArr, "Chromium analysis", *runID, *description))
	// Ensure webapp is updated and email is sent even if task fails.
	defer updateWebappTask()
	defer sendEmail(emailsArr)
	// Cleanup dirs after run completes.
	defer skutil.RemoveAll(filepath.Join(util.StorageDir, util.BenchmarkRunsDir))
	// Finish with glog flush and how long the task took.
	defer util.TimeTrack(time.Now(), "Running chromium analysis task on workers")
	defer glog.Flush()

	if *pagesetType == "" {
		glog.Error("Must specify --pageset_type")
		return
	}
	if *benchmarkName == "" {
		glog.Error("Must specify --benchmark_name")
		return
	}
	if *runID == "" {
		glog.Error("Must specify --run_id")
		return
	}

	// Instantiate GsUtil object.
	gs, err := util.NewGsUtil(nil)
	if err != nil {
		glog.Errorf("Could not instantiate gsutil object: %s", err)
		return
	}
	remoteOutputDir := filepath.Join(util.ChromiumAnalysisRunsDir, *runID)

	// Copy the patches to Google Storage.
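	// Both patches are expected to already exist in os.TempDir(), named after
	// the run id, before this binary runs.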
	chromiumPatchName := *runID + ".chromium.patch"
	benchmarkPatchName := *runID + ".benchmark.patch"
	for _, patchName := range []string{chromiumPatchName, benchmarkPatchName} {
		if err := gs.UploadFile(patchName, os.TempDir(), remoteOutputDir); err != nil {
			glog.Errorf("Could not upload %s to %s: %s", patchName, remoteOutputDir, err)
			return
		}
	}
	chromiumPatchLink = util.GS_HTTP_LINK + filepath.Join(util.GSBucketName, remoteOutputDir, chromiumPatchName)
	benchmarkPatchLink = util.GS_HTTP_LINK + filepath.Join(util.GSBucketName, remoteOutputDir, benchmarkPatchName)

	// Create the required chromium build.
	chromiumHash, skiaHash, err := util.CreateChromiumBuild(*runID, "Linux", "", "", true, true)
	if err != nil {
		glog.Errorf("Could not create chromium build: %s", err)
		return
	}
	chromiumBuild := fmt.Sprintf("try-%s-%s-%s-withpatch", chromiumHash, skiaHash, *runID)
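	// This build name is handed to the workers below through the CHROMIUM_BUILD
	// isolate variable.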

	// Archive, trigger and collect swarming tasks.
	isolateExtraArgs := map[string]string{
		"CHROMIUM_BUILD":     chromiumBuild,
		"RUN_ID":             *runID,
		"BENCHMARK":          *benchmarkName,
		"BENCHMARK_ARGS":     *benchmarkExtraArgs,
		"BROWSER_EXTRA_ARGS": *browserExtraArgs,
	}
	if err := util.TriggerSwarmingTask(*pagesetType, "chromium_analysis", util.CHROMIUM_ANALYSIS_ISOLATE, 2*time.Hour, 1*time.Hour, MAX_PAGES_PER_SWARMING_BOT, isolateExtraArgs); err != nil {
		glog.Errorf("Error encountered when swarming tasks: %s", err)
		return
	}

	// If "--output-format=csv-pivot-table" was specified then merge all CSV files and upload.
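	// noOutputSlaves collects the names of workers that produced no CSV output;
	// they are reported at the end of the run.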
	noOutputSlaves := []string{}
	pathToPyFiles := util.GetPathToPyFiles(false)
	if strings.Contains(*benchmarkExtraArgs, "--output-format=csv-pivot-table") {
		if noOutputSlaves, err = util.MergeUploadCSVFiles(*runID, pathToPyFiles, gs, util.PagesetTypeToInfo[*pagesetType].NumPages, MAX_PAGES_PER_SWARMING_BOT); err != nil {
			glog.Errorf("Unable to merge and upload CSV files for %s: %s", *runID, err)
		}
	}

	// Construct the output link.
	outputLink = util.GS_HTTP_LINK + filepath.Join(util.GSBucketName, util.BenchmarkRunsDir, *runID, "consolidated_outputs", *runID+".output")

	// Display the no output slaves.
	for _, noOutputSlave := range noOutputSlaves {
		fmt.Printf("Missing output from %s\n", noOutputSlave)
	}

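	// Mark the task as successful so that the deferred sendEmail and
	// updateWebappTask calls report success.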
	taskCompletedSuccessfully = true
}