OLD | NEW |
1 // capture_skps_on_workers is an application that captures SKPs of the | 1 // capture_skps_on_workers is an application that captures SKPs of the |
2 // specified patchset type on all CT workers and uploads the results to Google | 2 // specified patchset type on all CT workers and uploads the results to Google |
3 // Storage. The requester is emailed when the task is done. | 3 // Storage. The requester is emailed when the task is done. |
4 package main | 4 package main |
5 | 5 |
6 import ( | 6 import ( |
7 "bytes" | |
8 "flag" | 7 "flag" |
9 "fmt" | 8 "fmt" |
| 9 "io/ioutil" |
| 10 "path" |
| 11 "path/filepath" |
| 12 "runtime" |
| 13 "strconv" |
10 "strings" | 14 "strings" |
11 "text/template" | |
12 "time" | 15 "time" |
13 | 16 |
14 "github.com/skia-dev/glog" | 17 "github.com/skia-dev/glog" |
15 "go.skia.org/infra/ct/go/ctfe/capture_skps" | 18 "go.skia.org/infra/ct/go/ctfe/capture_skps" |
16 "go.skia.org/infra/ct/go/frontend" | 19 "go.skia.org/infra/ct/go/frontend" |
17 "go.skia.org/infra/ct/go/master_scripts/master_common" | 20 "go.skia.org/infra/ct/go/master_scripts/master_common" |
18 "go.skia.org/infra/ct/go/util" | 21 "go.skia.org/infra/ct/go/util" |
19 "go.skia.org/infra/go/common" | 22 "go.skia.org/infra/go/common" |
| 23 "go.skia.org/infra/go/swarming" |
20 skutil "go.skia.org/infra/go/util" | 24 skutil "go.skia.org/infra/go/util" |
21 ) | 25 ) |
22 | 26 |
| 27 const ( |
| 28 MAX_PAGES_PER_SWARMING_BOT_CAPTURE_SKPS = 100 |
| 29 MAX_PAGES_PER_SWARMING_BOT_CAPTURE_SKPS_FROM_PDFS = 1000 |
| 30 ) |
| 31 |
23 var ( | 32 var ( |
24 emails = flag.String("emails", "", "The comma separated email ad
dresses to notify when the task is picked up and completes.") | 33 emails = flag.String("emails", "", "The comma separated email ad
dresses to notify when the task is picked up and completes.") |
25 description = flag.String("description", "", "The description of the
run as entered by the requester.") | 34 description = flag.String("description", "", "The description of the
run as entered by the requester.") |
26 gaeTaskID = flag.Int64("gae_task_id", -1, "The key of the App Engin
e task. This task will be updated when the task is completed.") | 35 gaeTaskID = flag.Int64("gae_task_id", -1, "The key of the App Engin
e task. This task will be updated when the task is completed.") |
27 pagesetType = flag.String("pageset_type", "", "The type of pagesets t
o use. Eg: 10k, Mobile10k, All.") | 36 pagesetType = flag.String("pageset_type", "", "The type of pagesets t
o use. Eg: 10k, Mobile10k, All.") |
28 chromiumBuild = flag.String("chromium_build", "", "The chromium build t
o use for this capture SKPs run.") | 37 chromiumBuild = flag.String("chromium_build", "", "The chromium build t
o use for this capture SKPs run.") |
29 targetPlatform = flag.String("target_platform", util.PLATFORM_LINUX, "Th
e platform the benchmark will run on (Android / Linux).") | 38 targetPlatform = flag.String("target_platform", util.PLATFORM_LINUX, "Th
e platform the benchmark will run on (Android / Linux).") |
30 runID = flag.String("run_id", "", "The unique run id (typically
requester + timestamp).") | 39 runID = flag.String("run_id", "", "The unique run id (typically
requester + timestamp).") |
31 | 40 |
32 taskCompletedSuccessfully = false | 41 taskCompletedSuccessfully = false |
(...skipping 39 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
72 glog.Error("At least one email address must be specified") | 81 glog.Error("At least one email address must be specified") |
73 return | 82 return |
74 } | 83 } |
75 skutil.LogErr(frontend.UpdateWebappTaskSetStarted(&capture_skps.UpdateVa
rs{}, *gaeTaskID)) | 84 skutil.LogErr(frontend.UpdateWebappTaskSetStarted(&capture_skps.UpdateVa
rs{}, *gaeTaskID)) |
76 skutil.LogErr(util.SendTaskStartEmail(emailsArr, "Capture SKPs", *runID,
*description)) | 85 skutil.LogErr(util.SendTaskStartEmail(emailsArr, "Capture SKPs", *runID,
*description)) |
77 // Ensure webapp is updated and completion email is sent even if task | 86 // Ensure webapp is updated and completion email is sent even if task |
78 // fails. | 87 // fails. |
79 defer updateWebappTask() | 88 defer updateWebappTask() |
80 defer sendEmail(emailsArr) | 89 defer sendEmail(emailsArr) |
81 | 90 |
82 if !*master_common.Local { | |
83 // Cleanup tmp files after the run. | |
84 defer util.CleanTmpDir() | |
85 } | |
86 // Finish with glog flush and how long the task took. | 91 // Finish with glog flush and how long the task took. |
87 defer util.TimeTrack(time.Now(), "Running capture skps task on workers") | 92 defer util.TimeTrack(time.Now(), "Running capture skps task on workers") |
88 defer glog.Flush() | 93 defer glog.Flush() |
89 | 94 |
90 if *pagesetType == "" { | 95 if *pagesetType == "" { |
91 glog.Error("Must specify --pageset_type") | 96 glog.Error("Must specify --pageset_type") |
92 return | 97 return |
93 } | 98 } |
94 if *chromiumBuild == "" { | 99 if *chromiumBuild == "" { |
95 glog.Error("Must specify --chromium_build") | 100 glog.Error("Must specify --chromium_build") |
96 return | 101 return |
97 } | 102 } |
98 if *runID == "" { | 103 if *runID == "" { |
99 glog.Error("Must specify --run_id") | 104 glog.Error("Must specify --run_id") |
100 return | 105 return |
101 } | 106 } |
102 | 107 |
103 » workerScript := "capture_skps" | 108 » isolateFile := util.CAPTURE_SKPS_ISOLATE |
| 109 » maxPages := MAX_PAGES_PER_SWARMING_BOT_CAPTURE_SKPS |
104 if strings.Contains(strings.ToUpper(*pagesetType), "PDF") { | 110 if strings.Contains(strings.ToUpper(*pagesetType), "PDF") { |
105 // For PDF pagesets use the capture_skps_from_pdfs worker script. | 111 // For PDF pagesets use the capture_skps_from_pdfs isolate. |
106 » » workerScript = "capture_skps_from_pdfs" | 112 » » isolateFile = util.CAPTURE_SKPS_FROM_PDFS_ISOLATE |
| 113 » » maxPages = MAX_PAGES_PER_SWARMING_BOT_CAPTURE_SKPS_FROM_PDFS |
107 // TODO(rmistry): Uncomment when ready to capture SKPs. | 114 // TODO(rmistry): Uncomment when ready to capture SKPs. |
108 //// Sync PDFium and build pdfium_test binary which will be used by the worker script. | 115 //// Sync PDFium and build pdfium_test binary which will be used by the swarming task. |
109 //if err := util.SyncDir(util.PDFiumTreeDir); err != nil { | 116 //if err := util.SyncDir(util.PDFiumTreeDir); err != nil { |
110 // glog.Errorf("Could not sync PDFium: %s", err) | 117 // glog.Errorf("Could not sync PDFium: %s", err) |
111 // return | 118 // return |
112 //} | 119 //} |
113 //if err := util.BuildPDFium(); err != nil { | 120 //if err := util.BuildPDFium(); err != nil { |
114 // glog.Errorf("Could not build PDFium: %s", err) | 121 // glog.Errorf("Could not build PDFium: %s", err) |
115 // return | 122 // return |
116 //} | 123 //} |
117 //// Copy pdfium_test to Google Storage. | 124 //// Copy pdfium_test to Google Storage. |
118 //pdfiumLocalDir := path.Join(util.PDFiumTreeDir, "out", "Debug"
) | 125 //pdfiumLocalDir := path.Join(util.PDFiumTreeDir, "out", "Debug"
) |
119 //pdfiumRemoteDir := path.Join(util.BINARIES_DIR_NAME, *chromium
Build) | 126 //pdfiumRemoteDir := path.Join(util.BINARIES_DIR_NAME, *chromium
Build) |
120 //// Instantiate GsUtil object. | 127 //// Instantiate GsUtil object. |
121 //gs, err := util.NewGsUtil(nil) | 128 //gs, err := util.NewGsUtil(nil) |
122 //if err != nil { | 129 //if err != nil { |
123 // glog.Error(err) | 130 // glog.Error(err) |
124 // return | 131 // return |
125 //} | 132 //} |
126 //if err := gs.UploadFile(util.BINARY_PDFIUM_TEST, pdfiumLocalDi
r, pdfiumRemoteDir); err != nil { | 133 //if err := gs.UploadFile(util.BINARY_PDFIUM_TEST, pdfiumLocalDi
r, pdfiumRemoteDir); err != nil { |
127 // glog.Errorf("Could not upload %s to %s: %s", util.BINARY
_PDFIUM_TEST, pdfiumRemoteDir, err) | 134 // glog.Errorf("Could not upload %s to %s: %s", util.BINARY
_PDFIUM_TEST, pdfiumRemoteDir, err) |
128 // return | 135 // return |
129 //} | 136 //} |
130 } | 137 } |
131 | 138 |
132 » // Run the capture SKPs script on all workers. | 139 » // Instantiate the swarming client. |
133 » captureSKPsCmdTemplate := "DISPLAY=:0 {{.WorkerScript}} --worker_num={{.
WorkerNum}} --log_dir={{.LogDir}} --log_id={{.RunID}} " + | 140 » workDir, err := ioutil.TempDir("", "swarming_work_") |
134 » » "--pageset_type={{.PagesetType}} --chromium_build={{.ChromiumBui
ld}} --run_id={{.RunID}} " + | 141 » if err != nil { |
135 » » "--target_platform={{.TargetPlatform}} --local={{.Local}};" | 142 » » glog.Errorf("Could not get temp dir: %s", err) |
136 » captureSKPsTemplateParsed := template.Must(template.New("capture_skps_cm
d").Parse(captureSKPsCmdTemplate)) | 143 » » return |
137 » captureSKPsCmdBytes := new(bytes.Buffer) | 144 » } |
138 » if err := captureSKPsTemplateParsed.Execute(captureSKPsCmdBytes, struct
{ | 145 » s, err := swarming.NewSwarmingClient(workDir) |
139 » » WorkerScript string | 146 » if err != nil { |
140 » » WorkerNum string | 147 » » glog.Errorf("Could not instantiate swarming client: %s", err) |
141 » » LogDir string | 148 » » return |
142 » » PagesetType string | 149 » } |
143 » » ChromiumBuild string | 150 » defer s.Cleanup() |
144 » » RunID string | 151 » // Create isolated.gen.json files from tasks. |
145 » » TargetPlatform string | 152 » taskNames := []string{} |
146 » » Local bool | 153 » for i := 1; i <= util.PagesetTypeToInfo[*pagesetType].NumPages/maxPages;
i++ { |
147 » }{ | 154 » » taskNames = append(taskNames, fmt.Sprintf("capture_skps_%d", i)) |
148 » » WorkerScript: workerScript, | 155 » } |
149 » » WorkerNum: util.WORKER_NUM_KEYWORD, | 156 » genJSONs := []string{} |
150 » » LogDir: util.GLogDir, | 157 » // Get path to isolate files. |
151 » » PagesetType: *pagesetType, | 158 » _, currentFile, _, _ := runtime.Caller(0) |
152 » » ChromiumBuild: *chromiumBuild, | 159 » pathToIsolates := filepath.Join(filepath.Dir((filepath.Dir(filepath.Dir(
filepath.Dir(currentFile))))), "isolates") |
153 » » RunID: *runID, | 160 |
154 » » TargetPlatform: *targetPlatform, | 161 » for i, taskName := range taskNames { |
155 » » Local: *master_common.Local, | 162 » » extraArgs := map[string]string{ |
156 » }); err != nil { | 163 » » » "START_RANGE": strconv.Itoa(util.GetStartRange(i+1, m
axPages)), |
157 » » glog.Errorf("Failed to execute template: %s", err) | 164 » » » "NUM": strconv.Itoa(maxPages), |
| 165 » » » "PAGESET_TYPE": *pagesetType, |
| 166 » » » "CHROMIUM_BUILD": *chromiumBuild, |
| 167 » » » "RUN_ID": *runID, |
| 168 » » } |
| 169 » » genJSON, err := s.CreateIsolatedGenJSON(path.Join(pathToIsolates
, isolateFile), s.WorkDir, "linux", taskName, extraArgs, []string{}) |
| 170 » » if err != nil { |
| 171 » » » glog.Errorf("Could not create isolated.gen.json for task
%s: %s", taskName, err) |
| 172 » » » return |
| 173 » » } |
| 174 » » genJSONs = append(genJSONs, genJSON) |
| 175 » } |
| 176 » // Empty the remote dir before the workers upload to it. |
| 177 » gs, err := util.NewGsUtil(nil) |
| 178 » if err != nil { |
| 179 » » glog.Error(err) |
| 180 » » return |
| 181 » } |
| 182 » skpGSBaseDir := filepath.Join(util.SWARMING_DIR_NAME, util.SKPS_DIR_NAME
, *pagesetType) |
| 183 » skutil.LogErr(gs.DeleteRemoteDir(skpGSBaseDir)) |
| 184 » if strings.Contains(strings.ToUpper(*pagesetType), "PDF") { |
| 185 » » pdfGSBaseDir := filepath.Join(util.SWARMING_DIR_NAME, util.PDFS_
DIR_NAME, *pagesetType) |
| 186 » » skutil.LogErr(gs.DeleteRemoteDir(pdfGSBaseDir)) |
| 187 » } |
| 188 » // Archive, trigger and collect swarming tasks. |
| 189 » if err := util.ArchiveTriggerCollectSwarmingTask(s, taskNames, genJSONs,
2*time.Hour, 1*time.Hour); err != nil { |
| 190 » » glog.Errorf("Error encountered when swarming tasks: %s", err) |
158 return | 191 return |
159 } | 192 } |
160 | 193 |
161 cmd := append(master_common.WorkerSetupCmds(), | |
162 // The main command that captures SKPs on all workers. | |
163 captureSKPsCmdBytes.String()) | |
164 _, err := util.SSH(strings.Join(cmd, " "), util.Slaves, util.CAPTURE_SKP
S_TIMEOUT) | |
165 if err != nil { | |
166 glog.Errorf("Error while running cmd %s: %s", cmd, err) | |
167 return | |
168 } | |
169 | |
170 taskCompletedSuccessfully = true | 194 taskCompletedSuccessfully = true |
171 } | 195 } |
OLD | NEW |