OLD | NEW |
(Empty) | |
| 1 stats = {} |
| 2 |
| 3 -- switch this to run on the automated scraper system |
| 4 newline = "\n" |
| 5 -- newline = "\\n" |
| 6 |
| 7 function sk_scrape_startcanvas(c, fileName) |
| 8     canvas = c |
| 9     oldstackstr = "<invalid>" |
| 10 end |
| 11 |
| 12 function sk_scrape_endcanvas(c, fileName) |
| 13     canvas = nil |
| 14 end |
| 15 |
| 16 function string.starts(String,Start) |
| 17     return string.sub(String,1,string.len(Start))==Start |
| 18 end |
| 19 |
| 20 function build_stack_string(stack) |
| 21     local info = "" |
| 22     for i = 1, #stack do |
| 23         local element = stack[i]; |
| 24         info = info .. element["op"] .. ", " .. element["type"] .. ", aa:" .. tostring(element["aa"]) |
| 25         if (element["type"] == "path") then |
| 26             if (element["path"]:getSegmentTypes() == "line" and element["path"]:isConvex()) then |
| 27                 info = info .. ", convex_poly " .. element["path"]:countPoints() .. " points" |
| 28             else |
| 29                 info = info .. ", fill: " .. element["path"]:getFillType() |
| 30                 info = info .. ", segments: (" .. element["path"]:getSegmentTypes() .. ")" |
| 31                 info = info .. ", convex:" .. tostring(element["path"]:isConvex()) |
| 32             end |
| 33         end |
| 34         info = info .. newline |
| 35     end |
| 36     return info |
| 37 end |
| 38 |
| 39 function sk_scrape_accumulate(t) |
| 40     if (string.starts(t.verb, "draw")) then |
| 41         local stack = canvas:getReducedClipStack() |
| 42         local stackstr = build_stack_string(stack) |
| 43         if (stackstr ~= "") then |
| 44             if (stats[stackstr] == nil) then |
| 45                 stats[stackstr] = {} |
| 46                 stats[stackstr].drawCnt = 0 |
| 47                 stats[stackstr].instanceCnt = 0 |
| 48             end |
| 49             stats[stackstr].drawCnt = stats[stackstr].drawCnt + 1 |
| 50             if (stackstr ~= oldstackstr) then |
| 51                 stats[stackstr].instanceCnt = stats[stackstr].instanceCnt + 1 |
| 52             end |
| 53         end |
| 54         oldstackstr = stackstr |
| 55     end |
| 56 end |
| 57 |
| 58 function print_stats(stats) |
| 59     function sort_by_draw_cnt(a, b) |
| 60         return a.data.drawCnt > b.data.drawCnt |
| 61     end |
| 62     array = {} |
| 63     for k,v in pairs(stats) do |
| 64         array[#array + 1] = { name = k, data = v } |
| 65     end |
| 66     table.sort(array, sort_by_draw_cnt) |
| 67     for i = 1, #array do |
| 68         io.write("\n-------\n", array[i].name, tostring(array[i].data.drawCnt), " draws, ", tostring(array[i].data.instanceCnt), " instances.\n") |
| 69     end |
| 70 end |
| 71 |
| 72 function sk_scrape_summarize() |
| 73     print_stats(stats) |
| 74     --[[ To use the web scraper comment out the print above, run the code below to generate an |
| 75          aggregate table on the automated scraper system. Then use the print_stats function on |
| 76          agg_stats in the aggregator step. |
| 77     for k,v in pairs(stats) do |
| 78         if (v.drawCnt ~= nil) then |
| 79             -- io.write("\n-------\n", k, tostring(v.drawCnt), " draws, ", tostring(v.instanceCnt), " instances.\n") |
| 80             local tableEntry = 'agg_stats["' .. k .. '"]' |
| 81             io.write(tableEntry, " = ", tableEntry, " or {}\n") |
| 82             io.write(tableEntry, ".drawCnt = (", tableEntry, ".drawCnt or 0 ) + ", v.drawCnt, "\n") |
| 83             io.write(tableEntry, ".instanceCnt = (", tableEntry, ".instanceCnt or 0 ) + ", v.instanceCnt, "\n") |
| 84         end |
| 85     end |
| 86     --]] |
| 87 end |
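
The commented-out branch in sk_scrape_summarize emits Lua assignments that rebuild an agg_stats table in the aggregator step. A minimal sketch of how that step might consume the emitted lines, assuming the per-slave outputs have been collected into a table of strings (the name collected_outputs is illustrative, not part of the script above):

    agg_stats = {}

    -- Run the emitted 'agg_stats[...] = ...' statements from every slave.
    for _, chunk in ipairs(collected_outputs) do
        assert(loadstring(chunk))()   -- loadstring on Lua 5.1; use load() on 5.2+
    end

    -- Reuse print_stats from the script above to report the merged counts.
    print_stats(agg_stats)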