OLD | NEW |
---|---|
1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 package memory | 5 package memory |
6 | 6 |
7 import ( | 7 import ( |
8 "bytes" | 8 "bytes" |
9 "fmt" | 9 "fmt" |
10 "sort" | 10 "sort" |
(...skipping 42 matching lines...) | |
53 | 53 |
54 func (s serializedPvals) Len() int { return len(s) } | 54 func (s serializedPvals) Len() int { return len(s) } |
55 func (s serializedPvals) Swap(i, j int) { s[i], s[j] = s[j], s[i] } | 55 func (s serializedPvals) Swap(i, j int) { s[i], s[j] = s[j], s[i] } |
56 func (s serializedPvals) Less(i, j int) bool { return bytes.Compare(s[i], s[j]) < 0 } | 56 func (s serializedPvals) Less(i, j int) bool { return bytes.Compare(s[i], s[j]) < 0 } |
57 | 57 |
58 // prop name -> [<serialized DSProperty>, ...] | 58 // prop name -> [<serialized DSProperty>, ...] |
59 // includes special values '__key__' and '__ancestor__' which contains all of | 59 // includes special values '__key__' and '__ancestor__' which contains all of |
60 // the ancestor entries for this key. | 60 // the ancestor entries for this key. |
61 type serializedIndexablePmap map[string]serializedPvals | 61 type serializedIndexablePmap map[string]serializedPvals |
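For orientation, a hypothetical literal showing the shape this map takes for a key Child:2 with parent Parent:1 and a single indexed property "Name". The byte contents are placeholders, not real serialize.ToBytes output, and the example leans on the package's own serializedPvals ([][]byte) type:

```go
// Hypothetical shape only: the placeholder strings stand in for serialized bytes.
var _ = serializedIndexablePmap{
	"__key__":      serializedPvals{[]byte("<key Child:2>")},
	"__ancestor__": serializedPvals{[]byte("<key Child:2>"), []byte("<key Parent:1>")},
	"Name":         serializedPvals{[]byte("<prop Name>")},
}
```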
62 | 62 |
 | 63 func serializeRow(vals []ds.Property) serializedPvals { |
iannucci
2015/09/10 03:56:58
This was a convenient refactoring. It'll become mo
| |
 | 64   dups := map[string]struct{}{} |
 | 65   ret := make(serializedPvals, 0, len(vals)) |
 | 66   for _, v := range vals { |
 | 67     if v.IndexSetting() == ds.NoIndex { |
 | 68       continue |
 | 69     } |
 | 70     data := serialize.ToBytes(v.ForIndex()) |
 | 71     dataS := string(data) |
 | 72     if _, ok := dups[dataS]; ok { |
 | 73       continue |
 | 74     } |
 | 75     dups[dataS] = struct{}{} |
 | 76     ret = append(ret, data) |
 | 77   } |
 | 78   return ret |
 | 79 } |
 | 80 |
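For readers skimming the new helper above: a minimal, self-contained sketch of the pattern it applies, i.e. serialize each indexable value and drop byte-identical duplicates by keying a set on string(data). The dedupRows helper and its pre-serialized [][]byte input are stand-ins for the package's ds.Property / serialize.ToBytes machinery, not the real API:

```go
package main

import (
	"bytes"
	"fmt"
	"sort"
)

// dedupRows keeps the first occurrence of each distinct serialized value,
// mirroring the dups-set bookkeeping in serializeRow.
func dedupRows(vals [][]byte) [][]byte {
	dups := map[string]struct{}{}
	ret := make([][]byte, 0, len(vals))
	for _, data := range vals {
		dataS := string(data)
		if _, ok := dups[dataS]; ok {
			continue
		}
		dups[dataS] = struct{}{}
		ret = append(ret, data)
	}
	return ret
}

func main() {
	rows := dedupRows([][]byte{[]byte("a"), []byte("b"), []byte("a")})
	// Ordering by bytes.Compare mirrors serializedPvals.Less.
	sort.Slice(rows, func(i, j int) bool { return bytes.Compare(rows[i], rows[j]) < 0 })
	fmt.Printf("%q\n", rows) // ["a" "b"]
}
```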
63 func partiallySerialize(k ds.Key, pm ds.PropertyMap) (ret serializedIndexablePmap) { | 81 func partiallySerialize(k ds.Key, pm ds.PropertyMap) (ret serializedIndexablePmap) { |
64 ret = make(serializedIndexablePmap, len(pm)+2) | 82 ret = make(serializedIndexablePmap, len(pm)+2) |
 | 83   if k == nil { |
 | 84     impossible(fmt.Errorf("key to partiallySerialize is nil")) |
 | 85   } |
65 ret["__key__"] = [][]byte{serialize.ToBytes(ds.MkProperty(k))} | 86 ret["__key__"] = [][]byte{serialize.ToBytes(ds.MkProperty(k))} |
66 for k != nil { | 87 for k != nil { |
67   ret["__ancestor__"] = append(ret["__ancestor__"], serialize.ToBytes(ds.MkProperty(k))) | 88   ret["__ancestor__"] = append(ret["__ancestor__"], serialize.ToBytes(ds.MkProperty(k))) |
68 k = k.Parent() | 89 k = k.Parent() |
69 } | 90 } |
70 for k, vals := range pm { | 91 for k, vals := range pm { |
71     dups := stringSet{} | 92     newVals := serializeRow(vals) |
72     newVals := make(serializedPvals, 0, len(vals)) | |
73     for _, v := range vals { | |
74       if v.IndexSetting() == ds.NoIndex { | |
75         continue | |
76       } | |
77       data := serialize.ToBytes(v) | |
78       dataS := string(data) | |
79       if !dups.add(dataS) { | |
80         continue | |
81       } | |
82       newVals = append(newVals, data) | |
83     } | |
84 if len(newVals) > 0 { | 93 if len(newVals) > 0 { |
85 ret[k] = newVals | 94 ret[k] = newVals |
86 } | 95 } |
87 } | 96 } |
88 return | 97 return |
89 } | 98 } |
90 | 99 |
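A rough standalone illustration of how partiallySerialize fills the special "__ancestor__" entry: it records one entry per key in the ancestor chain, walking Parent() until nil. The key type and toBytes helper below are simplified stand-ins for illustration, not the datastore API:

```go
package main

import "fmt"

// key is a hypothetical stand-in for ds.Key: a kind/id pair with a parent link.
type key struct {
	kind   string
	id     int64
	parent *key
}

func (k *key) Parent() *key { return k.parent }

// toBytes stands in for serialize.ToBytes(ds.MkProperty(k)).
func toBytes(k *key) []byte { return []byte(fmt.Sprintf("%s:%d", k.kind, k.id)) }

func main() {
	root := &key{kind: "Parent", id: 1}
	child := &key{kind: "Child", id: 2, parent: root}

	pmap := map[string][][]byte{}
	pmap["__key__"] = [][]byte{toBytes(child)}
	// Walk the ancestor chain, including the key itself, just as the
	// `for k != nil { ...; k = k.Parent() }` loop above does.
	for k := child; k != nil; k = k.Parent() {
		pmap["__ancestor__"] = append(pmap["__ancestor__"], toBytes(k))
	}
	fmt.Printf("%q\n", pmap["__ancestor__"]) // ["Child:2" "Parent:1"]
}
```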
91 // indexRowGen contains enough information to generate all of the index rows which | 100 // indexRowGen contains enough information to generate all of the index rows which |
92 // correspond with a propertyList and a ds.IndexDefinition. | 101 // correspond with a propertyList and a ds.IndexDefinition. |
93 type indexRowGen struct { | 102 type indexRowGen struct { |
(...skipping 203 matching lines...) | |
297 compIdx := []*ds.IndexDefinition{} | 306 compIdx := []*ds.IndexDefinition{} |
298 walkCompIdxs(store, nil, func(i *ds.IndexDefinition) bool { | 307 walkCompIdxs(store, nil, func(i *ds.IndexDefinition) bool { |
299 compIdx = append(compIdx, i) | 308 compIdx = append(compIdx, i) |
300 return true | 309 return true |
301 }) | 310 }) |
302 | 311 |
303 mergeIndexes(key.Namespace(), store, | 312 mergeIndexes(key.Namespace(), store, |
304 indexEntriesWithBuiltins(key, oldEnt, compIdx), | 313 indexEntriesWithBuiltins(key, oldEnt, compIdx), |
305 indexEntriesWithBuiltins(key, newEnt, compIdx)) | 314 indexEntriesWithBuiltins(key, newEnt, compIdx)) |
306 } | 315 } |
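The final hunk feeds the index rows generated from the old and new entity versions into mergeIndexes. Conceptually that amounts to a set diff over sorted rows: rows only in the old set are deleted, rows only in the new set are added, and common rows are left alone. The diffRows sketch below is an assumption about that behaviour for illustration, not the store's actual implementation:

```go
package main

import (
	"bytes"
	"fmt"
)

// diffRows assumes both inputs are sorted ascending (as serializedPvals are
// once sorted) and returns the rows to delete and the rows to add.
func diffRows(oldRows, newRows [][]byte) (toDel, toAdd [][]byte) {
	i, j := 0, 0
	for i < len(oldRows) && j < len(newRows) {
		switch c := bytes.Compare(oldRows[i], newRows[j]); {
		case c < 0: // present only in the old entity's rows: delete it
			toDel = append(toDel, oldRows[i])
			i++
		case c > 0: // present only in the new entity's rows: add it
			toAdd = append(toAdd, newRows[j])
			j++
		default: // unchanged row: nothing to do
			i++
			j++
		}
	}
	toDel = append(toDel, oldRows[i:]...)
	toAdd = append(toAdd, newRows[j:]...)
	return
}

func main() {
	del, add := diffRows(
		[][]byte{[]byte("a"), []byte("b")},
		[][]byte{[]byte("b"), []byte("c")})
	fmt.Printf("delete %q, add %q\n", del, add) // delete ["a"], add ["c"]
}
```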