| OLD | NEW |
| 1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 package memory | 5 package memory |
| 6 | 6 |
| 7 import ( | 7 import ( |
| 8 "fmt" | 8 "fmt" |
| 9 "strings" | 9 "strings" |
| 10 "testing" | 10 "testing" |
| (...skipping 39 matching lines...) |
| 50 "Val", 6, 8, 7, Next, | 50 "Val", 6, 8, 7, Next, |
| 51 "When", 27, Next, | 51 "When", 27, Next, |
| 52 "Extra", "zebra", | 52 "Extra", "zebra", |
| 53 ), | 53 ), |
| 54 pmap("$key", key("Kind", 3), Next, | 54 pmap("$key", key("Kind", 3), Next, |
| 55 "Val", 1, 2, 2, 100, Next, | 55 "Val", 1, 2, 2, 100, Next, |
| 56 "When", 996688461000000, Next, | 56 "When", 996688461000000, Next, |
| 57 "Extra", "waffle", | 57 "Extra", "waffle", |
| 58 ), | 58 ), |
| 59 pmap("$key", key("Kind", 6), Next, | 59 pmap("$key", key("Kind", 6), Next, |
| 60 » » "Val", 5, Next, | 60 » » "Val", 5, 3, 2, Next, |
| 61 "When", time.Date(2000, time.January, 1, 1, 1, 1, 1, time.UTC), Next, | 61 "When", time.Date(2000, time.January, 1, 1, 1, 1, 1, time.UTC), Next, |
| 62 "Extra", "waffle", | 62 "Extra", "waffle", |
| 63 ), | 63 ), |
| 64 pmap("$key", key("Kind", 3, "Child", "seven"), Next, | 64 pmap("$key", key("Kind", 3, "Child", "seven"), Next, |
| 65 "Interesting", 28, Next, | 65 "Interesting", 28, Next, |
| 66 "Extra", "hello", | 66 "Extra", "hello", |
| 67 ), | 67 ), |
| 68 pmap("$key", key("Unique", 1), Next, | 68 pmap("$key", key("Unique", 1), Next, |
| 69 "Derp", 39, | 69 "Derp", 39, |
| 70 ), | 70 ), |
| (...skipping 75 matching lines...) |
| 146 | 146 |
| 147 // get ziggy with it | 147 // get ziggy with it |
| 148 {q: nq("Kind").Eq("Extra", "waffle").Eq("Val", 100), get: []ds.PropertyMap{ | 148 {q: nq("Kind").Eq("Extra", "waffle").Eq("Val", 100), get: []ds.PropertyMap{ |
| 149 stage1Data[2], | 149 stage1Data[2], |
| 150 }}, | 150 }}, |
| 151 | 151 |
| 152 {q: nq("Child").Eq("Interesting", 28).Eq("Extra", "hello"), get: []ds.PropertyMap{ | 152 {q: nq("Child").Eq("Interesting", 28).Eq("Extra", "hello"), get: []ds.PropertyMap{ |
| 153 stage1Data[4], | 153 stage1Data[4], |
| 154 }}, | 154 }}, |
| 155 | 155 |
| 156 » » » » {q: (nq("Kind").Ancestor(key("Kind", 3)).Order("Val"). | 156 » » » » {q: nq("Kind").Eq("Val", 2, 3), get: []ds.PropertyMap{ |
| 157 » » » » » Start(curs("Val", 1, "__key__", key("Kind", 3))). | 157 » » » » » stage1Data[0], |
| 158 » » » » » End(curs("Val", 90, "__key__", key("Kind", 3, "Zeta", "woot")))), keys: []*ds.Key{}, | 158 » » » » » stage1Data[3], |
| 159 » » » » }}, |
| 160 |
| 161 » » » » // note the kind :) |
| 162 » » » » {q: (nq("Kind").Ancestor(key("Kind", 3)). |
| 163 » » » » » Start(curs("__key__", key("Kind", 3))). |
| 164 » » » » » End(curs("__key__", key("Kind", 3, "Zeta", "woot")))), |
| 165 » » » » » keys: []*ds.Key{ |
| 166 » » » » » » key("Kind", 3), |
| 167 » » » » » » key("Kind", 3, "Kind", 1), |
| 168 » » » » » » key("Kind", 3, "Kind", 2), |
| 169 » » » » » » key("Kind", 3, "Kind", 3), |
| 170 » » » » » }, |
| 159 }, | 171 }, |
| 160 | 172 |
| 161 » » » » {q: (nq("Kind").Ancestor(key("Kind", 3)).Order("Val"). | 173 » » » » {q: (nq("").Ancestor(key("Kind", 3)). |
| 162 » » » » » Start(curs("Val", 1, "__key__", key("Kind", 3))). | 174 » » » » » Start(curs("__key__", key("Kind", 3))). |
| 163 » » » » » End(curs("Val", 90, "__key__", key("Kind", 3, "Zeta", "woot")))), | 175 » » » » » End(curs("__key__", key("Kind", 3, "Zeta", "woot")))), |
| 164 » » » » » keys: []*ds.Key{}, | 176 » » » » » keys: []*ds.Key{ |
| 177 » » » » » » key("Kind", 3), |
| 178 » » » » » » key("Kind", 3, "Child", "seven"), |
| 179 » » » » » » key("Kind", 3, "Kind", 1), |
| 180 » » » » » » key("Kind", 3, "Kind", 2), |
| 181 » » » » » » key("Kind", 3, "Kind", 3), |
| 182 » » » » » }, |
| 183 » » » » }, |
| 184 |
| 185 » » » » {q: (nq("Kind").Ancestor(key("Kind", 3)). |
| 186 » » » » » Start(curs("__key__", key("Kind", 3))). |
| 187 » » » » » End(curs("__key__", key("Kind", 3, "Zeta", "woot")))), |
| 188 » » » » » keys: []*ds.Key{ |
| 189 » » » » » » key("Kind", 3), |
| 190 » » » » » » key("Kind", 3, "Kind", 1), |
| 191 » » » » » » key("Kind", 3, "Kind", 2), |
| 192 » » » » » » key("Kind", 3, "Kind", 3), |
| 193 » » » » » }, |
| 165 inTxn: true}, | 194 inTxn: true}, |
| 166 | 195 |
| 196 {q: nq("Kind").Ancestor(key("Kind", 3)).Eq("Val", 3, 4), |
| 197 keys: []*ds.Key{ |
| 198 key("Kind", 3, "Kind", 2), |
| 199 key("Kind", 3, "Kind", 3), |
| 200 }, |
| 201 get: []ds.PropertyMap{ |
| 202 stage2Data[1], |
| 203 stage2Data[2], |
| 204 }, |
| 205 }, |
| 206 |
| 167 {q: nq("Kind").Gt("Val", 2).Lte("Val", 5), get: []ds.PropertyMap{ | 207 {q: nq("Kind").Gt("Val", 2).Lte("Val", 5), get: []ds.PropertyMap{ |
| 168 stage1Data[0], stage1Data[3], | 208 stage1Data[0], stage1Data[3], |
| 169 }}, | 209 }}, |
| 170 | 210 |
| 171 {q: nq("Kind").Gt("Val", 2).Lte("Val", 5).Order("-Val"), get: []ds.PropertyMap{ | 211 {q: nq("Kind").Gt("Val", 2).Lte("Val", 5).Order("-Val"), get: []ds.PropertyMap{ |
| 172 stage1Data[3], stage1Data[0], | 212 stage1Data[3], stage1Data[0], |
| 173 }}, | 213 }}, |
| 174 | 214 |
| 175 {q: nq("").Gt("__key__", key("Kind", 2)), | 215 {q: nq("").Gt("__key__", key("Kind", 2)), |
| 176 // count counts from the index with KeysOnly and so counts the deleted | 216 // count counts from the index with KeysOnly and so counts the deleted |
| (...skipping 134 matching lines...) |
| 311 }), ShouldErrLike, strings.Join([]string{ | 351 }), ShouldErrLike, strings.Join([]string{ |
| 312 "Consider adding:", | 352 "Consider adding:", |
| 313 "- kind: Something", | 353 "- kind: Something", |
| 314 " properties:", | 354 " properties:", |
| 315 " - name: Does", | 355 " - name: Does", |
| 316 " - name: Not", | 356 " - name: Not", |
| 317 " - name: Work", | 357 " - name: Work", |
| 318 " direction: desc", | 358 " direction: desc", |
| 319 }, "\n")) | 359 }, "\n")) |
| 320 }, | 360 }, |
| 361 |
| 362 func(c context.Context) { |
| 363 data := ds.Get(c) |
| 364 q := nq("Something").Ancestor(key("Kind", 3)).Order("Val") |
| 365 So(data.Run(q, func(ds.Key, ds.CursorCB) bool { |
| 366 return true |
| 367 }), ShouldErrLike, strings.Join([]string{ |
| 368 "Consider adding:", |
| 369 "- kind: Something", |
| 370 " ancestor: yes", |
| 371 " properties:", |
| 372 " - name: Val", |
| 373 }, "\n")) |
| 374 }, |
| 321 }, | 375 }, |
| 322 }, | 376 }, |
| 323 | 377 |
| 324 { | 378 { |
| 325 expect: []qExpect{ | 379 expect: []qExpect{ |
| 326 // eventual consistency; Unique/1 is deleted at HEAD. Keysonly finds it, | 380 // eventual consistency; Unique/1 is deleted at HEAD. Keysonly finds it, |
| 327 // but 'normal' doesn't. | 381 // but 'normal' doesn't. |
| 328 {q: nq("Unique").Gt("__key__", key("AKind", 5)).Lte("__key__", key("Zeta", "prime")), | 382 {q: nq("Unique").Gt("__key__", key("AKind", 5)).Lte("__key__", key("Zeta", "prime")), |
| 329 keys: []*ds.Key{key("Unique", 1)}, | 383 keys: []*ds.Key{key("Unique", 1)}, |
| 330 get: []ds.PropertyMap{}}, | 384 get: []ds.PropertyMap{}}, |
| (...skipping 132 matching lines...) |
| 463 count, err := data.Count(q) | 517 count, err := data.Count(q) |
| 464 So(err, ShouldErrLike, "Insufficient indexes") | 518 So(err, ShouldErrLike, "Insufficient indexes") |
| 465 | 519 |
| 466 testing.AutoIndex(true) | 520 testing.AutoIndex(true) |
| 467 | 521 |
| 468 count, err = data.Count(q) | 522 count, err = data.Count(q) |
| 469 So(err, ShouldBeNil) | 523 So(err, ShouldBeNil) |
| 470 So(count, ShouldEqual, 2) | 524 So(count, ShouldEqual, 2) |
| 471 }) | 525 }) |
| 472 } | 526 } |
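
The "eventual consistency" comment in the expectation block above describes why the keys-only query still returns Unique/1 after it was deleted at HEAD: keys-only results come straight from the (stale) index snapshot, while a "normal" query resolves each index key against HEAD and drops entities that no longer exist. A toy, self-contained Go sketch of that behavior follows; it is an illustration only, not the memory datastore's actual implementation, and the `index`/`head` structures are made up for this example.

```go
package main

import "fmt"

func main() {
	// Stale index snapshot: still lists the key of a deleted entity.
	index := []string{"Unique/1"}

	// HEAD entity table: Unique/1 has already been deleted, so the key
	// is intentionally absent here.
	head := map[string]map[string]int{}

	// Keys-only query: answered purely from the index, so it reports
	// Unique/1 even though the entity is gone at HEAD.
	fmt.Println("keys-only:", index)

	// "Normal" query: each index key is resolved against HEAD, so the
	// deleted entity is filtered out of the results.
	results := []map[string]int{}
	for _, k := range index {
		if props, ok := head[k]; ok {
			results = append(results, props)
		}
	}
	fmt.Println("normal:", results) // empty
}
```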
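
Several assertions in this change expect the memory datastore to respond to "Insufficient indexes" with an index.yaml-style "Consider adding:" suggestion, including an `ancestor: yes` line for ancestor queries and a `direction: desc` line for descending sort properties. The sketch below only illustrates the shape of that suggestion text as asserted in the test; the `prop`, `indexDefinition`, and `suggestion` names are hypothetical stand-ins, not the gae datastore API.

```go
package main

import (
	"fmt"
	"strings"
)

// prop and indexDefinition are hypothetical stand-ins for a composite
// index specification.
type prop struct {
	name       string
	descending bool
}

type indexDefinition struct {
	kind     string
	ancestor bool
	props    []prop
}

// suggestion renders an index.yaml-style snippet in the shape the test
// assertions expect ("Consider adding: ...").
func suggestion(id indexDefinition) string {
	lines := []string{"Consider adding:", "- kind: " + id.kind}
	if id.ancestor {
		lines = append(lines, "  ancestor: yes")
	}
	lines = append(lines, "  properties:")
	for _, p := range id.props {
		lines = append(lines, "  - name: "+p.name)
		if p.descending {
			lines = append(lines, "    direction: desc")
		}
	}
	return strings.Join(lines, "\n")
}

func main() {
	// Mirrors the second assertion: an ancestor query on "Something"
	// ordered by Val.
	fmt.Println(suggestion(indexDefinition{
		kind:     "Something",
		ancestor: true,
		props:    []prop{{name: "Val"}},
	}))
}
```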