| OLD | NEW |
| 1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 package memory | 5 package memory |
| 6 | 6 |
| 7 import ( | 7 import ( |
| 8 "fmt" | 8 "fmt" |
| 9 "strings" | 9 "strings" |
| 10 "testing" | 10 "testing" |
| (...skipping 39 matching lines...) |
| 50 "Val", 6, 8, 7, Next, | 50 "Val", 6, 8, 7, Next, |
| 51 "When", 27, Next, | 51 "When", 27, Next, |
| 52 "Extra", "zebra", | 52 "Extra", "zebra", |
| 53 ), | 53 ), |
| 54 pmap("$key", key("Kind", 3), Next, | 54 pmap("$key", key("Kind", 3), Next, |
| 55 "Val", 1, 2, 2, 100, Next, | 55 "Val", 1, 2, 2, 100, Next, |
| 56 "When", 996688461000000, Next, | 56 "When", 996688461000000, Next, |
| 57 "Extra", "waffle", | 57 "Extra", "waffle", |
| 58 ), | 58 ), |
| 59 pmap("$key", key("Kind", 6), Next, | 59 pmap("$key", key("Kind", 6), Next, |
| 60 » » "Val", 5, Next, | 60 » » "Val", 5, 3, 2, Next, |
| 61 "When", time.Date(2000, time.January, 1, 1, 1, 1, 1, time.UTC), Next, | 61 "When", time.Date(2000, time.January, 1, 1, 1, 1, 1, time.UTC), Next, |
| 62 "Extra", "waffle", | 62 "Extra", "waffle", |
| 63 ), | 63 ), |
| 64 pmap("$key", key("Kind", 3, "Child", "seven"), Next, | 64 pmap("$key", key("Kind", 3, "Child", "seven"), Next, |
| 65 "Interesting", 28, Next, | 65 "Interesting", 28, Next, |
| 66 "Extra", "hello", | 66 "Extra", "hello", |
| 67 ), | 67 ), |
| 68 pmap("$key", key("Unique", 1), Next, | 68 pmap("$key", key("Unique", 1), Next, |
| 69 "Derp", 39, | 69 "Derp", 39, |
| 70 ), | 70 ), |
| (...skipping 77 matching lines...) |
| 148 | 148 |
| 149 // get ziggy with it | 149 // get ziggy with it |
| 150 {q: nq("Kind").Eq("Extra", "waffle").Eq("Val", 100), get: []ds.PropertyMap{ | 150 {q: nq("Kind").Eq("Extra", "waffle").Eq("Val", 100), get: []ds.PropertyMap{ |
| 151 stage1Data[2], | 151 stage1Data[2], |
| 152 }}, | 152 }}, |
| 153 | 153 |
| 154 {q: nq("Child").Eq("Interesting", 28).Eq("Extra", "hello"), get: []ds.PropertyMap{ | 154 {q: nq("Child").Eq("Interesting", 28).Eq("Extra", "hello"), get: []ds.PropertyMap{ |
| 155 stage1Data[4], | 155 stage1Data[4], |
| 156 }}, | 156 }}, |
| 157 | 157 |
| 158 » » » » {q: (nq("Kind").Ancestor(key("Kind", 3)).Order("Val"). | 158 » » » » {q: nq("Kind").Eq("Val", 2, 3), get: []ds.PropertyMap{ |
| 159 » » » » » Start(curs("Val", 1, "__key__", key("Kind", 3))). | 159 » » » » » stage1Data[0], |
| 160 » » » » » End(curs("Val", 90, "__key__", key("Kind", 3, "Zeta", "woot")))), keys: []*ds.Key{}, | 160 » » » » » stage1Data[3], |
| 161 » » » » }}, |
| 162 |
| 163 » » » » // note the kind :) |
| 164 » » » » {q: (nq("Kind").Ancestor(key("Kind", 3)). |
| 165 » » » » » Start(curs("__key__", key("Kind", 3))). |
| 166 » » » » » End(curs("__key__", key("Kind", 3, "Zeta", "woot")))), |
| 167 » » » » » keys: []*ds.Key{ |
| 168 » » » » » » key("Kind", 3), |
| 169 » » » » » » key("Kind", 3, "Kind", 1), |
| 170 » » » » » » key("Kind", 3, "Kind", 2), |
| 171 » » » » » » key("Kind", 3, "Kind", 3), |
| 172 » » » » » }, |
| 161 }, | 173 }, |
| 162 | 174 |
| 163 » » » » {q: (nq("Kind").Ancestor(key("Kind", 3)).Order("Val"). | 175 » » » » {q: (nq("").Ancestor(key("Kind", 3)). |
| 164 » » » » » Start(curs("Val", 1, "__key__", key("Kind", 3))). | 176 » » » » » Start(curs("__key__", key("Kind", 3))). |
| 165 » » » » » End(curs("Val", 90, "__key__", key("Kind", 3, "Zeta", "woot")))), | 177 » » » » » End(curs("__key__", key("Kind", 3, "Zeta", "woot")))), |
| 166 » » » » » keys: []*ds.Key{}, | 178 » » » » » keys: []*ds.Key{ |
| 179 » » » » » » key("Kind", 3), |
| 180 » » » » » » key("Kind", 3, "Child", "seven"), |
| 181 » » » » » » key("Kind", 3, "Kind", 1), |
| 182 » » » » » » key("Kind", 3, "Kind", 2), |
| 183 » » » » » » key("Kind", 3, "Kind", 3), |
| 184 » » » » » }, |
| 185 » » » » }, |
| 186 |
| 187 » » » » {q: (nq("Kind").Ancestor(key("Kind", 3)). |
| 188 » » » » » Start(curs("__key__", key("Kind", 3))). |
| 189 » » » » » End(curs("__key__", key("Kind", 3, "Zeta", "woot")))), |
| 190 » » » » » keys: []*ds.Key{ |
| 191 » » » » » » key("Kind", 3), |
| 192 » » » » » » key("Kind", 3, "Kind", 1), |
| 193 » » » » » » key("Kind", 3, "Kind", 2), |
| 194 » » » » » » key("Kind", 3, "Kind", 3), |
| 195 » » » » » }, |
| 167 inTxn: true}, | 196 inTxn: true}, |
| 168 | 197 |
| 198 {q: nq("Kind").Ancestor(key("Kind", 3)).Eq("Val", 3, 4), |
| 199 keys: []*ds.Key{ |
| 200 key("Kind", 3, "Kind", 2), |
| 201 key("Kind", 3, "Kind", 3), |
| 202 }, |
| 203 get: []ds.PropertyMap{ |
| 204 stage2Data[1], |
| 205 stage2Data[2], |
| 206 }, |
| 207 }, |
| 208 |
| 169 {q: nq("Kind").Gt("Val", 2).Lte("Val", 5), get: []ds.PropertyMap{ | 209 {q: nq("Kind").Gt("Val", 2).Lte("Val", 5), get: []ds.PropertyMap{ |
| 170 stage1Data[0], stage1Data[3], | 210 stage1Data[0], stage1Data[3], |
| 171 }}, | 211 }}, |
| 172 | 212 |
| 173 {q: nq("Kind").Gt("Val", 2).Lte("Val", 5).Order("-Val"), get: []ds.PropertyMap{ | 213 {q: nq("Kind").Gt("Val", 2).Lte("Val", 5).Order("-Val"), get: []ds.PropertyMap{ |
| 174 stage1Data[3], stage1Data[0], | 214 stage1Data[3], stage1Data[0], |
| 175 }}, | 215 }}, |
| 176 | 216 |
| 177 {q: nq("").Gt("__key__", key("Kind", 2)), | 217 {q: nq("").Gt("__key__", key("Kind", 2)), |
| 178 // count counts from the index with KeysOnly and so counts the deleted | 218 // count counts from the index with KeysOnly and so counts the deleted |
| (...skipping 134 matching lines...) |
| 313 }), ShouldErrLike, strings.Join([]string{ | 353 }), ShouldErrLike, strings.Join([]string{ |
| 314 "Consider adding:", | 354 "Consider adding:", |
| 315 "- kind: Something", | 355 "- kind: Something", |
| 316 " properties:", | 356 " properties:", |
| 317 " - name: Does", | 357 " - name: Does", |
| 318 " - name: Not", | 358 " - name: Not", |
| 319 " - name: Work", | 359 " - name: Work", |
| 320 " direction: desc", | 360 " direction: desc", |
| 321 }, "\n")) | 361 }, "\n")) |
| 322 }, | 362 }, |
| 363 |
| 364 func(c context.Context) { |
| 365 data := ds.Get(c) |
| 366 q := nq("Something").Ancestor(key("Kind", 3)).Order("Val") |
| 367 So(data.Run(q, func(ds.Key, ds.CursorCB) bool { |
| 368 return true |
| 369 }), ShouldErrLike, strings.Join([]string{ |
| 370 "Consider adding:", |
| 371 "- kind: Something", |
| 372 " ancestor: yes", |
| 373 " properties:", |
| 374 " - name: Val", |
| 375 }, "\n")) |
| 376 }, |
| 323 }, | 377 }, |
| 324 }, | 378 }, |
| 325 | 379 |
| 326 { | 380 { |
| 327 expect: []qExpect{ | 381 expect: []qExpect{ |
| 328 // eventual consistency; Unique/1 is deleted at HEAD. Keysonly finds it, | 382 // eventual consistency; Unique/1 is deleted at HEAD. Keysonly finds it, |
| 329 // but 'normal' doesn't. | 383 // but 'normal' doesn't. |
| 330 {q: nq("Unique").Gt("__key__", key("AKind", 5)).Lte("__key__", key("Zeta", "prime")), | 384 {q: nq("Unique").Gt("__key__", key("AKind", 5)).Lte("__key__", key("Zeta", "prime")), |
| 331 keys: []*ds.Key{key("Unique", 1)}, | 385 keys: []*ds.Key{key("Unique", 1)}, |
| 332 get: []ds.PropertyMap{}}, | 386 get: []ds.PropertyMap{}}, |
| (...skipping 132 matching lines...) |
| 465 count, err := data.Count(q) | 519 count, err := data.Count(q) |
| 466 So(err, ShouldErrLike, "Insufficient indexes") | 520 So(err, ShouldErrLike, "Insufficient indexes") |
| 467 | 521 |
| 468 testing.AutoIndex(true) | 522 testing.AutoIndex(true) |
| 469 | 523 |
| 470 count, err = data.Count(q) | 524 count, err = data.Count(q) |
| 471 So(err, ShouldBeNil) | 525 So(err, ShouldBeNil) |
| 472 So(count, ShouldEqual, 2) | 526 So(count, ShouldEqual, 2) |
| 473 }) | 527 }) |
| 474 } | 528 } |
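For context, a minimal, hedged sketch (not part of the change itself) of the multi-value equality filter that the new cases at NEW lines 158 and 198 construct, written with the nq and key query helpers this test file already defines: passing several values to Eq adds one equality filter per value, so an entity matches only if its repeated property contains every listed value.

    // Hedged sketch only; assumes the nq/key helpers defined in this test file.
    // An entity matches q only if its repeated "Val" property contains both 2 and 3.
    q := nq("Kind").Eq("Val", 2, 3)
    // Same idea restricted to descendants of key("Kind", 3): "Val" must contain 3 and 4.
    qAncestor := nq("Kind").Ancestor(key("Kind", 3)).Eq("Val", 3, 4)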