Side by Side Diff: sync/tools/testserver/chromiumsync_test.py

Issue 2130453004: [Sync] Move //sync to //components/sync. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Rebase. Created 4 years, 4 months ago
1 #!/usr/bin/env python
2 # Copyright 2013 The Chromium Authors. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """Tests exercising chromiumsync and SyncDataModel."""
7
8 import pickle
9 import unittest
10
11 import autofill_specifics_pb2
12 import bookmark_specifics_pb2
13 import chromiumsync
14 import managed_user_specifics_pb2
15 import sync_pb2
16 import theme_specifics_pb2
17
18 class SyncDataModelTest(unittest.TestCase):
19 def setUp(self):
20 self.model = chromiumsync.SyncDataModel()
21 # The Synced Bookmarks folder is not created by default
22 self._expect_synced_bookmarks_folder = False
23
24 def AddToModel(self, proto):
25 self.model._entries[proto.id_string] = proto
26
27 def GetChangesFromTimestamp(self, requested_types, timestamp):
28 message = sync_pb2.GetUpdatesMessage()
29 message.from_timestamp = timestamp
30 for data_type in requested_types:
31 getattr(message.requested_types,
32 chromiumsync.SYNC_TYPE_TO_DESCRIPTOR[
33 data_type].name).SetInParent()
34 return self.model.GetChanges(
35 chromiumsync.UpdateSieve(message, self.model.migration_history))
36
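
For reference, a minimal sketch of the same request phrased in the newer from_progress_marker form that UpdateSieve also accepts (see testUpdateSieve below). The method name GetChangesFromMarkers is hypothetical, and using SYNC_TYPE_TO_DESCRIPTOR[...].number is an assumption modeled on SYNC_TYPE_FIELDS['autofill'].number elsewhere in this file; the pickled (timestamp, migration_version) token format matches what these tests use. Illustrative only, not part of chromiumsync_test.py.

  def GetChangesFromMarkers(self, requested_types, timestamp):
    """Illustrative sketch: the same GetUpdates via from_progress_marker."""
    message = sync_pb2.GetUpdatesMessage()
    for data_type in requested_types:
      marker = message.from_progress_marker.add()
      # Assumption: the descriptor exposes .number, just as
      # SYNC_TYPE_FIELDS['autofill'].number does in testUpdateSieve.
      marker.data_type_id = (
          chromiumsync.SYNC_TYPE_TO_DESCRIPTOR[data_type].number)
      # Tokens in these tests are pickled (timestamp, migration_version) pairs.
      marker.token = pickle.dumps((timestamp, 1))
    return self.model.GetChanges(
        chromiumsync.UpdateSieve(message, self.model.migration_history))
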
37 def FindMarkerByNumber(self, markers, datatype):
38 """Search a list of progress markers and find the one for a datatype."""
39 for marker in markers:
40 if marker.data_type_id == datatype.number:
41 return marker
42 self.fail('Required marker not found: %s' % datatype.name)
43
44 def testPermanentItemSpecs(self):
45 specs = chromiumsync.SyncDataModel._PERMANENT_ITEM_SPECS
46
47 declared_specs = set(['0'])
48 for spec in specs:
49 self.assertTrue(spec.parent_tag in declared_specs, 'parent tags must '
50 'be declared before use')
51 declared_specs.add(spec.tag)
52
53 unique_datatypes = set([x.sync_type for x in specs])
54 self.assertEqual(unique_datatypes,
55 set(chromiumsync.ALL_TYPES[1:]),
56 'Every sync datatype should have a permanent folder '
57 'associated with it')
58
59 def testSaveEntry(self):
60 proto = sync_pb2.SyncEntity()
61 proto.id_string = 'abcd'
62 proto.version = 0
63 self.assertFalse(self.model._ItemExists(proto.id_string))
64 self.model._SaveEntry(proto)
65 self.assertEqual(1, proto.version)
66 self.assertTrue(self.model._ItemExists(proto.id_string))
67 self.model._SaveEntry(proto)
68 self.assertEqual(2, proto.version)
69 proto.version = 0
70 self.assertTrue(self.model._ItemExists(proto.id_string))
71 self.assertEqual(2, self.model._entries[proto.id_string].version)
72
73 def testCreatePermanentItems(self):
74 self.model._CreateDefaultPermanentItems(chromiumsync.ALL_TYPES)
75 self.assertEqual(len(chromiumsync.ALL_TYPES) + 1,
76 len(self.model._entries))
77
78 def ExpectedPermanentItemCount(self, sync_type):
79 if sync_type == chromiumsync.BOOKMARK:
80 if self._expect_synced_bookmarks_folder:
81 return 4
82 else:
83 return 3
84 else:
85 return 1
86
87 def testGetChangesFromTimestampZeroForEachType(self):
88 all_types = chromiumsync.ALL_TYPES[1:]
89 for sync_type in all_types:
90 self.model = chromiumsync.SyncDataModel()
91 request_types = [sync_type]
92
93 version, changes, remaining = (
94 self.GetChangesFromTimestamp(request_types, 0))
95
96 expected_count = self.ExpectedPermanentItemCount(sync_type)
97 self.assertEqual(expected_count, version)
98 self.assertEqual(expected_count, len(changes))
99 for change in changes:
100 self.assertTrue(change.HasField('server_defined_unique_tag'))
101 self.assertEqual(change.version, change.sync_timestamp)
102 self.assertTrue(change.version <= version)
103
104 # Test idempotence: another GetUpdates from ts=0 shouldn't recreate.
105 version, changes, remaining = (
106 self.GetChangesFromTimestamp(request_types, 0))
107 self.assertEqual(expected_count, version)
108 self.assertEqual(expected_count, len(changes))
109 self.assertEqual(0, remaining)
110
111 # Doing a wider GetUpdates from timestamp zero shouldn't recreate either.
112 new_version, changes, remaining = (
113 self.GetChangesFromTimestamp(all_types, 0))
114 if self._expect_synced_bookmarks_folder:
115 self.assertEqual(len(chromiumsync.SyncDataModel._PERMANENT_ITEM_SPECS),
116 new_version)
117 else:
118 self.assertEqual(
119 len(chromiumsync.SyncDataModel._PERMANENT_ITEM_SPECS) - 1,
120 new_version)
121 self.assertEqual(new_version, len(changes))
122 self.assertEqual(0, remaining)
123 version, changes, remaining = (
124 self.GetChangesFromTimestamp(request_types, 0))
125 self.assertEqual(new_version, version)
126 self.assertEqual(expected_count, len(changes))
127 self.assertEqual(0, remaining)
128
129 def testBatchSize(self):
130 for sync_type in chromiumsync.ALL_TYPES[1:]:
131 specifics = chromiumsync.GetDefaultEntitySpecifics(sync_type)
132 self.model = chromiumsync.SyncDataModel()
133 request_types = [sync_type]
134
135 for i in range(self.model._BATCH_SIZE*3):
136 entry = sync_pb2.SyncEntity()
137 entry.id_string = 'batch test %d' % i
138 entry.specifics.CopyFrom(specifics)
139 self.model._SaveEntry(entry)
140 last_bit = self.ExpectedPermanentItemCount(sync_type)
141 version, changes, changes_remaining = (
142 self.GetChangesFromTimestamp(request_types, 0))
143 self.assertEqual(self.model._BATCH_SIZE, version)
144 self.assertEqual(self.model._BATCH_SIZE*2 + last_bit, changes_remaining)
145 version, changes, changes_remaining = (
146 self.GetChangesFromTimestamp(request_types, version))
147 self.assertEqual(self.model._BATCH_SIZE*2, version)
148 self.assertEqual(self.model._BATCH_SIZE + last_bit, changes_remaining)
149 version, changes, changes_remaining = (
150 self.GetChangesFromTimestamp(request_types, version))
151 self.assertEqual(self.model._BATCH_SIZE*3, version)
152 self.assertEqual(last_bit, changes_remaining)
153 version, changes, changes_remaining = (
154 self.GetChangesFromTimestamp(request_types, version))
155 self.assertEqual(self.model._BATCH_SIZE*3 + last_bit, version)
156 self.assertEqual(0, changes_remaining)
157
158 # Now delete a third of the items.
159 for i in xrange(self.model._BATCH_SIZE*3 - 1, 0, -3):
160 entry = sync_pb2.SyncEntity()
161 entry.id_string = 'batch test %d' % i
162 entry.deleted = True
163 self.model._SaveEntry(entry)
164
165 # The batch counts shouldn't change.
166 version, changes, changes_remaining = (
167 self.GetChangesFromTimestamp(request_types, 0))
168 self.assertEqual(self.model._BATCH_SIZE, len(changes))
169 self.assertEqual(self.model._BATCH_SIZE*2 + last_bit, changes_remaining)
170 version, changes, changes_remaining = (
171 self.GetChangesFromTimestamp(request_types, version))
172 self.assertEqual(self.model._BATCH_SIZE, len(changes))
173 self.assertEqual(self.model._BATCH_SIZE + last_bit, changes_remaining)
174 version, changes, changes_remaining = (
175 self.GetChangesFromTimestamp(request_types, version))
176 self.assertEqual(self.model._BATCH_SIZE, len(changes))
177 self.assertEqual(last_bit, changes_remaining)
178 version, changes, changes_remaining = (
179 self.GetChangesFromTimestamp(request_types, version))
180 self.assertEqual(last_bit, len(changes))
181 self.assertEqual(self.model._BATCH_SIZE*4 + last_bit, version)
182 self.assertEqual(0, changes_remaining)
183
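
testBatchSize walks the paging behavior one call at a time; the client-side pattern it exercises is a loop that keeps requesting from the last returned version until changes_remaining reaches zero. A minimal sketch of that loop, built only on the (version, changes, remaining) tuple returned by the GetChangesFromTimestamp helper above; the method name GetAllChanges is hypothetical and the sketch is illustrative only, not part of the reviewed file.

  def GetAllChanges(self, requested_types):
    """Illustrative sketch: drain paged GetUpdates until nothing remains."""
    all_changes = []
    version = 0
    while True:
      version, changes, remaining = (
          self.GetChangesFromTimestamp(requested_types, version))
      all_changes.extend(changes)
      if remaining == 0:
        return version, all_changes
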
184 def testCommitEachDataType(self):
185 for sync_type in chromiumsync.ALL_TYPES[1:]:
186 specifics = chromiumsync.GetDefaultEntitySpecifics(sync_type)
187 self.model = chromiumsync.SyncDataModel()
188 my_cache_guid = '112358132134'
189 parent = 'foobar'
190 commit_session = {}
191
192 # Start with a GetUpdates from timestamp 0, to populate permanent items.
193 original_version, original_changes, changes_remaining = (
194 self.GetChangesFromTimestamp([sync_type], 0))
195
196 def DoCommit(original=None, id_string='', name=None, parent=None,
197 position=0):
198 proto = sync_pb2.SyncEntity()
199 if original is not None:
200 proto.version = original.version
201 proto.id_string = original.id_string
202 proto.parent_id_string = original.parent_id_string
203 proto.name = original.name
204 else:
205 proto.id_string = id_string
206 proto.version = 0
207 proto.specifics.CopyFrom(specifics)
208 if name is not None:
209 proto.name = name
210 if parent:
211 proto.parent_id_string = parent.id_string
212 proto.insert_after_item_id = 'please discard'
213 proto.position_in_parent = position
214 proto.folder = True
215 proto.deleted = False
216 result = self.model.CommitEntry(proto, my_cache_guid, commit_session)
217 self.assertTrue(result)
218 return (proto, result)
219
220 # Commit a new item.
221 proto1, result1 = DoCommit(name='namae', id_string='Foo',
222 parent=original_changes[-1], position=100)
223 # Commit an item whose parent is another item (referenced via the
224 # pre-commit ID).
225 proto2, result2 = DoCommit(name='Secondo', id_string='Bar',
226 parent=proto1, position=-100)
227 # Commit a sibling of the second item.
228 proto3, result3 = DoCommit(name='Third!', id_string='Baz',
229 parent=proto1, position=-50)
230
231 self.assertEqual(3, len(commit_session))
232 for p, r in [(proto1, result1), (proto2, result2), (proto3, result3)]:
233 self.assertNotEqual(r.id_string, p.id_string)
234 self.assertEqual(r.originator_client_item_id, p.id_string)
235 self.assertEqual(r.originator_cache_guid, my_cache_guid)
236 self.assertTrue(r is not self.model._entries[r.id_string],
237 "Commit result didn't make a defensive copy.")
238 self.assertTrue(p is not self.model._entries[r.id_string],
239 "Commit result didn't make a defensive copy.")
240 self.assertEqual(commit_session.get(p.id_string), r.id_string)
241 self.assertTrue(r.version > original_version)
242 self.assertEqual(result1.parent_id_string, proto1.parent_id_string)
243 self.assertEqual(result2.parent_id_string, result1.id_string)
244 version, changes, remaining = (
245 self.GetChangesFromTimestamp([sync_type], original_version))
246 self.assertEqual(3, len(changes))
247 self.assertEqual(0, remaining)
248 self.assertEqual(original_version + 3, version)
249 self.assertEqual([result1, result2, result3], changes)
250 for c in changes:
251 self.assertTrue(c is not self.model._entries[c.id_string],
252 "GetChanges didn't make a defensive copy.")
253 self.assertTrue(result2.position_in_parent < result3.position_in_parent)
254 self.assertEqual(-100, result2.position_in_parent)
255
256 # Now update the items so that the second item is the parent of the
257 # first; with the first sandwiched between two new items (4 and 5).
258 # Do this in a new commit session, meaning we'll reference items from
259 # the first batch by their post-commit, server IDs.
260 commit_session = {}
261 old_cache_guid = my_cache_guid
262 my_cache_guid = 'A different GUID'
263 proto2b, result2b = DoCommit(original=result2,
264 parent=original_changes[-1])
265 proto4, result4 = DoCommit(id_string='ID4', name='Four',
266 parent=result2, position=-200)
267 proto1b, result1b = DoCommit(original=result1,
268 parent=result2, position=-150)
269 proto5, result5 = DoCommit(id_string='ID5', name='Five', parent=result2,
270 position=150)
271
272 self.assertEqual(2, len(commit_session), 'Only new items in second '
273 'batch should be in the session')
274 for p, r, original in [(proto2b, result2b, proto2),
275 (proto4, result4, proto4),
276 (proto1b, result1b, proto1),
277 (proto5, result5, proto5)]:
278 self.assertEqual(r.originator_client_item_id, original.id_string)
279 if original is not p:
280 self.assertEqual(r.id_string, p.id_string,
281 'Ids should be stable after first commit')
282 self.assertEqual(r.originator_cache_guid, old_cache_guid)
283 else:
284 self.assertNotEqual(r.id_string, p.id_string)
285 self.assertEqual(r.originator_cache_guid, my_cache_guid)
286 self.assertEqual(commit_session.get(p.id_string), r.id_string)
287 self.assertTrue(r is not self.model._entries[r.id_string],
288 "Commit result didn't make a defensive copy.")
289 self.assertTrue(p is not self.model._entries[r.id_string],
290 "Commit didn't make a defensive copy.")
291 self.assertTrue(r.version > p.version)
292 version, changes, remaining = (
293 self.GetChangesFromTimestamp([sync_type], original_version))
294 self.assertEqual(5, len(changes))
295 self.assertEqual(0, remaining)
296 self.assertEqual(original_version + 7, version)
297 self.assertEqual([result3, result2b, result4, result1b, result5], changes)
298 for c in changes:
299 self.assertTrue(c is not self.model._entries[c.id_string],
300 "GetChanges didn't make a defensive copy.")
301 self.assertTrue(result4.parent_id_string ==
302 result1b.parent_id_string ==
303 result5.parent_id_string ==
304 result2b.id_string)
305 self.assertTrue(result4.position_in_parent <
306 result1b.position_in_parent <
307 result5.position_in_parent)
308
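
The assertions in testCommitEachDataType encode how a client reconciles its temporary pre-commit IDs with the server-assigned ones: each commit result echoes the client's ID in originator_client_item_id together with the committing client's cache GUID, while id_string holds the new server ID. A minimal sketch of that mapping, using only the fields asserted on above; the function name MapLocalIdsToServerIds is hypothetical and the sketch is illustrative only.

def MapLocalIdsToServerIds(commit_results, my_cache_guid):
  """Illustrative sketch: map pre-commit client IDs to server IDs."""
  id_map = {}
  for result in commit_results:
    # Only entries this client originated carry its cache GUID.
    if result.originator_cache_guid == my_cache_guid:
      id_map[result.originator_client_item_id] = result.id_string
  return id_map
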
309 def testUpdateSieve(self):
310 # from_timestamp, legacy mode
311 autofill = chromiumsync.SYNC_TYPE_FIELDS['autofill']
312 theme = chromiumsync.SYNC_TYPE_FIELDS['theme']
313 msg = sync_pb2.GetUpdatesMessage()
314 msg.from_timestamp = 15412
315 msg.requested_types.autofill.SetInParent()
316 msg.requested_types.theme.SetInParent()
317
318 sieve = chromiumsync.UpdateSieve(msg)
319 self.assertEqual(sieve._state,
320 {chromiumsync.TOP_LEVEL: 15412,
321 chromiumsync.AUTOFILL: 15412,
322 chromiumsync.THEME: 15412})
323
324 response = sync_pb2.GetUpdatesResponse()
325 sieve.SaveProgress(15412, response)
326 self.assertEqual(0, len(response.new_progress_marker))
327 self.assertFalse(response.HasField('new_timestamp'))
328
329 response = sync_pb2.GetUpdatesResponse()
330 sieve.SaveProgress(15413, response)
331 self.assertEqual(0, len(response.new_progress_marker))
332 self.assertTrue(response.HasField('new_timestamp'))
333 self.assertEqual(15413, response.new_timestamp)
334
335 # Existing tokens
336 msg = sync_pb2.GetUpdatesMessage()
337 marker = msg.from_progress_marker.add()
338 marker.data_type_id = autofill.number
339 marker.token = pickle.dumps((15412, 1))
340 marker = msg.from_progress_marker.add()
341 marker.data_type_id = theme.number
342 marker.token = pickle.dumps((15413, 1))
343 sieve = chromiumsync.UpdateSieve(msg)
344 self.assertEqual(sieve._state,
345 {chromiumsync.TOP_LEVEL: 15412,
346 chromiumsync.AUTOFILL: 15412,
347 chromiumsync.THEME: 15413})
348
349 response = sync_pb2.GetUpdatesResponse()
350 sieve.SaveProgress(15413, response)
351 self.assertEqual(1, len(response.new_progress_marker))
352 self.assertFalse(response.HasField('new_timestamp'))
353 marker = response.new_progress_marker[0]
354 self.assertEqual(marker.data_type_id, autofill.number)
355 self.assertEqual(pickle.loads(marker.token), (15413, 1))
356 self.assertFalse(marker.HasField('timestamp_token_for_migration'))
357
358 # Empty tokens indicating from timestamp = 0
359 msg = sync_pb2.GetUpdatesMessage()
360 marker = msg.from_progress_marker.add()
361 marker.data_type_id = autofill.number
362 marker.token = pickle.dumps((412, 1))
363 marker = msg.from_progress_marker.add()
364 marker.data_type_id = theme.number
365 marker.token = ''
366 sieve = chromiumsync.UpdateSieve(msg)
367 self.assertEqual(sieve._state,
368 {chromiumsync.TOP_LEVEL: 0,
369 chromiumsync.AUTOFILL: 412,
370 chromiumsync.THEME: 0})
371 response = sync_pb2.GetUpdatesResponse()
372 sieve.SaveProgress(1, response)
373 self.assertEqual(1, len(response.new_progress_marker))
374 self.assertFalse(response.HasField('new_timestamp'))
375 marker = response.new_progress_marker[0]
376 self.assertEqual(marker.data_type_id, theme.number)
377 self.assertEqual(pickle.loads(marker.token), (1, 1))
378 self.assertFalse(marker.HasField('timestamp_token_for_migration'))
379
380 response = sync_pb2.GetUpdatesResponse()
381 sieve.SaveProgress(412, response)
382 self.assertEqual(1, len(response.new_progress_marker))
383 self.assertFalse(response.HasField('new_timestamp'))
384 marker = response.new_progress_marker[0]
385 self.assertEqual(marker.data_type_id, theme.number)
386 self.assertEqual(pickle.loads(marker.token), (412, 1))
387 self.assertFalse(marker.HasField('timestamp_token_for_migration'))
388
389 response = sync_pb2.GetUpdatesResponse()
390 sieve.SaveProgress(413, response)
391 self.assertEqual(2, len(response.new_progress_marker))
392 self.assertFalse(response.HasField('new_timestamp'))
393 marker = self.FindMarkerByNumber(response.new_progress_marker, theme)
394 self.assertEqual(pickle.loads(marker.token), (413, 1))
395 self.assertFalse(marker.HasField('timestamp_token_for_migration'))
396 marker = self.FindMarkerByNumber(response.new_progress_marker, autofill)
397 self.assertEqual(pickle.loads(marker.token), (413, 1))
398 self.assertFalse(marker.HasField('timestamp_token_for_migration'))
399
400 # Migration token timestamps (client gives timestamp, server returns token)
401 # These are for migrating from the old 'timestamp' protocol to the
402 # progress-marker protocol, and have nothing to do with the MIGRATION_DONE
403 # error code.
404 msg = sync_pb2.GetUpdatesMessage()
405 marker = msg.from_progress_marker.add()
406 marker.data_type_id = autofill.number
407 marker.timestamp_token_for_migration = 15213
408 marker = msg.from_progress_marker.add()
409 marker.data_type_id = theme.number
410 marker.timestamp_token_for_migration = 15211
411 sieve = chromiumsync.UpdateSieve(msg)
412 self.assertEqual(sieve._state,
413 {chromiumsync.TOP_LEVEL: 15211,
414 chromiumsync.AUTOFILL: 15213,
415 chromiumsync.THEME: 15211})
416 response = sync_pb2.GetUpdatesResponse()
417 sieve.SaveProgress(16000, response) # There were updates
418 self.assertEqual(2, len(response.new_progress_marker))
419 self.assertFalse(response.HasField('new_timestamp'))
420 marker = self.FindMarkerByNumber(response.new_progress_marker, theme)
421 self.assertEqual(pickle.loads(marker.token), (16000, 1))
422 self.assertFalse(marker.HasField('timestamp_token_for_migration'))
423 marker = self.FindMarkerByNumber(response.new_progress_marker, autofill)
424 self.assertEqual(pickle.loads(marker.token), (16000, 1))
425 self.assertFalse(marker.HasField('timestamp_token_for_migration'))
426
427 msg = sync_pb2.GetUpdatesMessage()
428 marker = msg.from_progress_marker.add()
429 marker.data_type_id = autofill.number
430 marker.timestamp_token_for_migration = 3000
431 marker = msg.from_progress_marker.add()
432 marker.data_type_id = theme.number
433 marker.timestamp_token_for_migration = 3000
434 sieve = chromiumsync.UpdateSieve(msg)
435 self.assertEqual(sieve._state,
436 {chromiumsync.TOP_LEVEL: 3000,
437 chromiumsync.AUTOFILL: 3000,
438 chromiumsync.THEME: 3000})
439 response = sync_pb2.GetUpdatesResponse()
440 sieve.SaveProgress(3000, response) # Already up to date
441 self.assertEqual(2, len(response.new_progress_marker))
442 self.assertFalse(response.HasField('new_timestamp'))
443 marker = self.FindMarkerByNumber(response.new_progress_marker, theme)
444 self.assertEqual(pickle.loads(marker.token), (3000, 1))
445 self.assertFalse(marker.HasField('timestamp_token_for_migration'))
446 marker = self.FindMarkerByNumber(response.new_progress_marker, autofill)
447 self.assertEqual(pickle.loads(marker.token), (3000, 1))
448 self.assertFalse(marker.HasField('timestamp_token_for_migration'))
449
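
Throughout testUpdateSieve the per-type progress token is an opaque string that happens to be a pickled (timestamp, migration_version) pair, with an empty token meaning "everything from timestamp zero". A minimal encode/decode sketch under that assumption; the function names are hypothetical and not part of chromiumsync_test.py.

def EncodeProgressToken(timestamp, migration_version):
  """Illustrative sketch: pack a progress token as these tests expect it."""
  return pickle.dumps((timestamp, migration_version))


def DecodeProgressToken(token):
  """Illustrative sketch: unpack a token; '' means 'everything from zero'."""
  if not token:
    # Defaulting the migration version to 1 is an assumption made for
    # illustration; the tests only pin down that '' maps to timestamp 0.
    return (0, 1)
  return pickle.loads(token)
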
450 def testCheckRaiseTransientError(self):
451 testserver = chromiumsync.TestServer()
452 http_code, raw_response = testserver.HandleSetTransientError()
453 self.assertEqual(http_code, 200)
454 try:
455 testserver.CheckTransientError()
456 self.fail('Should have raised transient error exception')
457 except chromiumsync.TransientError:
458 self.assertTrue(testserver.transient_error)
459
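
The try/except/fail pattern in testCheckRaiseTransientError can also be written with unittest's assertRaises, which fails automatically if no exception is raised. A sketch of the more compact form, using only APIs already exercised in this file; the test name is hypothetical and the sketch is illustrative only.

  def testCheckRaiseTransientErrorCompact(self):
    """Illustrative sketch: the same check via assertRaises."""
    testserver = chromiumsync.TestServer()
    http_code, _ = testserver.HandleSetTransientError()
    self.assertEqual(http_code, 200)
    self.assertRaises(chromiumsync.TransientError,
                      testserver.CheckTransientError)
    self.assertTrue(testserver.transient_error)
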
460 def testUpdateSieveStoreMigration(self):
461 autofill = chromiumsync.SYNC_TYPE_FIELDS['autofill']
462 theme = chromiumsync.SYNC_TYPE_FIELDS['theme']
463 migrator = chromiumsync.MigrationHistory()
464 msg = sync_pb2.GetUpdatesMessage()
465 marker = msg.from_progress_marker.add()
466 marker.data_type_id = autofill.number
467 marker.token = pickle.dumps((15412, 1))
468 marker = msg.from_progress_marker.add()
469 marker.data_type_id = theme.number
470 marker.token = pickle.dumps((15413, 1))
471 sieve = chromiumsync.UpdateSieve(msg, migrator)
472 sieve.CheckMigrationState()
473
474 migrator.Bump([chromiumsync.BOOKMARK, chromiumsync.PASSWORD]) # v=2
475 sieve = chromiumsync.UpdateSieve(msg, migrator)
476 sieve.CheckMigrationState()
477 self.assertEqual(sieve._state,
478 {chromiumsync.TOP_LEVEL: 15412,
479 chromiumsync.AUTOFILL: 15412,
480 chromiumsync.THEME: 15413})
481
482 migrator.Bump([chromiumsync.AUTOFILL, chromiumsync.PASSWORD]) # v=3
483 sieve = chromiumsync.UpdateSieve(msg, migrator)
484 try:
485 sieve.CheckMigrationState()
486 self.fail('Should have raised.')
487 except chromiumsync.MigrationDoneError, error:
488 # We want this to happen.
489 self.assertEqual([chromiumsync.AUTOFILL], error.datatypes)
490
491 msg = sync_pb2.GetUpdatesMessage()
492 marker = msg.from_progress_marker.add()
493 marker.data_type_id = autofill.number
494 marker.token = ''
495 marker = msg.from_progress_marker.add()
496 marker.data_type_id = theme.number
497 marker.token = pickle.dumps((15413, 1))
498 sieve = chromiumsync.UpdateSieve(msg, migrator)
499 sieve.CheckMigrationState()
500 response = sync_pb2.GetUpdatesResponse()
501 sieve.SaveProgress(15412, response) # There were updates
502 self.assertEqual(1, len(response.new_progress_marker))
503 self.assertFalse(response.HasField('new_timestamp'))
504 self.assertFalse(marker.HasField('timestamp_token_for_migration'))
505 marker = self.FindMarkerByNumber(response.new_progress_marker, autofill)
506 self.assertEqual(pickle.loads(marker.token), (15412, 3))
507 self.assertFalse(marker.HasField('timestamp_token_for_migration'))
508 msg = sync_pb2.GetUpdatesMessage()
509 marker = msg.from_progress_marker.add()
510 marker.data_type_id = autofill.number
511 marker.token = pickle.dumps((15412, 3))
512 marker = msg.from_progress_marker.add()
513 marker.data_type_id = theme.number
514 marker.token = pickle.dumps((15413, 1))
515 sieve = chromiumsync.UpdateSieve(msg, migrator)
516 sieve.CheckMigrationState()
517
518 migrator.Bump([chromiumsync.THEME, chromiumsync.AUTOFILL]) # v=4
519 migrator.Bump([chromiumsync.AUTOFILL]) # v=5
520 sieve = chromiumsync.UpdateSieve(msg, migrator)
521 try:
522 sieve.CheckMigrationState()
523 self.fail("Should have raised.")
524 except chromiumsync.MigrationDoneError, error:
525 # We want this to happen.
526 self.assertEqual(set([chromiumsync.THEME, chromiumsync.AUTOFILL]),
527 set(error.datatypes))
528 msg = sync_pb2.GetUpdatesMessage()
529 marker = msg.from_progress_marker.add()
530 marker.data_type_id = autofill.number
531 marker.token = ''
532 marker = msg.from_progress_marker.add()
533 marker.data_type_id = theme.number
534 marker.token = pickle.dumps((15413, 1))
535 sieve = chromiumsync.UpdateSieve(msg, migrator)
536 try:
537 sieve.CheckMigrationState()
538 self.fail("Should have raised.")
539 except chromiumsync.MigrationDoneError, error:
540 # We want this to happen.
541 self.assertEqual([chromiumsync.THEME], error.datatypes)
542
543 msg = sync_pb2.GetUpdatesMessage()
544 marker = msg.from_progress_marker.add()
545 marker.data_type_id = autofill.number
546 marker.token = ''
547 marker = msg.from_progress_marker.add()
548 marker.data_type_id = theme.number
549 marker.token = ''
550 sieve = chromiumsync.UpdateSieve(msg, migrator)
551 sieve.CheckMigrationState()
552 response = sync_pb2.GetUpdatesResponse()
553 sieve.SaveProgress(15412, response) # There were updates
554 self.assertEqual(2, len(response.new_progress_marker))
555 self.assertFalse(response.HasField('new_timestamp'))
556 self.assertFalse(marker.HasField('timestamp_token_for_migration'))
557 marker = self.FindMarkerByNumber(response.new_progress_marker, autofill)
558 self.assertEqual(pickle.loads(marker.token), (15412, 5))
559 self.assertFalse(marker.HasField('timestamp_token_for_migration'))
560 marker = self.FindMarkerByNumber(response.new_progress_marker, theme)
561 self.assertEqual(pickle.loads(marker.token), (15412, 4))
562 self.assertFalse(marker.HasField('timestamp_token_for_migration'))
563 msg = sync_pb2.GetUpdatesMessage()
564 marker = msg.from_progress_marker.add()
565 marker.data_type_id = autofill.number
566 marker.token = pickle.dumps((15412, 5))
567 marker = msg.from_progress_marker.add()
568 marker.data_type_id = theme.number
569 marker.token = pickle.dumps((15413, 4))
570 sieve = chromiumsync.UpdateSieve(msg, migrator)
571 sieve.CheckMigrationState()
572
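
The scenarios in testUpdateSieveStoreMigration pin down the contract behind CheckMigrationState: a non-empty token whose migration version is older than the server's current version for that datatype must surface MigrationDoneError naming exactly the stale types, while an empty token never does. A minimal sketch of that rule, inferred from this test rather than from chromiumsync.py; the function name and dict parameters are hypothetical and the sketch is illustrative only.

def FindMigratedDatatypes(tokens_by_type, current_migration_versions):
  """Illustrative sketch: datatypes whose tokens predate a migration.

  tokens_by_type: {datatype: token}, tokens being pickled
      (timestamp, migration_version) pairs; '' means 'start from zero'.
  current_migration_versions: {datatype: current version on the server}.
  """
  stale = []
  for datatype, token in tokens_by_type.items():
    if not token:
      continue  # Empty tokens never trigger MigrationDoneError in these tests.
    _, token_version = pickle.loads(token)
    if token_version < current_migration_versions[datatype]:
      stale.append(datatype)
  # UpdateSieve.CheckMigrationState raises MigrationDoneError listing these.
  return stale
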
573 def testCreateSyncedBookmarks(self):
574 version1, changes, remaining = (
575 self.GetChangesFromTimestamp([chromiumsync.BOOKMARK], 0))
576 id_string = self.model._MakeCurrentId(chromiumsync.BOOKMARK,
577 '<server tag>synced_bookmarks')
578 self.assertFalse(self.model._ItemExists(id_string))
579 self._expect_synced_bookmarks_folder = True
580 self.model.TriggerCreateSyncedBookmarks()
581 self.assertTrue(self.model._ItemExists(id_string))
582
583 # Check that the version changed when the folder was created and the only
584 # change was the folder creation.
585 version2, changes, remaining = (
586 self.GetChangesFromTimestamp([chromiumsync.BOOKMARK], version1))
587 self.assertEqual(len(changes), 1)
588 self.assertEqual(changes[0].id_string, id_string)
589 self.assertNotEqual(version1, version2)
590 self.assertEqual(
591 self.ExpectedPermanentItemCount(chromiumsync.BOOKMARK),
592 version2)
593
594 # Ensure getting from timestamp 0 includes the folder.
595 version, changes, remaining = (
596 self.GetChangesFromTimestamp([chromiumsync.BOOKMARK], 0))
597 self.assertEqual(
598 self.ExpectedPermanentItemCount(chromiumsync.BOOKMARK),
599 len(changes))
600 self.assertEqual(version2, version)
601
602 def testAcknowledgeManagedUser(self):
603 # Create permanent items.
604 self.GetChangesFromTimestamp([chromiumsync.MANAGED_USER], 0)
605 proto = sync_pb2.SyncEntity()
606 proto.id_string = 'abcd'
607 proto.version = 0
608
609 # Make sure the managed_user field exists.
610 proto.specifics.managed_user.acknowledged = False
611 self.assertTrue(proto.specifics.HasField('managed_user'))
612 self.AddToModel(proto)
613 version1, changes1, remaining1 = (
614 self.GetChangesFromTimestamp([chromiumsync.MANAGED_USER], 0))
615 for change in changes1:
616 self.assertTrue(not change.specifics.managed_user.acknowledged)
617
618 # Turn on managed user acknowledgement
619 self.model.acknowledge_managed_users = True
620
621 version2, changes2, remaining2 = (
622 self.GetChangesFromTimestamp([chromiumsync.MANAGED_USER], 0))
623 for change in changes2:
624 self.assertTrue(change.specifics.managed_user.acknowledged)
625
626 def testGetKey(self):
627 [key1] = self.model.GetKeystoreKeys()
628 [key2] = self.model.GetKeystoreKeys()
629 self.assertTrue(len(key1))
630 self.assertEqual(key1, key2)
631
632 # Trigger the rotation. A subsequent GetUpdates should return the nigori
633 # node (whose timestamp was bumped by the rotation).
634 version1, changes, remaining = (
635 self.GetChangesFromTimestamp([chromiumsync.NIGORI], 0))
636 self.model.TriggerRotateKeystoreKeys()
637 version2, changes, remaining = (
638 self.GetChangesFromTimestamp([chromiumsync.NIGORI], version1))
639 self.assertNotEqual(version1, version2)
640 self.assertEqual(len(changes), 1)
641 self.assertEqual(changes[0].name, "Nigori")
642
643 # The current keys should contain the old keys, with the new key appended.
644 [key1, key3] = self.model.GetKeystoreKeys()
645 self.assertEqual(key1, key2)
646 self.assertNotEqual(key1, key3)
647 self.assertTrue(len(key3) > 0)
648
649 def testTriggerEnableKeystoreEncryption(self):
650 version1, changes, remaining = (
651 self.GetChangesFromTimestamp([chromiumsync.EXPERIMENTS], 0))
652 keystore_encryption_id_string = (
653 self.model._ClientTagToId(
654 chromiumsync.EXPERIMENTS,
655 chromiumsync.KEYSTORE_ENCRYPTION_EXPERIMENT_TAG))
656
657 self.assertFalse(self.model._ItemExists(keystore_encryption_id_string))
658 self.model.TriggerEnableKeystoreEncryption()
659 self.assertTrue(self.model._ItemExists(keystore_encryption_id_string))
660
661 # The creation of the experiment should be downloaded on the next
662 # GetUpdates.
663 version2, changes, remaining = (
664 self.GetChangesFromTimestamp([chromiumsync.EXPERIMENTS], version1))
665 self.assertEqual(len(changes), 1)
666 self.assertEqual(changes[0].id_string, keystore_encryption_id_string)
667 self.assertNotEqual(version1, version2)
668
669 # Verify the experiment was created properly and is enabled.
670 self.assertEqual(chromiumsync.KEYSTORE_ENCRYPTION_EXPERIMENT_TAG,
671 changes[0].client_defined_unique_tag)
672 self.assertTrue(changes[0].HasField("specifics"))
673 self.assertTrue(changes[0].specifics.HasField("experiments"))
674 self.assertTrue(
675 changes[0].specifics.experiments.HasField("keystore_encryption"))
676 self.assertTrue(
677 changes[0].specifics.experiments.keystore_encryption.enabled)
678
679 if __name__ == '__main__':
680 unittest.main()