#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Tests exercising chromiumsync and SyncDataModel."""

import pickle
import unittest

import autofill_specifics_pb2
import bookmark_specifics_pb2
import chromiumsync
import sync_pb2
import theme_specifics_pb2

class SyncDataModelTest(unittest.TestCase):
  def setUp(self):
    self.model = chromiumsync.SyncDataModel()
    # The Synced Bookmarks folder is not created by default.
    self._expect_synced_bookmarks_folder = False

  def AddToModel(self, proto):
    self.model._entries[proto.id_string] = proto

  def GetChangesFromTimestamp(self, requested_types, timestamp):
    message = sync_pb2.GetUpdatesMessage()
    message.from_timestamp = timestamp
    for data_type in requested_types:
      getattr(message.requested_types,
              chromiumsync.SYNC_TYPE_TO_DESCRIPTOR[
                  data_type].name).SetInParent()
    return self.model.GetChanges(
        chromiumsync.UpdateSieve(message, self.model.migration_history))

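  # A minimal usage sketch for the helper above, mirroring how the tests
  # below call it: the underlying GetChanges yields a (new_version, entries,
  # changes_remaining) triple, e.g.
  #
  #   version, changes, remaining = (
  #       self.GetChangesFromTimestamp([chromiumsync.BOOKMARK], 0))
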
  def FindMarkerByNumber(self, markers, datatype):
    """Search a list of progress markers and find the one for a datatype."""
    for marker in markers:
      if marker.data_type_id == datatype.number:
        return marker
    self.fail('Required marker not found: %s' % datatype.name)

  def testPermanentItemSpecs(self):
    specs = chromiumsync.SyncDataModel._PERMANENT_ITEM_SPECS

    declared_specs = set(['0'])
    for spec in specs:
      self.assertTrue(spec.parent_tag in declared_specs, 'parent tags must '
                      'be declared before use')
      declared_specs.add(spec.tag)

    unique_datatypes = set([x.sync_type for x in specs])
    self.assertEqual(unique_datatypes,
                     set(chromiumsync.ALL_TYPES[1:]),
                     'Every sync datatype should have a permanent folder '
                     'associated with it')

  def testSaveEntry(self):
    proto = sync_pb2.SyncEntity()
    proto.id_string = 'abcd'
    proto.version = 0
    self.assertFalse(self.model._ItemExists(proto.id_string))
    self.model._SaveEntry(proto)
    self.assertEqual(1, proto.version)
    self.assertTrue(self.model._ItemExists(proto.id_string))
    self.model._SaveEntry(proto)
    self.assertEqual(2, proto.version)
    proto.version = 0
    self.assertTrue(self.model._ItemExists(proto.id_string))
    self.assertEqual(2, self.model._entries[proto.id_string].version)

  def testCreatePermanentItems(self):
    self.model._CreateDefaultPermanentItems(chromiumsync.ALL_TYPES)
    self.assertEqual(len(chromiumsync.ALL_TYPES) + 1,
                     len(self.model._entries))

  def ExpectedPermanentItemCount(self, sync_type):
    if sync_type == chromiumsync.BOOKMARK:
      if self._expect_synced_bookmarks_folder:
        return 4
      else:
        return 3
    else:
      return 1

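  # A worked example of the counts above (the folder names are not asserted
  # anywhere in this file, only the counts): per _PERMANENT_ITEM_SPECS,
  # BOOKMARK presumably gets a top-level Bookmarks folder plus two permanent
  # children, so 3 by default and 4 once the optional Synced Bookmarks
  # folder exists; every other datatype has a single permanent folder, so 1.
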
  def testGetChangesFromTimestampZeroForEachType(self):
    all_types = chromiumsync.ALL_TYPES[1:]
    for sync_type in all_types:
      self.model = chromiumsync.SyncDataModel()
      request_types = [sync_type]

      version, changes, remaining = (
          self.GetChangesFromTimestamp(request_types, 0))

      expected_count = self.ExpectedPermanentItemCount(sync_type)
      self.assertEqual(expected_count, version)
      self.assertEqual(expected_count, len(changes))
      for change in changes:
        self.assertTrue(change.HasField('server_defined_unique_tag'))
        self.assertEqual(change.version, change.sync_timestamp)
        self.assertTrue(change.version <= version)

      # Test idempotence: another GetUpdates from ts=0 shouldn't recreate.
      version, changes, remaining = (
          self.GetChangesFromTimestamp(request_types, 0))
      self.assertEqual(expected_count, version)
      self.assertEqual(expected_count, len(changes))
      self.assertEqual(0, remaining)

      # Doing a wider GetUpdates from timestamp zero shouldn't recreate
      # either.
      new_version, changes, remaining = (
          self.GetChangesFromTimestamp(all_types, 0))
      if self._expect_synced_bookmarks_folder:
        self.assertEqual(len(chromiumsync.SyncDataModel._PERMANENT_ITEM_SPECS),
                         new_version)
      else:
        self.assertEqual(
            len(chromiumsync.SyncDataModel._PERMANENT_ITEM_SPECS) - 1,
            new_version)
      self.assertEqual(new_version, len(changes))
      self.assertEqual(0, remaining)
      version, changes, remaining = (
          self.GetChangesFromTimestamp(request_types, 0))
      self.assertEqual(new_version, version)
      self.assertEqual(expected_count, len(changes))
      self.assertEqual(0, remaining)

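  # The next test walks GetUpdates paging: each call returns at most
  # _BATCH_SIZE entries plus a changes_remaining count, and the returned
  # version is the timestamp to resume from. A sketch of the client-side
  # loop this implies (illustrative only, not part of the test):
  #
  #   timestamp = 0
  #   while True:
  #     timestamp, changes, remaining = (
  #         self.GetChangesFromTimestamp(request_types, timestamp))
  #     if remaining == 0:
  #       break
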
  def testBatchSize(self):
    for sync_type in chromiumsync.ALL_TYPES[1:]:
      specifics = chromiumsync.GetDefaultEntitySpecifics(sync_type)
      self.model = chromiumsync.SyncDataModel()
      request_types = [sync_type]

      for i in range(self.model._BATCH_SIZE*3):
        entry = sync_pb2.SyncEntity()
        entry.id_string = 'batch test %d' % i
        entry.specifics.CopyFrom(specifics)
        self.model._SaveEntry(entry)
      last_bit = self.ExpectedPermanentItemCount(sync_type)
      version, changes, changes_remaining = (
          self.GetChangesFromTimestamp(request_types, 0))
      self.assertEqual(self.model._BATCH_SIZE, version)
      self.assertEqual(self.model._BATCH_SIZE*2 + last_bit, changes_remaining)
      version, changes, changes_remaining = (
          self.GetChangesFromTimestamp(request_types, version))
      self.assertEqual(self.model._BATCH_SIZE*2, version)
      self.assertEqual(self.model._BATCH_SIZE + last_bit, changes_remaining)
      version, changes, changes_remaining = (
          self.GetChangesFromTimestamp(request_types, version))
      self.assertEqual(self.model._BATCH_SIZE*3, version)
      self.assertEqual(last_bit, changes_remaining)
      version, changes, changes_remaining = (
          self.GetChangesFromTimestamp(request_types, version))
      self.assertEqual(self.model._BATCH_SIZE*3 + last_bit, version)
      self.assertEqual(0, changes_remaining)

      # Now delete a third of the items.
      for i in xrange(self.model._BATCH_SIZE*3 - 1, 0, -3):
        entry = sync_pb2.SyncEntity()
        entry.id_string = 'batch test %d' % i
        entry.deleted = True
        self.model._SaveEntry(entry)

      # The batch counts shouldn't change.
      version, changes, changes_remaining = (
          self.GetChangesFromTimestamp(request_types, 0))
      self.assertEqual(self.model._BATCH_SIZE, len(changes))
      self.assertEqual(self.model._BATCH_SIZE*2 + last_bit, changes_remaining)
      version, changes, changes_remaining = (
          self.GetChangesFromTimestamp(request_types, version))
      self.assertEqual(self.model._BATCH_SIZE, len(changes))
      self.assertEqual(self.model._BATCH_SIZE + last_bit, changes_remaining)
      version, changes, changes_remaining = (
          self.GetChangesFromTimestamp(request_types, version))
      self.assertEqual(self.model._BATCH_SIZE, len(changes))
      self.assertEqual(last_bit, changes_remaining)
      version, changes, changes_remaining = (
          self.GetChangesFromTimestamp(request_types, version))
      self.assertEqual(last_bit, len(changes))
      self.assertEqual(self.model._BATCH_SIZE*4 + last_bit, version)
      self.assertEqual(0, changes_remaining)

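  # The next test exercises the commit round trip. The contract it asserts,
  # summarized: CommitEntry assigns each new entity a fresh server ID, echoes
  # the client's original ID in originator_client_item_id along with the
  # committing client's cache GUID in originator_cache_guid, and records the
  # client-ID to server-ID mapping in commit_session so later entries in the
  # same batch can name a not-yet-committed parent.
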
  def testCommitEachDataType(self):
    for sync_type in chromiumsync.ALL_TYPES[1:]:
      specifics = chromiumsync.GetDefaultEntitySpecifics(sync_type)
      self.model = chromiumsync.SyncDataModel()
      my_cache_guid = '112358132134'
      parent = 'foobar'
      commit_session = {}

      # Start with a GetUpdates from timestamp 0, to populate permanent
      # items.
      original_version, original_changes, changes_remaining = (
          self.GetChangesFromTimestamp([sync_type], 0))

      def DoCommit(original=None, id_string='', name=None, parent=None,
                   position=0):
        proto = sync_pb2.SyncEntity()
        if original is not None:
          proto.version = original.version
          proto.id_string = original.id_string
          proto.parent_id_string = original.parent_id_string
          proto.name = original.name
        else:
          proto.id_string = id_string
          proto.version = 0
        proto.specifics.CopyFrom(specifics)
        if name is not None:
          proto.name = name
        if parent:
          proto.parent_id_string = parent.id_string
        proto.insert_after_item_id = 'please discard'
        proto.position_in_parent = position
        proto.folder = True
        proto.deleted = False
        result = self.model.CommitEntry(proto, my_cache_guid, commit_session)
        self.assertTrue(result)
        return (proto, result)

      # Commit a new item.
      proto1, result1 = DoCommit(name='namae', id_string='Foo',
                                 parent=original_changes[-1], position=100)
      # Commit an item whose parent is another item (referenced via the
      # pre-commit ID).
      proto2, result2 = DoCommit(name='Secondo', id_string='Bar',
                                 parent=proto1, position=-100)
      # Commit a sibling of the second item.
      proto3, result3 = DoCommit(name='Third!', id_string='Baz',
                                 parent=proto1, position=-50)

      self.assertEqual(3, len(commit_session))
      for p, r in [(proto1, result1), (proto2, result2), (proto3, result3)]:
        self.assertNotEqual(r.id_string, p.id_string)
        self.assertEqual(r.originator_client_item_id, p.id_string)
        self.assertEqual(r.originator_cache_guid, my_cache_guid)
        self.assertTrue(r is not self.model._entries[r.id_string],
                        "Commit result didn't make a defensive copy.")
        self.assertTrue(p is not self.model._entries[r.id_string],
                        "Commit result didn't make a defensive copy.")
        self.assertEqual(commit_session.get(p.id_string), r.id_string)
        self.assertTrue(r.version > original_version)
      self.assertEqual(result1.parent_id_string, proto1.parent_id_string)
      self.assertEqual(result2.parent_id_string, result1.id_string)
      version, changes, remaining = (
          self.GetChangesFromTimestamp([sync_type], original_version))
      self.assertEqual(3, len(changes))
      self.assertEqual(0, remaining)
      self.assertEqual(original_version + 3, version)
      self.assertEqual([result1, result2, result3], changes)
      for c in changes:
        self.assertTrue(c is not self.model._entries[c.id_string],
                        "GetChanges didn't make a defensive copy.")
      self.assertTrue(result2.position_in_parent < result3.position_in_parent)
      self.assertEqual(-100, result2.position_in_parent)

      # Now update the items so that the second item is the parent of the
      # first, with the first sandwiched between two new items (4 and 5).
      # Do this in a new commit session, meaning we'll reference items from
      # the first batch by their post-commit, server IDs.
      commit_session = {}
      old_cache_guid = my_cache_guid
      my_cache_guid = 'A different GUID'
      proto2b, result2b = DoCommit(original=result2,
                                   parent=original_changes[-1])
      proto4, result4 = DoCommit(id_string='ID4', name='Four',
                                 parent=result2, position=-200)
      proto1b, result1b = DoCommit(original=result1,
                                   parent=result2, position=-150)
      proto5, result5 = DoCommit(id_string='ID5', name='Five', parent=result2,
                                 position=150)

      self.assertEqual(2, len(commit_session), 'Only new items in second '
                       'batch should be in the session')
      for p, r, original in [(proto2b, result2b, proto2),
                             (proto4, result4, proto4),
                             (proto1b, result1b, proto1),
                             (proto5, result5, proto5)]:
        self.assertEqual(r.originator_client_item_id, original.id_string)
        if original is not p:
          self.assertEqual(r.id_string, p.id_string,
                           'Ids should be stable after first commit')
          self.assertEqual(r.originator_cache_guid, old_cache_guid)
        else:
          self.assertNotEqual(r.id_string, p.id_string)
          self.assertEqual(r.originator_cache_guid, my_cache_guid)
        self.assertEqual(commit_session.get(p.id_string), r.id_string)
        self.assertTrue(r is not self.model._entries[r.id_string],
                        "Commit result didn't make a defensive copy.")
        self.assertTrue(p is not self.model._entries[r.id_string],
                        "Commit didn't make a defensive copy.")
        self.assertTrue(r.version > p.version)
      version, changes, remaining = (
          self.GetChangesFromTimestamp([sync_type], original_version))
      self.assertEqual(5, len(changes))
      self.assertEqual(0, remaining)
      self.assertEqual(original_version + 7, version)
      self.assertEqual([result3, result2b, result4, result1b, result5],
                       changes)
      for c in changes:
        self.assertTrue(c is not self.model._entries[c.id_string],
                        "GetChanges didn't make a defensive copy.")
      self.assertTrue(result4.parent_id_string ==
                      result1b.parent_id_string ==
                      result5.parent_id_string ==
                      result2b.id_string)
      self.assertTrue(result4.position_in_parent <
                      result1b.position_in_parent <
                      result5.position_in_parent)

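  # In this test server a progress-marker token is simply a pickled
  # (timestamp, migration version) pair, which is why the tests below build
  # and inspect tokens with pickle directly, e.g.:
  #
  #   marker.token = pickle.dumps((15412, 1))
  #   pickle.loads(marker.token)  # => (15412, 1)
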
  def testUpdateSieve(self):
    # from_timestamp, legacy mode
    autofill = chromiumsync.SYNC_TYPE_FIELDS['autofill']
    theme = chromiumsync.SYNC_TYPE_FIELDS['theme']
    msg = sync_pb2.GetUpdatesMessage()
    msg.from_timestamp = 15412
    msg.requested_types.autofill.SetInParent()
    msg.requested_types.theme.SetInParent()

    sieve = chromiumsync.UpdateSieve(msg)
    self.assertEqual(sieve._state,
                     {chromiumsync.TOP_LEVEL: 15412,
                      chromiumsync.AUTOFILL: 15412,
                      chromiumsync.THEME: 15412})

    response = sync_pb2.GetUpdatesResponse()
    sieve.SaveProgress(15412, response)
    self.assertEqual(0, len(response.new_progress_marker))
    self.assertFalse(response.HasField('new_timestamp'))

    response = sync_pb2.GetUpdatesResponse()
    sieve.SaveProgress(15413, response)
    self.assertEqual(0, len(response.new_progress_marker))
    self.assertTrue(response.HasField('new_timestamp'))
    self.assertEqual(15413, response.new_timestamp)

    # Existing tokens
    msg = sync_pb2.GetUpdatesMessage()
    marker = msg.from_progress_marker.add()
    marker.data_type_id = autofill.number
    marker.token = pickle.dumps((15412, 1))
    marker = msg.from_progress_marker.add()
    marker.data_type_id = theme.number
    marker.token = pickle.dumps((15413, 1))
    sieve = chromiumsync.UpdateSieve(msg)
    self.assertEqual(sieve._state,
                     {chromiumsync.TOP_LEVEL: 15412,
                      chromiumsync.AUTOFILL: 15412,
                      chromiumsync.THEME: 15413})

    response = sync_pb2.GetUpdatesResponse()
    sieve.SaveProgress(15413, response)
    self.assertEqual(1, len(response.new_progress_marker))
    self.assertFalse(response.HasField('new_timestamp'))
    marker = response.new_progress_marker[0]
    self.assertEqual(marker.data_type_id, autofill.number)
    self.assertEqual(pickle.loads(marker.token), (15413, 1))
    self.assertFalse(marker.HasField('timestamp_token_for_migration'))

    # Empty tokens indicating from timestamp = 0
    msg = sync_pb2.GetUpdatesMessage()
    marker = msg.from_progress_marker.add()
    marker.data_type_id = autofill.number
    marker.token = pickle.dumps((412, 1))
    marker = msg.from_progress_marker.add()
    marker.data_type_id = theme.number
    marker.token = ''
    sieve = chromiumsync.UpdateSieve(msg)
    self.assertEqual(sieve._state,
                     {chromiumsync.TOP_LEVEL: 0,
                      chromiumsync.AUTOFILL: 412,
                      chromiumsync.THEME: 0})
    response = sync_pb2.GetUpdatesResponse()
    sieve.SaveProgress(1, response)
    self.assertEqual(1, len(response.new_progress_marker))
    self.assertFalse(response.HasField('new_timestamp'))
    marker = response.new_progress_marker[0]
    self.assertEqual(marker.data_type_id, theme.number)
    self.assertEqual(pickle.loads(marker.token), (1, 1))
    self.assertFalse(marker.HasField('timestamp_token_for_migration'))

    response = sync_pb2.GetUpdatesResponse()
    sieve.SaveProgress(412, response)
    self.assertEqual(1, len(response.new_progress_marker))
    self.assertFalse(response.HasField('new_timestamp'))
    marker = response.new_progress_marker[0]
    self.assertEqual(marker.data_type_id, theme.number)
    self.assertEqual(pickle.loads(marker.token), (412, 1))
    self.assertFalse(marker.HasField('timestamp_token_for_migration'))

    response = sync_pb2.GetUpdatesResponse()
    sieve.SaveProgress(413, response)
    self.assertEqual(2, len(response.new_progress_marker))
    self.assertFalse(response.HasField('new_timestamp'))
    marker = self.FindMarkerByNumber(response.new_progress_marker, theme)
    self.assertEqual(pickle.loads(marker.token), (413, 1))
    self.assertFalse(marker.HasField('timestamp_token_for_migration'))
    marker = self.FindMarkerByNumber(response.new_progress_marker, autofill)
    self.assertEqual(pickle.loads(marker.token), (413, 1))
    self.assertFalse(marker.HasField('timestamp_token_for_migration'))

    # Migration token timestamps (client gives timestamp, server returns
    # token). These are for migrating from the old 'timestamp' protocol to
    # the progress-marker protocol, and have nothing to do with the
    # MIGRATION_DONE error code.
    msg = sync_pb2.GetUpdatesMessage()
    marker = msg.from_progress_marker.add()
    marker.data_type_id = autofill.number
    marker.timestamp_token_for_migration = 15213
    marker = msg.from_progress_marker.add()
    marker.data_type_id = theme.number
    marker.timestamp_token_for_migration = 15211
    sieve = chromiumsync.UpdateSieve(msg)
    self.assertEqual(sieve._state,
                     {chromiumsync.TOP_LEVEL: 15211,
                      chromiumsync.AUTOFILL: 15213,
                      chromiumsync.THEME: 15211})
    response = sync_pb2.GetUpdatesResponse()
    sieve.SaveProgress(16000, response)  # There were updates.
    self.assertEqual(2, len(response.new_progress_marker))
    self.assertFalse(response.HasField('new_timestamp'))
    marker = self.FindMarkerByNumber(response.new_progress_marker, theme)
    self.assertEqual(pickle.loads(marker.token), (16000, 1))
    self.assertFalse(marker.HasField('timestamp_token_for_migration'))
    marker = self.FindMarkerByNumber(response.new_progress_marker, autofill)
    self.assertEqual(pickle.loads(marker.token), (16000, 1))
    self.assertFalse(marker.HasField('timestamp_token_for_migration'))

    msg = sync_pb2.GetUpdatesMessage()
    marker = msg.from_progress_marker.add()
    marker.data_type_id = autofill.number
    marker.timestamp_token_for_migration = 3000
    marker = msg.from_progress_marker.add()
    marker.data_type_id = theme.number
    marker.timestamp_token_for_migration = 3000
    sieve = chromiumsync.UpdateSieve(msg)
    self.assertEqual(sieve._state,
                     {chromiumsync.TOP_LEVEL: 3000,
                      chromiumsync.AUTOFILL: 3000,
                      chromiumsync.THEME: 3000})
    response = sync_pb2.GetUpdatesResponse()
    sieve.SaveProgress(3000, response)  # Already up to date.
    self.assertEqual(2, len(response.new_progress_marker))
    self.assertFalse(response.HasField('new_timestamp'))
    marker = self.FindMarkerByNumber(response.new_progress_marker, theme)
    self.assertEqual(pickle.loads(marker.token), (3000, 1))
    self.assertFalse(marker.HasField('timestamp_token_for_migration'))
    marker = self.FindMarkerByNumber(response.new_progress_marker, autofill)
    self.assertEqual(pickle.loads(marker.token), (3000, 1))
    self.assertFalse(marker.HasField('timestamp_token_for_migration'))

  def testCheckRaiseTransientError(self):
    testserver = chromiumsync.TestServer()
    http_code, raw_response = testserver.HandleSetTransientError()
    self.assertEqual(http_code, 200)
    try:
      testserver.CheckTransientError()
      self.fail('Should have raised transient error exception')
    except chromiumsync.TransientError:
      self.assertTrue(testserver.transient_error)

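  # The next test drives MigrationHistory: each Bump marks the listed
  # datatypes as migrated at a new version (tracked as v=2, v=3, ... in the
  # comments below), and CheckMigrationState raises MigrationDoneError,
  # naming the stale datatypes, whenever a request carries a token whose
  # embedded version predates the latest migration for that type.
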
  def testUpdateSieveStoreMigration(self):
    autofill = chromiumsync.SYNC_TYPE_FIELDS['autofill']
    theme = chromiumsync.SYNC_TYPE_FIELDS['theme']
    migrator = chromiumsync.MigrationHistory()
    msg = sync_pb2.GetUpdatesMessage()
    marker = msg.from_progress_marker.add()
    marker.data_type_id = autofill.number
    marker.token = pickle.dumps((15412, 1))
    marker = msg.from_progress_marker.add()
    marker.data_type_id = theme.number
    marker.token = pickle.dumps((15413, 1))
    sieve = chromiumsync.UpdateSieve(msg, migrator)
    sieve.CheckMigrationState()

    migrator.Bump([chromiumsync.BOOKMARK, chromiumsync.PASSWORD])  # v=2
    sieve = chromiumsync.UpdateSieve(msg, migrator)
    sieve.CheckMigrationState()
    self.assertEqual(sieve._state,
                     {chromiumsync.TOP_LEVEL: 15412,
                      chromiumsync.AUTOFILL: 15412,
                      chromiumsync.THEME: 15413})

    migrator.Bump([chromiumsync.AUTOFILL, chromiumsync.PASSWORD])  # v=3
    sieve = chromiumsync.UpdateSieve(msg, migrator)
    try:
      sieve.CheckMigrationState()
      self.fail('Should have raised.')
    except chromiumsync.MigrationDoneError as error:
      # We want this to happen.
      self.assertEqual([chromiumsync.AUTOFILL], error.datatypes)

    msg = sync_pb2.GetUpdatesMessage()
    marker = msg.from_progress_marker.add()
    marker.data_type_id = autofill.number
    marker.token = ''
    marker = msg.from_progress_marker.add()
    marker.data_type_id = theme.number
    marker.token = pickle.dumps((15413, 1))
    sieve = chromiumsync.UpdateSieve(msg, migrator)
    sieve.CheckMigrationState()
    response = sync_pb2.GetUpdatesResponse()
    sieve.SaveProgress(15412, response)  # There were updates.
    self.assertEqual(1, len(response.new_progress_marker))
    self.assertFalse(response.HasField('new_timestamp'))
    self.assertFalse(marker.HasField('timestamp_token_for_migration'))
    marker = self.FindMarkerByNumber(response.new_progress_marker, autofill)
    self.assertEqual(pickle.loads(marker.token), (15412, 3))
    self.assertFalse(marker.HasField('timestamp_token_for_migration'))
    msg = sync_pb2.GetUpdatesMessage()
    marker = msg.from_progress_marker.add()
    marker.data_type_id = autofill.number
    marker.token = pickle.dumps((15412, 3))
    marker = msg.from_progress_marker.add()
    marker.data_type_id = theme.number
    marker.token = pickle.dumps((15413, 1))
    sieve = chromiumsync.UpdateSieve(msg, migrator)
    sieve.CheckMigrationState()

    migrator.Bump([chromiumsync.THEME, chromiumsync.AUTOFILL])  # v=4
    migrator.Bump([chromiumsync.AUTOFILL])  # v=5
    sieve = chromiumsync.UpdateSieve(msg, migrator)
    try:
      sieve.CheckMigrationState()
      self.fail('Should have raised.')
    except chromiumsync.MigrationDoneError as error:
      # We want this to happen.
      self.assertEqual(set([chromiumsync.THEME, chromiumsync.AUTOFILL]),
                       set(error.datatypes))
    msg = sync_pb2.GetUpdatesMessage()
    marker = msg.from_progress_marker.add()
    marker.data_type_id = autofill.number
    marker.token = ''
    marker = msg.from_progress_marker.add()
    marker.data_type_id = theme.number
    marker.token = pickle.dumps((15413, 1))
    sieve = chromiumsync.UpdateSieve(msg, migrator)
    try:
      sieve.CheckMigrationState()
      self.fail('Should have raised.')
    except chromiumsync.MigrationDoneError as error:
      # We want this to happen.
      self.assertEqual([chromiumsync.THEME], error.datatypes)

    msg = sync_pb2.GetUpdatesMessage()
    marker = msg.from_progress_marker.add()
    marker.data_type_id = autofill.number
    marker.token = ''
    marker = msg.from_progress_marker.add()
    marker.data_type_id = theme.number
    marker.token = ''
    sieve = chromiumsync.UpdateSieve(msg, migrator)
    sieve.CheckMigrationState()
    response = sync_pb2.GetUpdatesResponse()
    sieve.SaveProgress(15412, response)  # There were updates.
    self.assertEqual(2, len(response.new_progress_marker))
    self.assertFalse(response.HasField('new_timestamp'))
    self.assertFalse(marker.HasField('timestamp_token_for_migration'))
    marker = self.FindMarkerByNumber(response.new_progress_marker, autofill)
    self.assertEqual(pickle.loads(marker.token), (15412, 5))
    self.assertFalse(marker.HasField('timestamp_token_for_migration'))
    marker = self.FindMarkerByNumber(response.new_progress_marker, theme)
    self.assertEqual(pickle.loads(marker.token), (15412, 4))
    self.assertFalse(marker.HasField('timestamp_token_for_migration'))
    msg = sync_pb2.GetUpdatesMessage()
    marker = msg.from_progress_marker.add()
    marker.data_type_id = autofill.number
    marker.token = pickle.dumps((15412, 5))
    marker = msg.from_progress_marker.add()
    marker.data_type_id = theme.number
    marker.token = pickle.dumps((15413, 4))
    sieve = chromiumsync.UpdateSieve(msg, migrator)
    sieve.CheckMigrationState()

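  # The next test covers lazy creation of the Synced Bookmarks folder.
  # Permanent folders are addressed by server tag, so the test derives the
  # folder's expected ID from the '<server tag>synced_bookmarks' tag and
  # checks existence before and after TriggerCreateSyncedBookmarks fires.
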
  def testCreateSyncedBookmarks(self):
    version1, changes, remaining = (
        self.GetChangesFromTimestamp([chromiumsync.BOOKMARK], 0))
    id_string = self.model._MakeCurrentId(chromiumsync.BOOKMARK,
                                          '<server tag>synced_bookmarks')
    self.assertFalse(self.model._ItemExists(id_string))
    self._expect_synced_bookmarks_folder = True
    self.model.TriggerCreateSyncedBookmarks()
    self.assertTrue(self.model._ItemExists(id_string))

    # Check that the version changed when the folder was created and that
    # the folder creation was the only change.
    version2, changes, remaining = (
        self.GetChangesFromTimestamp([chromiumsync.BOOKMARK], version1))
    self.assertEqual(len(changes), 1)
    self.assertEqual(changes[0].id_string, id_string)
    self.assertNotEqual(version1, version2)
    self.assertEqual(
        self.ExpectedPermanentItemCount(chromiumsync.BOOKMARK),
        version2)

    # Ensure getting from timestamp 0 includes the folder.
    version, changes, remaining = (
        self.GetChangesFromTimestamp([chromiumsync.BOOKMARK], 0))
    self.assertEqual(
        self.ExpectedPermanentItemCount(chromiumsync.BOOKMARK),
        len(changes))
    self.assertEqual(version2, version)

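  # The next test covers keystore key rotation: GetKeystoreKeys returns the
  # full key list with the newest key last, and TriggerRotateKeystoreKeys
  # appends a fresh key while bumping the Nigori node, so clients see the
  # rotation through an ordinary GetUpdates.
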
  def testGetKey(self):
    [key1] = self.model.GetKeystoreKeys()
    [key2] = self.model.GetKeystoreKeys()
    self.assertTrue(len(key1))
    self.assertEqual(key1, key2)

    # Trigger the rotation. A subsequent GetUpdates should return the nigori
    # node (whose timestamp was bumped by the rotation).
    version1, changes, remaining = (
        self.GetChangesFromTimestamp([chromiumsync.NIGORI], 0))
    self.model.TriggerRotateKeystoreKeys()
    version2, changes, remaining = (
        self.GetChangesFromTimestamp([chromiumsync.NIGORI], version1))
    self.assertNotEqual(version1, version2)
    self.assertEqual(len(changes), 1)
    self.assertEqual(changes[0].name, 'Nigori')

    # The current keys should contain the old keys, with the new key
    # appended.
    [key1, key3] = self.model.GetKeystoreKeys()
    self.assertEqual(key1, key2)
    self.assertNotEqual(key1, key3)
    self.assertTrue(len(key3) > 0)

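  # The next test covers the keystore-encryption experiment: it lives as an
  # EXPERIMENTS entity addressed by the client-defined tag
  # KEYSTORE_ENCRYPTION_EXPERIMENT_TAG, and TriggerEnableKeystoreEncryption
  # creates it with specifics.experiments.keystore_encryption.enabled set.
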
  def testTriggerEnableKeystoreEncryption(self):
    version1, changes, remaining = (
        self.GetChangesFromTimestamp([chromiumsync.EXPERIMENTS], 0))
    keystore_encryption_id_string = (
        self.model._ClientTagToId(
            chromiumsync.EXPERIMENTS,
            chromiumsync.KEYSTORE_ENCRYPTION_EXPERIMENT_TAG))

    self.assertFalse(self.model._ItemExists(keystore_encryption_id_string))
    self.model.TriggerEnableKeystoreEncryption()
    self.assertTrue(self.model._ItemExists(keystore_encryption_id_string))

    # The creation of the experiment should be downloaded on the next
    # GetUpdates.
    version2, changes, remaining = (
        self.GetChangesFromTimestamp([chromiumsync.EXPERIMENTS], version1))
    self.assertEqual(len(changes), 1)
    self.assertEqual(changes[0].id_string, keystore_encryption_id_string)
    self.assertNotEqual(version1, version2)

    # Verify the experiment was created properly and is enabled.
    self.assertEqual(chromiumsync.KEYSTORE_ENCRYPTION_EXPERIMENT_TAG,
                     changes[0].client_defined_unique_tag)
    self.assertTrue(changes[0].HasField('specifics'))
    self.assertTrue(changes[0].specifics.HasField('experiments'))
    self.assertTrue(
        changes[0].specifics.experiments.HasField('keystore_encryption'))
    self.assertTrue(
        changes[0].specifics.experiments.keystore_encryption.enabled)

if __name__ == '__main__':
  unittest.main()