| OLD | NEW |
| --- | --- |
| 1 # Copyright 2013 The Chromium Authors. All rights reserved. | |
| 2 # Use of this source code is governed by a BSD-style license that can be | |
| 3 # found in the LICENSE file. | |
| 4 | |
| 5 """An implementation of the server side of the Chromium sync protocol. | |
| 6 | |
| 7 The details of the protocol are described mostly by comments in the protocol | |
| 8 buffer definition at chrome/browser/sync/protocol/sync.proto. | |
| 9 """ | |
| 10 | |
| 11 import base64 | |
| 12 import cgi | |
| 13 import copy | |
| 14 import google.protobuf.text_format | |
| 15 import hashlib | |
| 16 import operator | |
| 17 import pickle | |
| 18 import random | |
| 19 import string | |
| 20 import sys | |
| 21 import threading | |
| 22 import time | |
| 23 import urlparse | |
| 24 import uuid | |
| 25 | |
| 26 import app_list_specifics_pb2 | |
| 27 import app_notification_specifics_pb2 | |
| 28 import app_setting_specifics_pb2 | |
| 29 import app_specifics_pb2 | |
| 30 import arc_package_specifics_pb2 | |
| 31 import article_specifics_pb2 | |
| 32 import autofill_specifics_pb2 | |
| 33 import bookmark_specifics_pb2 | |
| 34 import client_commands_pb2 | |
| 35 import dictionary_specifics_pb2 | |
| 36 import get_updates_caller_info_pb2 | |
| 37 import extension_setting_specifics_pb2 | |
| 38 import extension_specifics_pb2 | |
| 39 import favicon_image_specifics_pb2 | |
| 40 import favicon_tracking_specifics_pb2 | |
| 41 import history_delete_directive_specifics_pb2 | |
| 42 import managed_user_setting_specifics_pb2 | |
| 43 import managed_user_specifics_pb2 | |
| 44 import managed_user_shared_setting_specifics_pb2 | |
| 45 import managed_user_whitelist_specifics_pb2 | |
| 46 import nigori_specifics_pb2 | |
| 47 import password_specifics_pb2 | |
| 48 import preference_specifics_pb2 | |
| 49 import priority_preference_specifics_pb2 | |
| 50 import search_engine_specifics_pb2 | |
| 51 import session_specifics_pb2 | |
| 52 import sync_pb2 | |
| 53 import sync_enums_pb2 | |
| 54 import synced_notification_app_info_specifics_pb2 | |
| 55 import synced_notification_specifics_pb2 | |
| 56 import theme_specifics_pb2 | |
| 57 import typed_url_specifics_pb2 | |
| 58 import wifi_credential_specifics_pb2 | |
| 59 | |
| 60 # An enumeration of the various kinds of data that can be synced. | |
| 61 # Over the wire, this enumeration is not used: a sync object's type is | |
| 62 # inferred by which EntitySpecifics field it has. But in the context | |
| 63 # of a program, it is useful to have an enumeration. | |
| 64 ALL_TYPES = ( | |
| 65 TOP_LEVEL, # The type of the 'Google Chrome' folder. | |
| 66 APPS, | |
| 67 APP_LIST, | |
| 68 APP_NOTIFICATION, | |
| 69 APP_SETTINGS, | |
| 70 ARC_PACKAGE, | |
| 71 ARTICLE, | |
| 72 AUTOFILL, | |
| 73 AUTOFILL_PROFILE, | |
| 74 AUTOFILL_WALLET, | |
| 75 AUTOFILL_WALLET_METADATA, | |
| 76 BOOKMARK, | |
| 77 DEVICE_INFO, | |
| 78 DICTIONARY, | |
| 79 EXPERIMENTS, | |
| 80 EXTENSIONS, | |
| 81 HISTORY_DELETE_DIRECTIVE, | |
| 82 MANAGED_USER_SETTING, | |
| 83 MANAGED_USER_SHARED_SETTING, | |
| 84 MANAGED_USER_WHITELIST, | |
| 85 MANAGED_USER, | |
| 86 NIGORI, | |
| 87 PASSWORD, | |
| 88 PREFERENCE, | |
| 89 PRIORITY_PREFERENCE, | |
| 90 SEARCH_ENGINE, | |
| 91 SESSION, | |
| 92 SYNCED_NOTIFICATION, | |
| 93 SYNCED_NOTIFICATION_APP_INFO, | |
| 94 THEME, | |
| 95 TYPED_URL, | |
| 96 EXTENSION_SETTINGS, | |
| 97 FAVICON_IMAGES, | |
| 98 FAVICON_TRACKING, | |
| 99 WIFI_CREDENTIAL) = range(35) | |
| 100 | |
| 101 # An enumeration of the frequencies at which the server can send errors | |
| 102 # to the client. The frequency is specified by the URL that triggers the error. | |
| 103 # Note: This enum should be kept in the same order as the enum in sync_test.h. | |
| 104 SYNC_ERROR_FREQUENCY = ( | |
| 105 ERROR_FREQUENCY_NONE, | |
| 106 ERROR_FREQUENCY_ALWAYS, | |
| 107 ERROR_FREQUENCY_TWO_THIRDS) = range(3) | |
| 108 | |
| 109 # Well-known server tag of the top level 'Google Chrome' folder. | |
| 110 TOP_LEVEL_FOLDER_TAG = 'google_chrome' | |
| 111 | |
| 112 # Given a sync type from ALL_TYPES, find the FieldDescriptor corresponding | |
| 113 # to that datatype. Note that TOP_LEVEL has no such field. | |
| 114 SYNC_TYPE_FIELDS = sync_pb2.EntitySpecifics.DESCRIPTOR.fields_by_name | |
| 115 SYNC_TYPE_TO_DESCRIPTOR = { | |
| 116 APP_LIST: SYNC_TYPE_FIELDS['app_list'], | |
| 117 APP_NOTIFICATION: SYNC_TYPE_FIELDS['app_notification'], | |
| 118 APP_SETTINGS: SYNC_TYPE_FIELDS['app_setting'], | |
| 119 APPS: SYNC_TYPE_FIELDS['app'], | |
| 120 ARC_PACKAGE: SYNC_TYPE_FIELDS['arc_package'], | |
| 121 ARTICLE: SYNC_TYPE_FIELDS['article'], | |
| 122 AUTOFILL: SYNC_TYPE_FIELDS['autofill'], | |
| 123 AUTOFILL_PROFILE: SYNC_TYPE_FIELDS['autofill_profile'], | |
| 124 AUTOFILL_WALLET: SYNC_TYPE_FIELDS['autofill_wallet'], | |
| 125 AUTOFILL_WALLET_METADATA: SYNC_TYPE_FIELDS['wallet_metadata'], | |
| 126 BOOKMARK: SYNC_TYPE_FIELDS['bookmark'], | |
| 127 DEVICE_INFO: SYNC_TYPE_FIELDS['device_info'], | |
| 128 DICTIONARY: SYNC_TYPE_FIELDS['dictionary'], | |
| 129 EXPERIMENTS: SYNC_TYPE_FIELDS['experiments'], | |
| 130 EXTENSION_SETTINGS: SYNC_TYPE_FIELDS['extension_setting'], | |
| 131 EXTENSIONS: SYNC_TYPE_FIELDS['extension'], | |
| 132 FAVICON_IMAGES: SYNC_TYPE_FIELDS['favicon_image'], | |
| 133 FAVICON_TRACKING: SYNC_TYPE_FIELDS['favicon_tracking'], | |
| 134 HISTORY_DELETE_DIRECTIVE: SYNC_TYPE_FIELDS['history_delete_directive'], | |
| 135 MANAGED_USER_SHARED_SETTING: | |
| 136 SYNC_TYPE_FIELDS['managed_user_shared_setting'], | |
| 137 MANAGED_USER_SETTING: SYNC_TYPE_FIELDS['managed_user_setting'], | |
| 138 MANAGED_USER_WHITELIST: SYNC_TYPE_FIELDS['managed_user_whitelist'], | |
| 139 MANAGED_USER: SYNC_TYPE_FIELDS['managed_user'], | |
| 140 NIGORI: SYNC_TYPE_FIELDS['nigori'], | |
| 141 PASSWORD: SYNC_TYPE_FIELDS['password'], | |
| 142 PREFERENCE: SYNC_TYPE_FIELDS['preference'], | |
| 143 PRIORITY_PREFERENCE: SYNC_TYPE_FIELDS['priority_preference'], | |
| 144 SEARCH_ENGINE: SYNC_TYPE_FIELDS['search_engine'], | |
| 145 SESSION: SYNC_TYPE_FIELDS['session'], | |
| 146 SYNCED_NOTIFICATION: SYNC_TYPE_FIELDS['synced_notification'], | |
| 147 SYNCED_NOTIFICATION_APP_INFO: | |
| 148 SYNC_TYPE_FIELDS['synced_notification_app_info'], | |
| 149 THEME: SYNC_TYPE_FIELDS['theme'], | |
| 150 TYPED_URL: SYNC_TYPE_FIELDS['typed_url'], | |
| 151 WIFI_CREDENTIAL: SYNC_TYPE_FIELDS['wifi_credential'], | |
| 152 } | |
| 153 | |
| 154 # The parent ID used to indicate a top-level node. | |
| 155 ROOT_ID = '0' | |
| 156 | |
| 157 # Unix time epoch +1 day in struct_time format. The tuple corresponds to | |
| 158 # UTC Friday Jan 2 1970, 00:00:00, non-dst. | |
| 159 # We have to add one day after the start of the epoch, since in timezones | |
| 160 # with a positive UTC offset time.mktime throws an OverflowError | |
| 161 # rather than returning a negative number. | |
| 162 FIRST_DAY_UNIX_TIME_EPOCH = (1970, 1, 2, 0, 0, 0, 4, 2, 0) | |
| 163 ONE_DAY_SECONDS = 60 * 60 * 24 | |
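To make the arithmetic above concrete, here is a small standalone sketch (not part of the original module) of how these two constants are later combined to express wall-clock time as milliseconds since the Unix epoch; it mirrors the `mtime` computation in `CommitEntry` further down.

```python
import time

FIRST_DAY_UNIX_TIME_EPOCH = (1970, 1, 2, 0, 0, 0, 4, 2, 0)
ONE_DAY_SECONDS = 60 * 60 * 24

def UnixMillisecondsNow():
  # mktime() interprets its argument as local time, so feeding it the
  # epoch-plus-one-day tuple and then subtracting one day recovers the
  # epoch origin without ever asking mktime() for a pre-epoch value
  # (which raises OverflowError in zones with a positive UTC offset).
  epoch_in_local_time = time.mktime(FIRST_DAY_UNIX_TIME_EPOCH) - ONE_DAY_SECONDS
  return int((time.mktime(time.gmtime()) - epoch_in_local_time) * 1000)
```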
| 164 | |
| 165 # The number of characters in the server-generated encryption key. | |
| 166 KEYSTORE_KEY_LENGTH = 16 | |
| 167 | |
| 168 # The hashed client tags for some experiment nodes. | |
| 169 KEYSTORE_ENCRYPTION_EXPERIMENT_TAG = "pis8ZRzh98/MKLtVEio2mr42LQA=" | |
| 170 PRE_COMMIT_GU_AVOIDANCE_EXPERIMENT_TAG = "Z1xgeh3QUBa50vdEPd8C/4c7jfE=" | |
| 171 | |
| 172 class Error(Exception): | |
| 173 """Error class for this module.""" | |
| 174 | |
| 175 | |
| 176 class ProtobufDataTypeFieldNotUnique(Error): | |
| 177 """An entry should not have more than one data type present.""" | |
| 178 | |
| 179 | |
| 180 class DataTypeIdNotRecognized(Error): | |
| 181 """The requested data type is not recognized.""" | |
| 182 | |
| 183 | |
| 184 class MigrationDoneError(Error): | |
| 185 """A server-side migration occurred; clients must re-sync some datatypes. | |
| 186 | |
| 187 Attributes: | |
| 188 datatypes: a list of the datatypes (python enum) needing migration. | |
| 189 """ | |
| 190 | |
| 191 def __init__(self, datatypes): | |
| 192 self.datatypes = datatypes | |
| 193 | |
| 194 | |
| 195 class StoreBirthdayError(Error): | |
| 196 """The client sent a birthday that doesn't correspond to this server.""" | |
| 197 | |
| 198 | |
| 199 class TransientError(Error): | |
| 200 """The client would be sent a transient error.""" | |
| 201 | |
| 202 | |
| 203 class SyncInducedError(Error): | |
| 204 """The client would be sent an error.""" | |
| 205 | |
| 206 | |
| 207 class InducedErrorFrequencyNotDefined(Error): | |
| 208 """The error frequency defined is not handled.""" | |
| 209 | |
| 210 | |
| 211 class ClientNotConnectedError(Error): | |
| 212 """The client is not connected to the server.""" | |
| 213 | |
| 214 | |
| 215 def GetEntryType(entry): | |
| 216 """Extract the sync type from a SyncEntry. | |
| 217 | |
| 218 Args: | |
| 219 entry: A SyncEntity protobuf object whose type to determine. | |
| 220 Returns: | |
| 221 An enum value from ALL_TYPES if the entry's type can be determined, or None | |
| 222 if the type cannot be determined. | |
| 223 Raises: | |
| 224 ProtobufDataTypeFieldNotUnique: More than one type was indicated by | |
| 225 the entry. | |
| 226 """ | |
| 227 if entry.server_defined_unique_tag == TOP_LEVEL_FOLDER_TAG: | |
| 228 return TOP_LEVEL | |
| 229 entry_types = GetEntryTypesFromSpecifics(entry.specifics) | |
| 230 if not entry_types: | |
| 231 return None | |
| 232 | |
| 233 # If there is more than one, either there's a bug, or else the caller | |
| 234 # should use GetEntryTypes. | |
| 235 if len(entry_types) > 1: | |
| 236 raise ProtobufDataTypeFieldNotUnique | |
| 237 return entry_types[0] | |
| 238 | |
| 239 | |
| 240 def GetEntryTypesFromSpecifics(specifics): | |
| 241 """Determine the sync types indicated by an EntitySpecifics's field(s). | |
| 242 | |
| 243 If the specifics have more than one recognized data type field (as commonly | |
| 244 happens with the requested_types field of GetUpdatesMessage), all types | |
| 245 will be returned. Callers must handle the possibility of the returned | |
| 246 value having more than one item. | |
| 247 | |
| 248 Args: | |
| 249 specifics: An EntitySpecifics protobuf message whose extensions to | |
| 250 enumerate. | |
| 251 Returns: | |
| 252 A list of the sync types (values from ALL_TYPES) associated with each | |
| 253 recognized extension of the specifics message. | |
| 254 """ | |
| 255 return [data_type for data_type, field_descriptor | |
| 256 in SYNC_TYPE_TO_DESCRIPTOR.iteritems() | |
| 257 if specifics.HasField(field_descriptor.name)] | |
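A minimal usage sketch (assuming the generated `sync_pb2` bindings are importable): presence of a specifics field, not a separate tag, is what identifies the type.

```python
specifics = sync_pb2.EntitySpecifics()
specifics.bookmark.SetInParent()  # Mark the bookmark field as present.
assert GetEntryTypesFromSpecifics(specifics) == [BOOKMARK]
```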
| 258 | |
| 259 | |
| 260 def SyncTypeToProtocolDataTypeId(data_type): | |
| 261 """Convert from a sync type (python enum) to the protocol's data type id.""" | |
| 262 return SYNC_TYPE_TO_DESCRIPTOR[data_type].number | |
| 263 | |
| 264 | |
| 265 def ProtocolDataTypeIdToSyncType(protocol_data_type_id): | |
| 266 """Convert from the protocol's data type id to a sync type (python enum).""" | |
| 267 for data_type, field_descriptor in SYNC_TYPE_TO_DESCRIPTOR.iteritems(): | |
| 268 if field_descriptor.number == protocol_data_type_id: | |
| 269 return data_type | |
| 270 raise DataTypeIdNotRecognized | |
| 271 | |
| 272 | |
| 273 def DataTypeStringToSyncTypeLoose(data_type_string): | |
| 274 """Converts a human-readable string to a sync type (python enum). | |
| 275 | |
| 276 Capitalization and pluralization don't matter; this function is appropriate | |
| 277 for values that might have been typed by a human being; e.g., command-line | |
| 278 flags or query parameters. | |
| 279 """ | |
| 280 if data_type_string.isdigit(): | |
| 281 return ProtocolDataTypeIdToSyncType(int(data_type_string)) | |
| 282 name = data_type_string.lower().rstrip('s') | |
| 283 for data_type, field_descriptor in SYNC_TYPE_TO_DESCRIPTOR.iteritems(): | |
| 284 if field_descriptor.name.lower().rstrip('s') == name: | |
| 285 return data_type | |
| 286 raise DataTypeIdNotRecognized | |
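Illustrative calls (hypothetical, not in the original file) showing the three spellings the loose parser accepts:

```python
assert DataTypeStringToSyncTypeLoose('Bookmarks') == BOOKMARK  # plural, capitalized
assert DataTypeStringToSyncTypeLoose('bookmark') == BOOKMARK   # exact field name
bookmark_field_id = str(SyncTypeToProtocolDataTypeId(BOOKMARK))
assert DataTypeStringToSyncTypeLoose(bookmark_field_id) == BOOKMARK  # numeric ID
```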
| 287 | |
| 288 | |
| 289 def MakeNewKeystoreKey(): | |
| 290 """Returns a new random keystore key.""" | |
| 291 return ''.join(random.choice(string.ascii_uppercase + string.digits) | |
| 292 for x in xrange(KEYSTORE_KEY_LENGTH)) | |
| 293 | |
| 294 | |
| 295 def SyncTypeToString(data_type): | |
| 296 """Formats a sync type enum (from ALL_TYPES) to a human-readable string.""" | |
| 297 return SYNC_TYPE_TO_DESCRIPTOR[data_type].name | |
| 298 | |
| 299 | |
| 300 def CallerInfoToString(caller_info_source): | |
| 301 """Formats a GetUpdatesSource enum value to a readable string.""" | |
| 302 return get_updates_caller_info_pb2.GetUpdatesCallerInfo \ | |
| 303 .DESCRIPTOR.enum_types_by_name['GetUpdatesSource'] \ | |
| 304 .values_by_number[caller_info_source].name | |
| 305 | |
| 306 | |
| 307 def ShortDatatypeListSummary(data_types): | |
| 308 """Formats compactly a list of sync types (python enums) for human eyes. | |
| 309 | |
| 310 This function is intended for use by logging. If the list of datatypes | |
| 311 contains almost all of the values, the return value will be expressed | |
| 312 in terms of the datatypes that aren't set. | |
| 313 """ | |
| 314 included = set(data_types) - set([TOP_LEVEL]) | |
| 315 if not included: | |
| 316 return 'nothing' | |
| 317 excluded = set(ALL_TYPES) - included - set([TOP_LEVEL]) | |
| 318 if not excluded: | |
| 319 return 'everything' | |
| 320 simple_text = '+'.join(sorted([SyncTypeToString(x) for x in included])) | |
| 321 all_but_text = 'all except %s' % ( | |
| 322 '+'.join(sorted([SyncTypeToString(x) for x in excluded]))) | |
| 323 if len(included) < len(excluded) or len(simple_text) <= len(all_but_text): | |
| 324 return simple_text | |
| 325 else: | |
| 326 return all_but_text | |
| 327 | |
| 328 | |
| 329 def GetDefaultEntitySpecifics(data_type): | |
| 330 """Get an EntitySpecifics having a sync type's default field value.""" | |
| 331 specifics = sync_pb2.EntitySpecifics() | |
| 332 if data_type in SYNC_TYPE_TO_DESCRIPTOR: | |
| 333 descriptor = SYNC_TYPE_TO_DESCRIPTOR[data_type] | |
| 334 getattr(specifics, descriptor.name).SetInParent() | |
| 335 return specifics | |
| 336 | |
| 337 | |
| 338 class PermanentItem(object): | |
| 339 """A specification of one server-created permanent item. | |
| 340 | |
| 341 Attributes: | |
| 342 tag: A known-to-the-client value that uniquely identifies a server-created | |
| 343 permanent item. | |
| 344 name: The human-readable display name for this item. | |
| 345 parent_tag: The tag of the permanent item's parent. If ROOT_ID, indicates | |
| 346 a top-level item. Otherwise, this must be the tag value of some other | |
| 347 server-created permanent item. | |
| 348 sync_type: A value from ALL_TYPES, giving the datatype of this permanent | |
| 349 item. This controls which types of client GetUpdates requests will | |
| 350 cause the permanent item to be created and returned. | |
| 351 create_by_default: Whether the permanent item is created at startup or not. | |
| 352 This value is set to True in the default case. Non-default permanent items | |
| 353 are those that are created only when a client explicitly tells the server | |
| 354 to do so. | |
| 355 """ | |
| 356 | |
| 357 def __init__(self, tag, name, parent_tag, sync_type, create_by_default=True): | |
| 358 self.tag = tag | |
| 359 self.name = name | |
| 360 self.parent_tag = parent_tag | |
| 361 self.sync_type = sync_type | |
| 362 self.create_by_default = create_by_default | |
| 363 | |
| 364 | |
| 365 class MigrationHistory(object): | |
| 366 """A record of the migration events associated with an account. | |
| 367 | |
| 368 Each migration event invalidates one or more datatypes on all clients | |
| 369 that had synced the datatype before the event. Such clients will continue | |
| 370 to receive MigrationDone errors until they throw away their progress and | |
| 371 re-sync that datatype from the beginning. | |
| 372 """ | |
| 373 def __init__(self): | |
| 374 self._migrations = {} | |
| 375 for datatype in ALL_TYPES: | |
| 376 self._migrations[datatype] = [1] | |
| 377 self._next_migration_version = 2 | |
| 378 | |
| 379 def GetLatestVersion(self, datatype): | |
| 380 return self._migrations[datatype][-1] | |
| 381 | |
| 382 def CheckAllCurrent(self, versions_map): | |
| 383 """Raises an error if any the provided versions are out of date. | |
| 384 | |
| 385 This function intentionally returns migrations in the order that they were | |
| 386 triggered. Doing it this way allows the client to queue up two migrations | |
| 387 in a row, so the second one is received while responding to the first. | |
| 388 | |
| 389 Arguments: | |
| 390 version_map: a map whose keys are datatypes and whose values are versions. | |
| 391 | |
| 392 Raises: | |
| 393 MigrationDoneError: if a mismatch is found. | |
| 394 """ | |
| 395 problems = {} | |
| 396 for datatype, client_migration in versions_map.iteritems(): | |
| 397 for server_migration in self._migrations[datatype]: | |
| 398 if client_migration < server_migration: | |
| 399 problems.setdefault(server_migration, []).append(datatype) | |
| 400 if problems: | |
| 401 raise MigrationDoneError(problems[min(problems.keys())]) | |
| 402 | |
| 403 def Bump(self, datatypes): | |
| 404 """Add a record of a migration, to cause errors on future requests.""" | |
| 405 for datatype in datatypes: | |
| 406 self._migrations[datatype].append(self._next_migration_version) | |
| 407 self._next_migration_version += 1 | |
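A short sketch of the lifecycle this class implements; the assertions restate the docstrings above rather than adding new behavior.

```python
history = MigrationHistory()
assert history.GetLatestVersion(BOOKMARK) == 1

history.Bump([BOOKMARK])  # Bookmarks now require migration version 2.
try:
  history.CheckAllCurrent({BOOKMARK: 1})  # A client still at version 1.
  assert False, 'expected MigrationDoneError'
except MigrationDoneError as e:
  assert e.datatypes == [BOOKMARK]  # The client must resync bookmarks.
```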
| 408 | |
| 409 | |
| 410 class UpdateSieve(object): | |
| 411 """A filter to remove items the client has already seen.""" | |
| 412 def __init__(self, request, migration_history=None): | |
| 413 self._original_request = request | |
| 414 self._state = {} | |
| 415 self._migration_history = migration_history or MigrationHistory() | |
| 416 self._migration_versions_to_check = {} | |
| 417 if request.from_progress_marker: | |
| 418 for marker in request.from_progress_marker: | |
| 419 data_type = ProtocolDataTypeIdToSyncType(marker.data_type_id) | |
| 420 if marker.HasField('timestamp_token_for_migration'): | |
| 421 timestamp = marker.timestamp_token_for_migration | |
| 422 if timestamp: | |
| 423 self._migration_versions_to_check[data_type] = 1 | |
| 424 elif marker.token: | |
| 425 (timestamp, version) = pickle.loads(marker.token) | |
| 426 self._migration_versions_to_check[data_type] = version | |
| 427 elif marker.HasField('token'): | |
| 428 timestamp = 0 | |
| 429 else: | |
| 430 raise ValueError('No timestamp information in progress marker.') | |
| 432 self._state[data_type] = timestamp | |
| 433 elif request.HasField('from_timestamp'): | |
| 434 for data_type in GetEntryTypesFromSpecifics(request.requested_types): | |
| 435 self._state[data_type] = request.from_timestamp | |
| 436 self._migration_versions_to_check[data_type] = 1 | |
| 437 if self._state: | |
| 438 self._state[TOP_LEVEL] = min(self._state.itervalues()) | |
| 439 | |
| 440 def SummarizeRequest(self): | |
| 441 timestamps = {} | |
| 442 for data_type, timestamp in self._state.iteritems(): | |
| 443 if data_type == TOP_LEVEL: | |
| 444 continue | |
| 445 timestamps.setdefault(timestamp, []).append(data_type) | |
| 446 return ', '.join('<%s>@%d' % (ShortDatatypeListSummary(types), stamp) | |
| 447 for stamp, types in sorted(timestamps.iteritems())) | |
| 448 | |
| 449 def CheckMigrationState(self): | |
| 450 self._migration_history.CheckAllCurrent(self._migration_versions_to_check) | |
| 451 | |
| 452 def ClientWantsItem(self, item): | |
| 453 """Return true if the client hasn't already seen an item.""" | |
| 454 return self._state.get(GetEntryType(item), sys.maxint) < item.version | |
| 455 | |
| 456 def HasAnyTimestamp(self): | |
| 457 """Return true if at least one datatype was requested.""" | |
| 458 return bool(self._state) | |
| 459 | |
| 460 def GetMinTimestamp(self): | |
| 461 """Return true the smallest timestamp requested across all datatypes.""" | |
| 462 return min(self._state.itervalues()) | |
| 463 | |
| 464 def GetFirstTimeTypes(self): | |
| 465 """Return a list of datatypes requesting updates from timestamp zero.""" | |
| 466 return [datatype for datatype, timestamp in self._state.iteritems() | |
| 467 if timestamp == 0] | |
| 468 | |
| 469 def GetCreateMobileBookmarks(self): | |
| 470 """Return true if the client has requested to create the 'Mobile Bookmarks' | |
| 471 folder. | |
| 472 """ | |
| 473 return (self._original_request.HasField('create_mobile_bookmarks_folder') | |
| 474 and self._original_request.create_mobile_bookmarks_folder) | |
| 475 | |
| 476 def SaveProgress(self, new_timestamp, get_updates_response): | |
| 477 """Write the new_timestamp or new_progress_marker fields to a response.""" | |
| 478 if self._original_request.from_progress_marker: | |
| 479 for data_type, old_timestamp in self._state.iteritems(): | |
| 480 if data_type == TOP_LEVEL: | |
| 481 continue | |
| 482 new_marker = sync_pb2.DataTypeProgressMarker() | |
| 483 new_marker.data_type_id = SyncTypeToProtocolDataTypeId(data_type) | |
| 484 final_stamp = max(old_timestamp, new_timestamp) | |
| 485 final_migration = self._migration_history.GetLatestVersion(data_type) | |
| 486 new_marker.token = pickle.dumps((final_stamp, final_migration)) | |
| 487 get_updates_response.new_progress_marker.add().MergeFrom(new_marker) | |
| 488 elif self._original_request.HasField('from_timestamp'): | |
| 489 if self._original_request.from_timestamp < new_timestamp: | |
| 490 get_updates_response.new_timestamp = new_timestamp | |
| 491 | |
| 492 | |
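Before moving on to the data model, here is a sketch of how a client request flows into `UpdateSieve` (assuming the generated `sync_pb2` bindings): the progress-marker token is simply a pickled `(timestamp, migration version)` pair, exactly as `SaveProgress` above writes it.

```python
request = sync_pb2.GetUpdatesMessage()
marker = request.from_progress_marker.add()
marker.data_type_id = SyncTypeToProtocolDataTypeId(BOOKMARK)
marker.token = pickle.dumps((15, 1))  # (timestamp, migration version)

sieve = UpdateSieve(request)
assert sieve.HasAnyTimestamp()
assert sieve.GetMinTimestamp() == 15  # TOP_LEVEL inherits the minimum.
```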
| 493 class SyncDataModel(object): | |
| 494 """Models the account state of one sync user.""" | |
| 495 _BATCH_SIZE = 100 | |
| 496 | |
| 497 # Specify all the permanent items that a model might need. | |
| 498 _PERMANENT_ITEM_SPECS = [ | |
| 499 PermanentItem('google_chrome_apps', name='Apps', | |
| 500 parent_tag=ROOT_ID, sync_type=APPS), | |
| 501 PermanentItem('google_chrome_app_list', name='App List', | |
| 502 parent_tag=ROOT_ID, sync_type=APP_LIST), | |
| 503 PermanentItem('google_chrome_app_notifications', name='App Notifications', | |
| 504 parent_tag=ROOT_ID, sync_type=APP_NOTIFICATION), | |
| 505 PermanentItem('google_chrome_app_settings', | |
| 506 name='App Settings', | |
| 507 parent_tag=ROOT_ID, sync_type=APP_SETTINGS), | |
| 508 PermanentItem('google_chrome_arc_package', name='Arc Package', | |
| 509 parent_tag=ROOT_ID, sync_type=ARC_PACKAGE), | |
| 510 PermanentItem('google_chrome_bookmarks', name='Bookmarks', | |
| 511 parent_tag=ROOT_ID, sync_type=BOOKMARK), | |
| 512 PermanentItem('bookmark_bar', name='Bookmark Bar', | |
| 513 parent_tag='google_chrome_bookmarks', sync_type=BOOKMARK), | |
| 514 PermanentItem('other_bookmarks', name='Other Bookmarks', | |
| 515 parent_tag='google_chrome_bookmarks', sync_type=BOOKMARK), | |
| 516 PermanentItem('synced_bookmarks', name='Synced Bookmarks', | |
| 517 parent_tag='google_chrome_bookmarks', sync_type=BOOKMARK, | |
| 518 create_by_default=False), | |
| 519 PermanentItem('google_chrome_autofill', name='Autofill', | |
| 520 parent_tag=ROOT_ID, sync_type=AUTOFILL), | |
| 521 PermanentItem('google_chrome_autofill_profiles', name='Autofill Profiles', | |
| 522 parent_tag=ROOT_ID, sync_type=AUTOFILL_PROFILE), | |
| 523 PermanentItem('google_chrome_autofill_wallet', | |
| 524 name='Autofill Wallet Items', parent_tag=ROOT_ID, | |
| 525 sync_type=AUTOFILL_WALLET), | |
| 526 PermanentItem('google_chrome_autofill_wallet_metadata', | |
| 527 name='Autofill Wallet Metadata', parent_tag=ROOT_ID, | |
| 528 sync_type=AUTOFILL_WALLET_METADATA), | |
| 529 PermanentItem('google_chrome_device_info', name='Device Info', | |
| 530 parent_tag=ROOT_ID, sync_type=DEVICE_INFO), | |
| 531 PermanentItem('google_chrome_experiments', name='Experiments', | |
| 532 parent_tag=ROOT_ID, sync_type=EXPERIMENTS), | |
| 533 PermanentItem('google_chrome_extension_settings', | |
| 534 name='Extension Settings', | |
| 535 parent_tag=ROOT_ID, sync_type=EXTENSION_SETTINGS), | |
| 536 PermanentItem('google_chrome_extensions', name='Extensions', | |
| 537 parent_tag=ROOT_ID, sync_type=EXTENSIONS), | |
| 538 PermanentItem('google_chrome_history_delete_directives', | |
| 539 name='History Delete Directives', | |
| 540 parent_tag=ROOT_ID, | |
| 541 sync_type=HISTORY_DELETE_DIRECTIVE), | |
| 542 PermanentItem('google_chrome_favicon_images', | |
| 543 name='Favicon Images', | |
| 544 parent_tag=ROOT_ID, | |
| 545 sync_type=FAVICON_IMAGES), | |
| 546 PermanentItem('google_chrome_favicon_tracking', | |
| 547 name='Favicon Tracking', | |
| 548 parent_tag=ROOT_ID, | |
| 549 sync_type=FAVICON_TRACKING), | |
| 550 PermanentItem('google_chrome_managed_user_settings', | |
| 551 name='Managed User Settings', | |
| 552 parent_tag=ROOT_ID, sync_type=MANAGED_USER_SETTING), | |
| 553 PermanentItem('google_chrome_managed_users', | |
| 554 name='Managed Users', | |
| 555 parent_tag=ROOT_ID, sync_type=MANAGED_USER), | |
| 556 PermanentItem('google_chrome_managed_user_shared_settings', | |
| 557 name='Managed User Shared Settings', | |
| 558 parent_tag=ROOT_ID, sync_type=MANAGED_USER_SHARED_SETTING), | |
| 559 PermanentItem('google_chrome_managed_user_whitelists', | |
| 560 name='Managed User Whitelists', parent_tag=ROOT_ID, | |
| 561 sync_type=MANAGED_USER_WHITELIST), | |
| 562 PermanentItem('google_chrome_nigori', name='Nigori', | |
| 563 parent_tag=ROOT_ID, sync_type=NIGORI), | |
| 564 PermanentItem('google_chrome_passwords', name='Passwords', | |
| 565 parent_tag=ROOT_ID, sync_type=PASSWORD), | |
| 566 PermanentItem('google_chrome_preferences', name='Preferences', | |
| 567 parent_tag=ROOT_ID, sync_type=PREFERENCE), | |
| 568 PermanentItem('google_chrome_priority_preferences', | |
| 569 name='Priority Preferences', | |
| 570 parent_tag=ROOT_ID, sync_type=PRIORITY_PREFERENCE), | |
| 571 PermanentItem('google_chrome_synced_notifications', | |
| 572 name='Synced Notifications', | |
| 573 parent_tag=ROOT_ID, sync_type=SYNCED_NOTIFICATION), | |
| 574 PermanentItem('google_chrome_synced_notification_app_info', | |
| 575 name='Synced Notification App Info', | |
| 576 parent_tag=ROOT_ID, sync_type=SYNCED_NOTIFICATION_APP_INFO), | |
| 577 PermanentItem('google_chrome_search_engines', name='Search Engines', | |
| 578 parent_tag=ROOT_ID, sync_type=SEARCH_ENGINE), | |
| 579 PermanentItem('google_chrome_sessions', name='Sessions', | |
| 580 parent_tag=ROOT_ID, sync_type=SESSION), | |
| 581 PermanentItem('google_chrome_themes', name='Themes', | |
| 582 parent_tag=ROOT_ID, sync_type=THEME), | |
| 583 PermanentItem('google_chrome_typed_urls', name='Typed URLs', | |
| 584 parent_tag=ROOT_ID, sync_type=TYPED_URL), | |
| 585 PermanentItem('google_chrome_wifi_credentials', name='WiFi Credentials', | |
| 586 parent_tag=ROOT_ID, sync_type=WIFI_CREDENTIAL), | |
| 587 PermanentItem('google_chrome_dictionary', name='Dictionary', | |
| 588 parent_tag=ROOT_ID, sync_type=DICTIONARY), | |
| 589 PermanentItem('google_chrome_articles', name='Articles', | |
| 590 parent_tag=ROOT_ID, sync_type=ARTICLE), | |
| 591 ] | |
| 592 | |
| 593 def __init__(self): | |
| 594 # Monotonically increasing version number. The next object change will | |
| 595 # take on this value + 1. | |
| 596 self._version = 0 | |
| 597 | |
| 598 # The definitive copy of this client's items: a map from ID string to a | |
| 599 # SyncEntity protocol buffer. | |
| 600 self._entries = {} | |
| 601 | |
| 602 self.ResetStoreBirthday() | |
| 603 self.migration_history = MigrationHistory() | |
| 604 self.induced_error = sync_pb2.ClientToServerResponse.Error() | |
| 605 self.induced_error_frequency = 0 | |
| 606 self.sync_count_before_errors = 0 | |
| 607 self.acknowledge_managed_users = False | |
| 608 self._keys = [MakeNewKeystoreKey()] | |
| 609 | |
| 610 def _SaveEntry(self, entry): | |
| 611 """Insert or update an entry in the change log, and give it a new version. | |
| 612 | |
| 613 The ID fields of this entry are assumed to be valid server IDs. This | |
| 614 entry will be updated with a new version number and sync_timestamp. | |
| 615 | |
| 616 Args: | |
| 617 entry: The entry to be added or updated. | |
| 618 """ | |
| 619 self._version += 1 | |
| 620 # Maintain a global (rather than per-item) sequence number and use it | |
| 621 # both as the per-entry version as well as the update-progress timestamp. | |
| 622 # This simulates the behavior of the original server implementation. | |
| 623 entry.version = self._version | |
| 624 entry.sync_timestamp = self._version | |
| 625 | |
| 626 # Preserve the originator info, which the client is not required to send | |
| 627 # when updating. | |
| 628 base_entry = self._entries.get(entry.id_string) | |
| 629 if base_entry: | |
| 630 entry.originator_cache_guid = base_entry.originator_cache_guid | |
| 631 entry.originator_client_item_id = base_entry.originator_client_item_id | |
| 632 | |
| 633 self._entries[entry.id_string] = copy.deepcopy(entry) | |
| 634 | |
| 635 def _ServerTagToId(self, tag): | |
| 636 """Determine the server ID from a server-unique tag. | |
| 637 | |
| 638 The resulting value is guaranteed not to collide with the other ID | |
| 639 generation methods. | |
| 640 | |
| 641 Args: | |
| 642 tag: The unique, known-to-the-client tag of a server-generated item. | |
| 643 Returns: | |
| 644 The string value of the computed server ID. | |
| 645 """ | |
| 646 if not tag or tag == ROOT_ID: | |
| 647 return tag | |
| 648 spec = [x for x in self._PERMANENT_ITEM_SPECS if x.tag == tag][0] | |
| 649 return self._MakeCurrentId(spec.sync_type, '<server tag>%s' % tag) | |
| 650 | |
| 651 def _TypeToTypeRootId(self, model_type): | |
| 652 """Returns the server ID for the type root node of the given type.""" | |
| 653 tag = [x.tag for x in self._PERMANENT_ITEM_SPECS | |
| 654 if x.sync_type == model_type][0] | |
| 655 return self._ServerTagToId(tag) | |
| 656 | |
| 657 def _ClientTagToId(self, datatype, tag): | |
| 658 """Determine the server ID from a client-unique tag. | |
| 659 | |
| 660 The resulting value is guaranteed not to collide with the other ID | |
| 661 generation methods. | |
| 662 | |
| 663 Args: | |
| 664 datatype: The sync type (python enum) of the identified object. | |
| 665 tag: The unique, opaque-to-the-server tag of a client-tagged item. | |
| 666 Returns: | |
| 667 The string value of the computed server ID. | |
| 668 """ | |
| 669 return self._MakeCurrentId(datatype, '<client tag>%s' % tag) | |
| 670 | |
| 671 def _ClientIdToId(self, datatype, client_guid, client_item_id): | |
| 672 """Compute a unique server ID from a client-local ID tag. | |
| 673 | |
| 674 The resulting value is guaranteed not to collide with the other ID | |
| 675 generation methods. | |
| 676 | |
| 677 Args: | |
| 678 datatype: The sync type (python enum) of the identified object. | |
| 679 client_guid: A globally unique ID that identifies the client which | |
| 680 created this item. | |
| 681 client_item_id: An ID that uniquely identifies this item on the client | |
| 682 which created it. | |
| 683 Returns: | |
| 684 The string value of the computed server ID. | |
| 685 """ | |
| 686 # Using the client ID info is not required here (we could instead generate | |
| 687 # a random ID), but it's useful for debugging. | |
| 688 return self._MakeCurrentId(datatype, | |
| 689 '<server ID originally>%s/%s' % (client_guid, client_item_id)) | |
| 690 | |
| 691 def _MakeCurrentId(self, datatype, inner_id): | |
| 692 return '%d^%d^%s' % (datatype, | |
| 693 self.migration_history.GetLatestVersion(datatype), | |
| 694 inner_id) | |
| 695 | |
| 696 def _ExtractIdInfo(self, id_string): | |
| 697 if not id_string or id_string == ROOT_ID: | |
| 698 return None | |
| 699 datatype_string, separator, remainder = id_string.partition('^') | |
| 700 migration_version_string, separator, inner_id = remainder.partition('^') | |
| 701 return (int(datatype_string), int(migration_version_string), inner_id) | |
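The three `_...ToId` methods all funnel through `_MakeCurrentId`, so every server ID has the shape `<datatype>^<migration version>^<inner id>`. A hypothetical round trip:

```python
model = SyncDataModel()
id_string = model._MakeCurrentId(BOOKMARK, '<client tag>xyz')
assert id_string == '%d^1^<client tag>xyz' % BOOKMARK
datatype, migration_version, inner_id = model._ExtractIdInfo(id_string)
assert (datatype, migration_version, inner_id) == (BOOKMARK, 1, '<client tag>xyz')
```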
| 702 | |
| 703 def _WritePosition(self, entry, parent_id): | |
| 704 """Ensure the entry has an absolute, numeric position and parent_id. | |
| 705 | |
| 706 Historically, clients would specify positions using the predecessor-based | |
| 707 references in the insert_after_item_id field; starting July 2011, this | |
| 708 was changed and Chrome now sends up the absolute position. The server | |
| 709 must store a position_in_parent value and must not maintain | |
| 710 insert_after_item_id. | |
| 711 Starting in Jan 2013, the client will also send up a unique_position field | |
| 712 which should be saved and returned on subsequent GetUpdates. | |
| 713 | |
| 714 Args: | |
| 715 entry: The entry for which to write a position. Its ID fields are | |
| 716 assumed to be server IDs. This entry will have its parent_id_string, | |
| 717 position_in_parent and unique_position fields updated; its | |
| 718 insert_after_item_id field will be cleared. | |
| 719 parent_id: The ID of the entry intended as the new parent. | |
| 720 """ | |
| 721 | |
| 722 entry.parent_id_string = parent_id | |
| 723 if not entry.HasField('position_in_parent'): | |
| 724 entry.position_in_parent = 1337 # A debuggable, distinctive default. | |
| 725 entry.ClearField('insert_after_item_id') | |
| 726 | |
| 727 def _ItemExists(self, id_string): | |
| 728 """Determine whether an item exists in the changelog.""" | |
| 729 return id_string in self._entries | |
| 730 | |
| 731 def _CreatePermanentItem(self, spec): | |
| 732 """Create one permanent item from its spec, if it doesn't exist. | |
| 733 | |
| 734 The resulting item is added to the changelog. | |
| 735 | |
| 736 Args: | |
| 737 spec: A PermanentItem object holding the properties of the item to create. | |
| 738 """ | |
| 739 id_string = self._ServerTagToId(spec.tag) | |
| 740 if self._ItemExists(id_string): | |
| 741 return | |
| 742 print 'Creating permanent item: %s' % spec.name | |
| 743 entry = sync_pb2.SyncEntity() | |
| 744 entry.id_string = id_string | |
| 745 entry.non_unique_name = spec.name | |
| 746 entry.name = spec.name | |
| 747 entry.server_defined_unique_tag = spec.tag | |
| 748 entry.folder = True | |
| 749 entry.deleted = False | |
| 750 entry.specifics.CopyFrom(GetDefaultEntitySpecifics(spec.sync_type)) | |
| 751 self._WritePosition(entry, self._ServerTagToId(spec.parent_tag)) | |
| 752 self._SaveEntry(entry) | |
| 753 | |
| 754 def _CreateDefaultPermanentItems(self, requested_types): | |
| 755 """Ensure creation of all default permanent items for a given set of types. | |
| 756 | |
| 757 Args: | |
| 758 requested_types: A list of sync data types from ALL_TYPES. | |
| 759 All default permanent items of only these types will be created. | |
| 760 """ | |
| 761 for spec in self._PERMANENT_ITEM_SPECS: | |
| 762 if spec.sync_type in requested_types and spec.create_by_default: | |
| 763 self._CreatePermanentItem(spec) | |
| 764 | |
| 765 def ResetStoreBirthday(self): | |
| 766 """Resets the store birthday to a random value.""" | |
| 767 # TODO(nick): uuid.uuid1() is better, but python 2.5 only. | |
| 768 self.store_birthday = '%0.30f' % random.random() | |
| 769 | |
| 770 def StoreBirthday(self): | |
| 771 """Gets the store birthday.""" | |
| 772 return self.store_birthday | |
| 773 | |
| 774 def GetChanges(self, sieve): | |
| 775 """Get entries which have changed, oldest first. | |
| 776 | |
| 777 The returned entries are limited to being _BATCH_SIZE many. The entries | |
| 778 are returned in strict version order. | |
| 779 | |
| 780 Args: | |
| 781 sieve: An update sieve to use to filter out updates the client | |
| 782 has already seen. | |
| 783 Returns: | |
| 784 A tuple of (version, entries, changes_remaining). Version is a new | |
| 785 timestamp value, which should be used as the starting point for the | |
| 786 next query. Entries is the batch of entries meeting the current | |
| 787 timestamp query. Changes_remaining indicates the number of changes | |
| 788 left on the server after this batch. | |
| 789 """ | |
| 790 if not sieve.HasAnyTimestamp(): | |
| 791 return (0, [], 0) | |
| 792 min_timestamp = sieve.GetMinTimestamp() | |
| 793 first_time_types = sieve.GetFirstTimeTypes() | |
| 794 self._CreateDefaultPermanentItems(first_time_types) | |
| 795 # The mobile bookmarks folder is not created by default; create it only | |
| 796 # when the client has requested it. | |
| 797 if (sieve.GetCreateMobileBookmarks() and | |
| 798 first_time_types.count(BOOKMARK) > 0): | |
| 799 self.TriggerCreateSyncedBookmarks() | |
| 800 | |
| 801 self.TriggerAcknowledgeManagedUsers() | |
| 802 | |
| 803 change_log = sorted(self._entries.values(), | |
| 804 key=operator.attrgetter('version')) | |
| 805 new_changes = [x for x in change_log if x.version > min_timestamp] | |
| 806 # Pick batch_size new changes, and then filter them. This matches | |
| 807 # the RPC behavior of the production sync server. | |
| 808 batch = new_changes[:self._BATCH_SIZE] | |
| 809 if not batch: | |
| 810 # Client is up to date. | |
| 811 return (min_timestamp, [], 0) | |
| 812 | |
| 813 # Restrict batch to requested types. Tombstones are untyped | |
| 814 # and will always get included. | |
| 815 filtered = [copy.deepcopy(item) for item in batch | |
| 816 if item.deleted or sieve.ClientWantsItem(item)] | |
| 817 | |
| 818 # The new client timestamp is the timestamp of the last item in the | |
| 819 # batch, even if that item was filtered out. | |
| 820 return (batch[-1].version, filtered, len(new_changes) - len(batch)) | |
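A first-contact sketch (hypothetical assertions, assuming the `sync_pb2` bindings): a fresh client asking for bookmarks from timestamp zero receives exactly the three default bookmark permanent folders, with no changes remaining.

```python
model = SyncDataModel()
request = sync_pb2.GetUpdatesMessage()
request.from_timestamp = 0
request.requested_types.bookmark.SetInParent()

version, entries, remaining = model.GetChanges(UpdateSieve(request))
# 'Bookmarks', 'Bookmark Bar' and 'Other Bookmarks'; 'Synced Bookmarks'
# is skipped because its spec has create_by_default=False.
assert len(entries) == 3 and remaining == 0 and version == 3
```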
| 821 | |
| 822 def GetKeystoreKeys(self): | |
| 823 """Returns the encryption keys for this account.""" | |
| 824 print "Returning encryption keys: %s" % self._keys | |
| 825 return self._keys | |
| 826 | |
| 827 def _CopyOverImmutableFields(self, entry): | |
| 828 """Preserve immutable fields by copying pre-commit state. | |
| 829 | |
| 830 Args: | |
| 831 entry: A sync entity from the client. | |
| 832 """ | |
| 833 if entry.id_string in self._entries: | |
| 834 if self._entries[entry.id_string].HasField( | |
| 835 'server_defined_unique_tag'): | |
| 836 entry.server_defined_unique_tag = ( | |
| 837 self._entries[entry.id_string].server_defined_unique_tag) | |
| 838 | |
| 839 def _CheckVersionForCommit(self, entry): | |
| 840 """Perform an optimistic concurrency check on the version number. | |
| 841 | |
| 842 Clients are only allowed to commit if they report having seen the most | |
| 843 recent version of an object. | |
| 844 | |
| 845 Args: | |
| 846 entry: A sync entity from the client. It is assumed that ID fields | |
| 847 have been converted to server IDs. | |
| 848 Returns: | |
| 849 A boolean value indicating whether the client's version matches the | |
| 850 newest server version for the given entry. | |
| 851 """ | |
| 852 if entry.id_string in self._entries: | |
| 853 # Allow edits/deletes if the version matches, and any undeletion. | |
| 854 return (self._entries[entry.id_string].version == entry.version or | |
| 855 self._entries[entry.id_string].deleted) | |
| 856 else: | |
| 857 # Allow unknown ID only if the client thinks it's new too. | |
| 858 return entry.version == 0 | |
| 859 | |
| 860 def _CheckParentIdForCommit(self, entry): | |
| 861 """Check that the parent ID referenced in a SyncEntity actually exists. | |
| 862 | |
| 863 Args: | |
| 864 entry: A sync entity from the client. It is assumed that ID fields | |
| 865 have been converted to server IDs. | |
| 866 Returns: | |
| 867 A boolean value indicating whether the entity's parent ID is an object | |
| 868 that actually exists (and is not deleted) in the current account state. | |
| 869 """ | |
| 870 if entry.parent_id_string == ROOT_ID: | |
| 871 # This is generally allowed. | |
| 872 return True | |
| 873 if (not entry.HasField('parent_id_string') and | |
| 874 entry.HasField('client_defined_unique_tag')): | |
| 875 return True # Unique client tag items do not need to specify a parent. | |
| 876 if entry.parent_id_string not in self._entries: | |
| 877 print 'Warning: Client sent unknown ID. Should never happen.' | |
| 878 return False | |
| 879 if entry.parent_id_string == entry.id_string: | |
| 880 print 'Warning: Client sent circular reference. Should never happen.' | |
| 881 return False | |
| 882 if self._entries[entry.parent_id_string].deleted: | |
| 883 # This can happen in a race condition between two clients. | |
| 884 return False | |
| 885 if not self._entries[entry.parent_id_string].folder: | |
| 886 print 'Warning: Client sent non-folder parent. Should never happen.' | |
| 887 return False | |
| 888 return True | |
| 889 | |
| 890 def _RewriteIdsAsServerIds(self, entry, cache_guid, commit_session): | |
| 891 """Convert ID fields in a client sync entry to server IDs. | |
| 892 | |
| 893 A commit batch sent by a client may contain new items for which the | |
| 894 server has not generated IDs yet. And within a commit batch, later | |
| 895 items are allowed to refer to earlier items. This method will | |
| 896 generate server IDs for new items, as well as rewrite references | |
| 897 to items whose server IDs were generated earlier in the batch. | |
| 898 | |
| 899 Args: | |
| 900 entry: The client sync entry to modify. | |
| 901 cache_guid: The globally unique ID of the client that sent this | |
| 902 commit request. | |
| 903 commit_session: A dictionary mapping the original IDs to the new server | |
| 904 IDs, for any items committed earlier in the batch. | |
| 905 """ | |
| 906 if entry.version == 0: | |
| 907 data_type = GetEntryType(entry) | |
| 908 if entry.HasField('client_defined_unique_tag'): | |
| 909 # When present, this should determine the item's ID. | |
| 910 new_id = self._ClientTagToId(data_type, entry.client_defined_unique_tag) | |
| 911 else: | |
| 912 new_id = self._ClientIdToId(data_type, cache_guid, entry.id_string) | |
| 913 entry.originator_cache_guid = cache_guid | |
| 914 entry.originator_client_item_id = entry.id_string | |
| 915 commit_session[entry.id_string] = new_id # Remember the remapping. | |
| 916 entry.id_string = new_id | |
| 917 if entry.parent_id_string in commit_session: | |
| 918 entry.parent_id_string = commit_session[entry.parent_id_string] | |
| 919 if entry.insert_after_item_id in commit_session: | |
| 920 entry.insert_after_item_id = commit_session[entry.insert_after_item_id] | |
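A sketch of the within-batch remapping (hypothetical local IDs): the child is committed after its parent and still refers to the parent's client-local ID, which `commit_session` rewrites.

```python
model = SyncDataModel()
commit_session = {}

parent = sync_pb2.SyncEntity()
parent.id_string = 'local_parent'  # Client-local ID; version 0 means new.
parent.specifics.bookmark.SetInParent()
model._RewriteIdsAsServerIds(parent, 'cache-guid', commit_session)

child = sync_pb2.SyncEntity()
child.id_string = 'local_child'
child.parent_id_string = 'local_parent'
child.specifics.bookmark.SetInParent()
model._RewriteIdsAsServerIds(child, 'cache-guid', commit_session)

assert child.parent_id_string == parent.id_string  # Remapped to a server ID.
```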
| 921 | |
| 922 def ValidateCommitEntries(self, entries): | |
| 923 """Raise an exception if a commit batch contains any global errors. | |
| 924 | |
| 925 Arguments: | |
| 926 entries: an iterable containing commit-form SyncEntity protocol buffers. | |
| 927 | |
| 928 Raises: | |
| 929 MigrationDoneError: if any of the entries reference a recently-migrated | |
| 930 datatype. | |
| 931 """ | |
| 932 server_ids_in_commit = set() | |
| 933 local_ids_in_commit = set() | |
| 934 for entry in entries: | |
| 935 if entry.version: | |
| 936 server_ids_in_commit.add(entry.id_string) | |
| 937 else: | |
| 938 local_ids_in_commit.add(entry.id_string) | |
| 939 if entry.HasField('parent_id_string'): | |
| 940 if entry.parent_id_string not in local_ids_in_commit: | |
| 941 server_ids_in_commit.add(entry.parent_id_string) | |
| 942 | |
| 943 versions_present = {} | |
| 944 for server_id in server_ids_in_commit: | |
| 945 parsed = self._ExtractIdInfo(server_id) | |
| 946 if parsed: | |
| 947 datatype, version, _ = parsed | |
| 948 versions_present.setdefault(datatype, []).append(version) | |
| 949 | |
| 950 self.migration_history.CheckAllCurrent( | |
| 951 dict((k, min(v)) for k, v in versions_present.iteritems())) | |
| 952 | |
| 953 def CommitEntry(self, entry, cache_guid, commit_session): | |
| 954 """Attempt to commit one entry to the user's account. | |
| 955 | |
| 956 Args: | |
| 957 entry: A SyncEntity protobuf representing desired object changes. | |
| 958 cache_guid: A string value uniquely identifying the client; this | |
| 959 is used for ID generation and will determine the originator_cache_guid | |
| 960 if the entry is new. | |
| 961 commit_session: A dictionary mapping client IDs to server IDs for any | |
| 962 objects committed earlier this session. If the entry gets a new ID | |
| 963 during commit, the change will be recorded here. | |
| 964 Returns: | |
| 965 A SyncEntity reflecting the post-commit value of the entry, or None | |
| 966 if the entry was not committed due to an error. | |
| 967 """ | |
| 968 entry = copy.deepcopy(entry) | |
| 969 | |
| 970 # Generate server IDs for this entry, and write generated server IDs | |
| 971 # from earlier entries into the message's fields, as appropriate. The | |
| 972 # ID generation state is stored in 'commit_session'. | |
| 973 self._RewriteIdsAsServerIds(entry, cache_guid, commit_session) | |
| 974 | |
| 975 # Sets the parent ID field for a client-tagged item. The client is allowed | |
| 976 # to not specify parents for these types of items. The server can figure | |
| 977 # out on its own what the parent ID for this entry should be. | |
| 978 self._RewriteParentIdForUniqueClientEntry(entry) | |
| 979 | |
| 980 # Perform the optimistic concurrency check on the entry's version number. | |
| 981 # Clients are not allowed to commit unless they indicate that they've seen | |
| 982 # the most recent version of an object. | |
| 983 if not self._CheckVersionForCommit(entry): | |
| 984 return None | |
| 985 | |
| 986 # Check the validity of the parent ID; it must exist at this point. | |
| 987 # TODO(nick): Implement cycle detection and resolution. | |
| 988 if not self._CheckParentIdForCommit(entry): | |
| 989 return None | |
| 990 | |
| 991 self._CopyOverImmutableFields(entry) | |
| 992 | |
| 993 # At this point, the commit is definitely going to happen. | |
| 994 | |
| 995 # Deletion works by storing a limited record for an entry, called a | |
| 996 # tombstone. A sync server must track deleted IDs forever, since it does | |
| 997 # not keep track of client knowledge (there's no deletion ACK event). | |
| 998 if entry.deleted: | |
| 999 def MakeTombstone(id_string, datatype): | |
| 1000 """Make a tombstone entry that will replace the entry being deleted. | |
| 1001 | |
| 1002 Args: | |
| 1003 id_string: The ID of the SyncEntity to be deleted. | |
| datatype: The sync type (a value from ALL_TYPES) of that entity. | |
| 1004 Returns: | |
| 1005 A new SyncEntity reflecting the fact that the entry is deleted. | |
| 1006 """ | |
| 1007 # Only the ID, version and deletion state are preserved on a tombstone. | |
| 1008 tombstone = sync_pb2.SyncEntity() | |
| 1009 tombstone.id_string = id_string | |
| 1010 tombstone.deleted = True | |
| 1011 tombstone.name = '' | |
| 1012 tombstone.specifics.CopyFrom(GetDefaultEntitySpecifics(datatype)) | |
| 1013 return tombstone | |
| 1014 | |
| 1015 def IsChild(child_id): | |
| 1016 """Check if a SyncEntity is a child of entry, or any of its children. | |
| 1017 | |
| 1018 Args: | |
| 1019 child_id: Index of the SyncEntity that is a possible child of entry. | |
| 1020 Returns: | |
| 1021 True if it is a child; false otherwise. | |
| 1022 """ | |
| 1023 if child_id not in self._entries: | |
| 1024 return False | |
| 1025 if self._entries[child_id].parent_id_string == entry.id_string: | |
| 1026 return True | |
| 1027 return IsChild(self._entries[child_id].parent_id_string) | |
| 1028 | |
| 1029 # Identify any children entry might have. | |
| 1030 child_ids = [child.id_string for child in self._entries.itervalues() | |
| 1031 if IsChild(child.id_string)] | |
| 1032 | |
| 1033 # Mark all children that were identified as deleted. | |
| 1034 for child_id in child_ids: | |
| 1035 datatype = GetEntryType(self._entries[child_id]) | |
| 1036 self._SaveEntry(MakeTombstone(child_id, datatype)) | |
| 1037 | |
| 1038 # Delete entry itself. | |
| 1039 datatype = GetEntryType(self._entries[entry.id_string]) | |
| 1040 entry = MakeTombstone(entry.id_string, datatype) | |
| 1041 else: | |
| 1042 # Comments in sync.proto detail how the representation of positional | |
| 1043 # ordering works. | |
| 1044 # | |
| 1045 # We've almost fully deprecated the 'insert_after_item_id' field. | |
| 1046 # The 'position_in_parent' field is also deprecated, but as of Jan 2013 | |
| 1047 # is still in common use. The 'unique_position' field is the latest | |
| 1048 # and greatest in positioning technology. | |
| 1049 # | |
| 1050 # This server supports 'position_in_parent' and 'unique_position'. | |
| 1051 self._WritePosition(entry, entry.parent_id_string) | |
| 1052 | |
| 1053 # Preserve the originator info, which the client is not required to send | |
| 1054 # when updating. | |
| 1055 base_entry = self._entries.get(entry.id_string) | |
| 1056 if base_entry and not entry.HasField('originator_cache_guid'): | |
| 1057 entry.originator_cache_guid = base_entry.originator_cache_guid | |
| 1058 entry.originator_client_item_id = base_entry.originator_client_item_id | |
| 1059 | |
| 1060 # Store the current time since the Unix epoch in milliseconds. | |
| 1061 entry.mtime = (int((time.mktime(time.gmtime()) - | |
| 1062 (time.mktime(FIRST_DAY_UNIX_TIME_EPOCH) - ONE_DAY_SECONDS))*1000)) | |
| 1063 | |
| 1064 # Commit the change. This also updates the version number. | |
| 1065 self._SaveEntry(entry) | |
| 1066 return entry | |
| 1067 | |
| 1068 def _RewriteVersionInId(self, id_string): | |
| 1069 """Rewrites an ID so that its migration version becomes current.""" | |
| 1070 parsed_id = self._ExtractIdInfo(id_string) | |
| 1071 if not parsed_id: | |
| 1072 return id_string | |
| 1073 datatype, old_migration_version, inner_id = parsed_id | |
| 1074 return self._MakeCurrentId(datatype, inner_id) | |
| 1075 | |
| 1076 def _RewriteParentIdForUniqueClientEntry(self, entry): | |
| 1077 """Sets the entry's parent ID field to the appropriate value. | |
| 1078 | |
| 1079 The client must always set enough of the specifics of the entries it sends | |
| 1080 up such that the server can identify its type. (See crbug.com/373859) | |
| 1081 | |
| 1082 The client is under no obligation to set the parent ID field. The server | |
| 1083 can always infer what the appropriate parent for this model type should be. | |
| 1084 Having the client not send the parent ID is a step towards the removal of | |
| 1085 type root nodes. (See crbug.com/373869) | |
| 1086 | |
| 1087 This server implements these features by "faking" the existence of a parent | |
| 1088 ID early on in commit processing. | |
| 1089 | |
| 1090 This function has no effect on non-client-tagged items. | |
| 1091 """ | |
| 1092 if not entry.HasField('client_defined_unique_tag'): | |
| 1093 return # Skip this processing for non-client-tagged types. | |
| 1094 data_type = GetEntryType(entry) | |
| 1095 entry.parent_id_string = self._TypeToTypeRootId(data_type) | |
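A sketch (hypothetical tag) of the parent inference for client-tagged items:

```python
model = SyncDataModel()
entry = sync_pb2.SyncEntity()
entry.client_defined_unique_tag = 'some-client-tag'
entry.specifics.preference.SetInParent()

model._RewriteParentIdForUniqueClientEntry(entry)
assert entry.parent_id_string == model._TypeToTypeRootId(PREFERENCE)
```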
| 1096 | |
| 1097 def TriggerMigration(self, datatypes): | |
| 1098 """Cause a migration to occur for a set of datatypes on this account. | |
| 1099 | |
| 1100 Clients will see the MIGRATION_DONE error for these datatypes until they | |
| 1101 resync them. | |
| 1102 """ | |
| 1103 self.migration_history.Bump(datatypes) | |
| 1104 all_entries = self._entries.values() | |
| 1105 self._entries.clear() | |
| 1106 for entry in all_entries: | |
| 1107 new_id = self._RewriteVersionInId(entry.id_string) | |
| 1108 entry.id_string = new_id | |
| 1109 if entry.HasField('parent_id_string'): | |
| 1110 entry.parent_id_string = self._RewriteVersionInId( | |
| 1111 entry.parent_id_string) | |
| 1112 self._entries[entry.id_string] = entry | |
| 1113 | |
| 1114 def TriggerSyncTabFavicons(self): | |
| 1115 """Set the 'sync_tab_favicons' field to this account's nigori node. | |
| 1116 | |
| 1117 If the field is not currently set, will write a new nigori node entry | |
| 1118 with the field set. Else does nothing. | |
| 1119 """ | |
| 1120 | |
| 1121 nigori_tag = "google_chrome_nigori" | |
| 1122 nigori_original = self._entries.get(self._ServerTagToId(nigori_tag)) | |
| 1123 if (nigori_original.specifics.nigori.sync_tab_favicons): | |
| 1124 return | |
| 1125 nigori_new = copy.deepcopy(nigori_original) | |
| 1126 nigori_new.specifics.nigori.sync_tab_favicons = True | |
| 1127 self._SaveEntry(nigori_new) | |
| 1128 | |
| 1129 def TriggerCreateSyncedBookmarks(self): | |
| 1130 """Create the Synced Bookmarks folder under the Bookmarks permanent item. | |
| 1131 | |
| 1132 Clients will then receive the Synced Bookmarks folder on future | |
| 1133 GetUpdates, and new bookmarks can be added within the Synced Bookmarks | |
| 1134 folder. | |
| 1135 """ | |
| 1136 | |
| 1137 synced_bookmarks_spec, = [spec for spec in self._PERMANENT_ITEM_SPECS | |
| 1138 if spec.name == "Synced Bookmarks"] | |
| 1139 self._CreatePermanentItem(synced_bookmarks_spec) | |
| 1140 | |
| 1141 def TriggerEnableKeystoreEncryption(self): | |
| 1142 """Create the keystore_encryption experiment entity and enable it. | |
| 1143 | |
| 1144 A new entity within the EXPERIMENTS datatype is created with the unique | |
| 1145 client tag "keystore_encryption" if it doesn't already exist. The | |
| 1146 keystore_encryption message is then filled with |enabled| set to true. | |
| 1147 """ | |
| 1148 | |
| 1149 experiment_id = self._ServerTagToId("google_chrome_experiments") | |
| 1150 keystore_encryption_id = self._ClientTagToId( | |
| 1151 EXPERIMENTS, | |
| 1152 KEYSTORE_ENCRYPTION_EXPERIMENT_TAG) | |
| 1153 keystore_entry = self._entries.get(keystore_encryption_id) | |
| 1154 if keystore_entry is None: | |
| 1155 keystore_entry = sync_pb2.SyncEntity() | |
| 1156 keystore_entry.id_string = keystore_encryption_id | |
| 1157 keystore_entry.name = "Keystore Encryption" | |
| 1158 keystore_entry.client_defined_unique_tag = ( | |
| 1159 KEYSTORE_ENCRYPTION_EXPERIMENT_TAG) | |
| 1160 keystore_entry.folder = False | |
| 1161 keystore_entry.deleted = False | |
| 1162 keystore_entry.specifics.CopyFrom(GetDefaultEntitySpecifics(EXPERIMENTS)) | |
| 1163 self._WritePosition(keystore_entry, experiment_id) | |
| 1164 | |
| 1165 keystore_entry.specifics.experiments.keystore_encryption.enabled = True | |
| 1166 | |
| 1167 self._SaveEntry(keystore_entry) | |
| 1168 | |
| 1169 def TriggerRotateKeystoreKeys(self): | |
| 1170 """Rotate the current set of keystore encryption keys. | |
| 1171 | |
| 1172 |self._keys| will have a new random encryption key appended to it. We touch | |
| 1173 the nigori node so that each client will receive the new encryption keys | |
| 1174 only once. | |
| 1175 """ | |
| 1176 | |
| 1177 # Add a new encryption key. | |
| 1178 self._keys += [MakeNewKeystoreKey(), ] | |
| 1179 | |
| 1180 # Increment the nigori node's timestamp, so clients will get the new keys | |
| 1181 # on their next GetUpdates (any time the nigori node is sent back, we also | |
| 1182 # send back the keystore keys). | |
| 1183 nigori_tag = "google_chrome_nigori" | |
| 1184 self._SaveEntry(self._entries.get(self._ServerTagToId(nigori_tag))) | |
| 1185 | |
| 1186 def TriggerAcknowledgeManagedUsers(self): | |
| 1187 """Set the "acknowledged" flag for any managed user entities that don't have | |
| 1188 it set already. | |
| 1189 """ | |
| 1190 | |
| 1191 if not self.acknowledge_managed_users: | |
| 1192 return | |
| 1193 | |
| 1194 managed_users = [copy.deepcopy(entry) for entry in self._entries.values() | |
| 1195 if entry.specifics.HasField('managed_user') | |
| 1196 and not entry.specifics.managed_user.acknowledged] | |
| 1197 for user in managed_users: | |
| 1198 user.specifics.managed_user.acknowledged = True | |
| 1199 self._SaveEntry(user) | |
| 1200 | |
| 1201 def TriggerEnablePreCommitGetUpdateAvoidance(self): | |
| 1202 """Sets the experiment to enable pre-commit GetUpdate avoidance.""" | |
| 1203 experiment_id = self._ServerTagToId("google_chrome_experiments") | |
| 1204 pre_commit_gu_avoidance_id = self._ClientTagToId( | |
| 1205 EXPERIMENTS, | |
| 1206 PRE_COMMIT_GU_AVOIDANCE_EXPERIMENT_TAG) | |
| 1207 entry = self._entries.get(pre_commit_gu_avoidance_id) | |
| 1208 if entry is None: | |
| 1209 entry = sync_pb2.SyncEntity() | |
| 1210 entry.id_string = pre_commit_gu_avoidance_id | |
| 1211 entry.name = "Pre-commit GU avoidance" | |
| 1212 entry.client_defined_unique_tag = PRE_COMMIT_GU_AVOIDANCE_EXPERIMENT_TAG | |
| 1213 entry.folder = False | |
| 1214 entry.deleted = False | |
| 1215 entry.specifics.CopyFrom(GetDefaultEntitySpecifics(EXPERIMENTS)) | |
| 1216 self._WritePosition(entry, experiment_id) | |
| 1217 entry.specifics.experiments.pre_commit_update_avoidance.enabled = True | |
| 1218 self._SaveEntry(entry) | |
| 1219 | |
| 1220 def SetInducedError(self, error, error_frequency, | |
| 1221 sync_count_before_errors): | |
| 1222 self.induced_error = error | |
| 1223 self.induced_error_frequency = error_frequency | |
| 1224 self.sync_count_before_errors = sync_count_before_errors | |
| 1225 | |
| 1226 def GetInducedError(self): | |
| 1227 return self.induced_error | |
| 1228 | |
| 1229 def _GetNextVersionNumber(self): | |
| 1230 """Set the version to one more than the greatest version number seen.""" | |
| 1231 entries = sorted(self._entries.values(), key=operator.attrgetter('version')) | |
| 1232 if len(entries) < 1: | |
| 1233 raise ClientNotConnectedError | |
| 1234 return entries[-1].version + 1 | |
| 1235 | |
| 1236 | |
| 1237 class TestServer(object): | |
| 1238 """An object to handle requests for one (and only one) Chrome Sync account. | |
| 1239 | |
| 1240 TestServer consumes the sync command messages that are the outermost | |
| 1241 layers of the protocol, performs the corresponding actions on its | |
| 1242 SyncDataModel, and constructs an appropriate response message. | |
| 1243 """ | |
| 1244 | |
| 1245 def __init__(self): | |
| 1246 # The implementation supports exactly one account; its state is here. | |
| 1247 self.account = SyncDataModel() | |
| 1248 self.account_lock = threading.Lock() | |
| 1249 # Clients that have talked to us: a map from the full client ID | |
| 1250 # to its nickname. | |
| 1251 self.clients = {} | |
| 1252 self.client_name_generator = ('+' * times + chr(c) | |
| 1253 for times in xrange(0, sys.maxint) for c in xrange(ord('A'), ord('Z') + 1)) | |
| 1254 self.transient_error = False | |
| 1255 self.sync_count = 0 | |
| 1256 # Gaia OAuth2 Token fields and their default values. | |
| 1257 self.response_code = 200 | |
| 1258 self.request_token = 'rt1' | |
| 1259 self.access_token = 'at1' | |
| 1260 self.expires_in = 3600 | |
| 1261 self.token_type = 'Bearer' | |
| 1262 # The ClientCommand to send back on each ServerToClientResponse. If set to | |
| 1263 # None, no ClientCommand should be sent. | |
| 1264 self._client_command = None | |
| 1265 | |
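| # A minimal driving sketch (illustrative; a real raw_request must be a | |
| # fully populated, serialized sync_pb2.ClientToServerMessage): | |
| #   server = TestServer() | |
| #   request = sync_pb2.ClientToServerMessage() | |
| #   request.message_contents = sync_pb2.ClientToServerMessage.GET_UPDATES | |
| #   # ...populate the remaining required fields and request.get_updates... | |
| #   code, raw_reply = server.HandleCommand('?client_id=c1', | |
| #                                          request.SerializeToString()) | |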
| 1266 | |
| 1267 def GetShortClientName(self, query): | |
| 1268 parsed = urlparse.parse_qs(query[query.find('?')+1:]) | |
| 1269 client_id = parsed.get('client_id') | |
| 1270 if not client_id: | |
| 1271 return '?' | |
| 1272 client_id = client_id[0] | |
| 1273 if client_id not in self.clients: | |
| 1274 self.clients[client_id] = self.client_name_generator.next() | |
| 1275 return self.clients[client_id] | |
| 1276 | |
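| # For example, the first three distinct client_ids seen are nicknamed 'A', | |
| # 'B' and 'C'; after 'Z' the generator continues with '+A', '+B', and so on. | |
| | |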
| 1277 def CheckStoreBirthday(self, request): | |
| 1278 """Raises StoreBirthdayError if the request's birthday is a mismatch.""" | |
| 1279 if not request.HasField('store_birthday'): | |
| 1280 return | |
| 1281 if self.account.StoreBirthday() != request.store_birthday: | |
| 1282 raise StoreBirthdayError | |
| 1283 | |
| 1284 def CheckTransientError(self): | |
| 1285 """Raises TransientError if transient_error variable is set.""" | |
| 1286 if self.transient_error: | |
| 1287 raise TransientError | |
| 1288 | |
| 1289 def CheckSendError(self): | |
| 1290 """Raises SyncInducedError if needed.""" | |
| 1291 if (self.account.induced_error.error_type != | |
| 1292 sync_enums_pb2.SyncEnums.UNKNOWN): | |
| 1293 # ALWAYS means the given error is returned for every request. | |
| 1294 if self.account.induced_error_frequency == ERROR_FREQUENCY_ALWAYS: | |
| 1295 raise SyncInducedError | |
| 1296 # TWO_THIRDS means the FIRST two of every three requests return an | |
| 1297 # error. Don't change the order of failures: some test cases rely on | |
| 1298 # the first two being the failures rather than the last two. | |
| 1300 elif (self.account.induced_error_frequency == | |
| 1301 ERROR_FREQUENCY_TWO_THIRDS): | |
| 1302 if (((self.sync_count - | |
| 1303 self.account.sync_count_before_errors) % 3) != 0): | |
| 1304 raise SyncInducedError | |
| 1305 else: | |
| 1306 raise InducedErrorFrequencyNotDefined | |
| 1307 | |
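| # Worked example for the TWO_THIRDS case: with sync_count_before_errors at | |
| # 0, requests with sync_count 1 and 2 raise SyncInducedError, request 3 | |
| # succeeds, requests 4 and 5 fail again, and so on. | |
| | |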
| 1308 def HandleMigrate(self, path): | |
| 1309 query = urlparse.urlparse(path)[4] | |
| 1310 code = 200 | |
| 1311 self.account_lock.acquire() | |
| 1312 try: | |
| 1313 datatypes = [DataTypeStringToSyncTypeLoose(x) | |
| 1314 for x in urlparse.parse_qs(query).get('type', [])] | |
| 1315 if datatypes: | |
| 1316 self.account.TriggerMigration(datatypes) | |
| 1317 response = 'Migrated datatypes %s' % ( | |
| 1318 ' and '.join(SyncTypeToString(x).upper() for x in datatypes)) | |
| 1319 else: | |
| 1320 response = 'Please specify one or more <i>type=name</i> parameters' | |
| 1321 code = 400 | |
| 1322 except DataTypeIdNotRecognized: | |
| 1323 response = 'Could not interpret datatype name' | |
| 1324 code = 400 | |
| 1325 finally: | |
| 1326 self.account_lock.release() | |
| 1327 return (code, '<html><title>Migration: %d</title><H1>%d %s</H1></html>' % | |
| 1328 (code, code, response)) | |
| 1329 | |
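| # A hypothetical request: GET <path>?type=bookmark&type=theme migrates the | |
| # named datatypes (names are interpreted by DataTypeStringToSyncTypeLoose); | |
| # omitting the 'type' parameter yields a 400 with usage text. | |
| | |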
| 1330 def HandleSetInducedError(self, path): | |
| 1331 query = urlparse.urlparse(path)[4] | |
| 1332 self.account_lock.acquire() | |
| 1333 code = 200 | |
| 1334 response = 'Success' | |
| 1335 error = sync_pb2.ClientToServerResponse.Error() | |
| 1336 try: | |
| 1337 params = urlparse.parse_qs(query) | |
| 1338 error.error_type = int(params['error'][0]) | |
| 1339 error.action = int(params['action'][0]) | |
| 1340 error.url = params.get('url', [''])[0] | |
| 1341 error.error_description = params.get('error_description', [''])[0] | |
| 1342 if 'frequency' in params: | |
| 1343 error_frequency = int(params['frequency'][0]) | |
| 1344 else: | |
| 1345 error_frequency = ERROR_FREQUENCY_ALWAYS | |
| 1354 self.account.SetInducedError(error, error_frequency, self.sync_count) | |
| 1355 response = ('Error = %d, action = %d, url = %s, description = %s' % | |
| 1356 (error.error_type, error.action, | |
| 1357 error.url, | |
| 1358 error.error_description)) | |
| 1359 except (KeyError, ValueError): | |
| 1360 response = 'Could not parse url' | |
| 1361 code = 400 | |
| 1362 finally: | |
| 1363 self.account_lock.release() | |
| 1364 return (code, '<html><title>SetError: %d</title><H1>%d %s</H1></html>' % | |
| 1365 (code, code, response)) | |
| 1366 | |
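| # A hypothetical query string: ?error=<enum int>&action=<enum int> with | |
| # optional url, error_description and frequency parameters. 'error' and | |
| # 'action' are required; the others fall back to the defaults above. | |
| | |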
| 1367 def HandleCreateBirthdayError(self): | |
| 1368 self.account.ResetStoreBirthday() | |
| 1369 return ( | |
| 1370 200, | |
| 1371 '<html><title>Birthday error</title><H1>Birthday error</H1></html>') | |
| 1372 | |
| 1373 def HandleSetTransientError(self): | |
| 1374 self.transient_error = True | |
| 1375 return ( | |
| 1376 200, | |
| 1377 '<html><title>Transient error</title><H1>Transient error</H1></html>') | |
| 1378 | |
| 1379 def HandleSetSyncTabFavicons(self): | |
| 1380 """Set 'sync_tab_favicons' field of the nigori node for this account.""" | |
| 1381 self.account.TriggerSyncTabFavicons() | |
| 1382 return ( | |
| 1383 200, | |
| 1384 '<html><title>Tab Favicons</title><H1>Tab Favicons</H1></html>') | |
| 1385 | |
| 1386 def HandleCreateSyncedBookmarks(self): | |
| 1387 """Create the Synced Bookmarks folder under Bookmarks.""" | |
| 1388 self.account.TriggerCreateSyncedBookmarks() | |
| 1389 return ( | |
| 1390 200, | |
| 1391 '<html><title>Synced Bookmarks</title><H1>Synced Bookmarks</H1></html>') | |
| 1392 | |
| 1393 def HandleEnableKeystoreEncryption(self): | |
| 1394 """Enables the keystore encryption experiment.""" | |
| 1395 self.account.TriggerEnableKeystoreEncryption() | |
| 1396 return ( | |
| 1397 200, | |
| 1398 '<html><title>Enable Keystore Encryption</title>' | |
| 1399 '<H1>Enable Keystore Encryption</H1></html>') | |
| 1400 | |
| 1401 def HandleRotateKeystoreKeys(self): | |
| 1402 """Rotate the keystore encryption keys.""" | |
| 1403 self.account.TriggerRotateKeystoreKeys() | |
| 1404 return ( | |
| 1405 200, | |
| 1406 '<html><title>Rotate Keystore Keys</title>' | |
| 1407 '<H1>Rotate Keystore Keys</H1></html>') | |
| 1408 | |
| 1409 def HandleEnableManagedUserAcknowledgement(self): | |
| 1410 """Enable acknowledging newly created managed users.""" | |
| 1411 self.account.acknowledge_managed_users = True | |
| 1412 return ( | |
| 1413 200, | |
| 1414 '<html><title>Enable Managed User Acknowledgement</title>' | |
| 1415 '<h1>Enable Managed User Acknowledgement</h1></html>') | |
| 1416 | |
| 1417 def HandleEnablePreCommitGetUpdateAvoidance(self): | |
| 1418 """Enables the pre-commit GU avoidance experiment.""" | |
| 1419 self.account.TriggerEnablePreCommitGetUpdateAvoidance() | |
| 1420 return ( | |
| 1421 200, | |
| 1422 '<html><title>Enable pre-commit GU avoidance</title>' | |
| 1423 '<H1>Enable pre-commit GU avoidance</H1></html>') | |
| 1424 | |
| 1425 def HandleCommand(self, query, raw_request): | |
| 1426 """Decode and handle a sync command from a raw input of bytes. | |
| 1427 | |
| 1428 This is the main entry point for this class. It is safe to call this | |
| 1429 method from multiple threads. | |
| 1430 | |
| 1431 Args: | |
| query: The query string of the request URL, used only to identify the | |
| requesting client for logging (see GetShortClientName). | |
| 1432 raw_request: An iterable byte sequence to be interpreted as a sync | |
| 1433 protocol command. | |
| 1434 Returns: | |
| 1435 A tuple (response_code, raw_response); the first value is an HTTP | |
| 1436 result code, while the second value is a string of bytes which is the | |
| 1437 serialized reply to the command. | |
| 1438 """ | |
| 1439 self.account_lock.acquire() | |
| 1440 self.sync_count += 1 | |
| 1441 def print_context(direction): | |
| 1442 print '[Client %s %s %s.py]' % (self.GetShortClientName(query), direction, | |
| 1443 __name__), | |
| 1444 | |
| 1445 try: | |
| 1446 request = sync_pb2.ClientToServerMessage() | |
| 1447 request.MergeFromString(raw_request) | |
| 1448 contents = request.message_contents | |
| 1449 | |
| 1450 response = sync_pb2.ClientToServerResponse() | |
| 1451 response.error_code = sync_enums_pb2.SyncEnums.SUCCESS | |
| 1452 | |
| 1453 if self._client_command: | |
| 1454 response.client_command.CopyFrom(self._client_command) | |
| 1455 | |
| 1456 self.CheckStoreBirthday(request) | |
| 1457 response.store_birthday = self.account.store_birthday | |
| 1458 self.CheckTransientError() | |
| 1459 self.CheckSendError() | |
| 1460 | |
| 1461 print_context('->') | |
| 1462 | |
| 1463 if contents == sync_pb2.ClientToServerMessage.AUTHENTICATE: | |
| 1464 print 'Authenticate' | |
| 1465 # We accept any authentication token, and support only one account. | |
| 1466 # TODO(nick): Mock out the GAIA authentication as well; hook up here. | |
| 1467 response.authenticate.user.email = 'syncjuser@chromium' | |
| 1468 response.authenticate.user.display_name = 'Sync J User' | |
| 1469 elif contents == sync_pb2.ClientToServerMessage.COMMIT: | |
| 1470 print 'Commit %d item(s)' % len(request.commit.entries) | |
| 1471 self.HandleCommit(request.commit, response.commit) | |
| 1472 elif contents == sync_pb2.ClientToServerMessage.GET_UPDATES: | |
| 1473 print 'GetUpdates', | |
| 1474 self.HandleGetUpdates(request.get_updates, response.get_updates) | |
| 1475 print_context('<-') | |
| 1476 print '%d update(s)' % len(response.get_updates.entries) | |
| 1477 else: | |
| 1478 print 'Unrecognizable sync request!' | |
| 1479 return (400, None) # Bad request. | |
| 1480 return (200, response.SerializeToString()) | |
| 1481 except MigrationDoneError, error: | |
| 1482 print_context('<-') | |
| 1483 print 'MIGRATION_DONE: <%s>' % (ShortDatatypeListSummary(error.datatypes)) | |
| 1484 response = sync_pb2.ClientToServerResponse() | |
| 1485 response.store_birthday = self.account.store_birthday | |
| 1486 response.error_code = sync_enums_pb2.SyncEnums.MIGRATION_DONE | |
| 1487 response.migrated_data_type_id[:] = [ | |
| 1488 SyncTypeToProtocolDataTypeId(x) for x in error.datatypes] | |
| 1489 return (200, response.SerializeToString()) | |
| 1490 except StoreBirthdayError, error: | |
| 1491 print_context('<-') | |
| 1492 print 'NOT_MY_BIRTHDAY' | |
| 1493 response = sync_pb2.ClientToServerResponse() | |
| 1494 response.store_birthday = self.account.store_birthday | |
| 1495 response.error_code = sync_enums_pb2.SyncEnums.NOT_MY_BIRTHDAY | |
| 1496 return (200, response.SerializeToString()) | |
| 1497 except TransientError, error: | |
| 1498 # Deprecated; this will be removed once the test cases that rely on it are removed. | |
| 1499 print_context('<-') | |
| 1500 print 'TRANSIENT_ERROR' | |
| 1501 response.store_birthday = self.account.store_birthday | |
| 1502 response.error_code = sync_enums_pb2.SyncEnums.TRANSIENT_ERROR | |
| 1503 return (200, response.SerializeToString()) | |
| 1504 except SyncInducedError, error: | |
| 1505 print_context('<-') | |
| 1506 print 'INDUCED_ERROR' | |
| 1507 response.store_birthday = self.account.store_birthday | |
| 1508 error = self.account.GetInducedError() | |
| 1509 response.error.error_type = error.error_type | |
| 1510 response.error.url = error.url | |
| 1511 response.error.error_description = error.error_description | |
| 1512 response.error.action = error.action | |
| 1513 return (200, response.SerializeToString()) | |
| 1514 finally: | |
| 1515 self.account_lock.release() | |
| 1516 | |
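| # Decoding a reply (illustrative sketch): | |
| #   code, raw_reply = server.HandleCommand(query, raw_request) | |
| #   if code == 200: | |
| #     response = sync_pb2.ClientToServerResponse() | |
| #     response.MergeFromString(raw_reply) | |
| #     # response.error_code is SUCCESS, MIGRATION_DONE, etc. | |
| | |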
| 1517 def HandleCommit(self, commit_message, commit_response): | |
| 1518 """Respond to a Commit request by updating the user's account state. | |
| 1519 | |
| 1520 Commit attempts stop after the first error, returning a CONFLICT result | |
| 1521 for any unattempted entries. | |
| 1522 | |
| 1523 Args: | |
| 1524 commit_message: A sync_pb.CommitMessage protobuf holding the content | |
| 1525 of the client's request. | |
| 1526 commit_response: A sync_pb.CommitResponse protobuf into which a reply | |
| 1527 to the client request will be written. | |
| 1528 """ | |
| 1529 commit_response.SetInParent() | |
| 1530 batch_failure = False | |
| 1531 session = {} # Tracks ID renaming during the commit operation. | |
| 1532 guid = commit_message.cache_guid | |
| 1533 | |
| 1534 self.account.ValidateCommitEntries(commit_message.entries) | |
| 1535 | |
| 1536 for entry in commit_message.entries: | |
| 1537 server_entry = None | |
| 1538 if not batch_failure: | |
| 1539 # Try to commit the change to the account. | |
| 1540 server_entry = self.account.CommitEntry(entry, guid, session) | |
| 1541 | |
| 1542 # An entryresponse is added for each entry, in both the success and failure cases. | |
| 1543 reply = commit_response.entryresponse.add() | |
| 1544 if not server_entry: | |
| 1545 reply.response_type = sync_pb2.CommitResponse.CONFLICT | |
| 1546 reply.error_message = 'Conflict.' | |
| 1547 batch_failure = True # One failure halts the batch. | |
| 1548 else: | |
| 1549 reply.response_type = sync_pb2.CommitResponse.SUCCESS | |
| 1550 # These are the properties that the server is allowed to override | |
| 1551 # during commit; the client wants to know their values at the end | |
| 1552 # of the operation. | |
| 1553 reply.id_string = server_entry.id_string | |
| 1554 if not server_entry.deleted: | |
| 1555 # Note: the production server doesn't actually send the | |
| 1556 # parent_id_string on commit responses, so we don't either. | |
| 1557 reply.position_in_parent = server_entry.position_in_parent | |
| 1558 reply.version = server_entry.version | |
| 1559 reply.name = server_entry.name | |
| 1560 reply.non_unique_name = server_entry.non_unique_name | |
| 1561 else: | |
| 1562 reply.version = entry.version + 1 | |
| 1563 | |
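| # For example, committing three entries where the second one conflicts | |
| # yields entryresponses [SUCCESS, CONFLICT, CONFLICT]: once batch_failure | |
| # is set, the remaining entries are not attempted and also report CONFLICT. | |
| | |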
| 1564 def HandleGetUpdates(self, update_request, update_response): | |
| 1565 """Respond to a GetUpdates request by querying the user's account. | |
| 1566 | |
| 1567 Args: | |
| 1568 update_request: A sync_pb.GetUpdatesMessage protobuf holding the content | |
| 1569 of the client's request. | |
| 1570 update_response: A sync_pb.GetUpdatesResponse protobuf into which a reply | |
| 1571 to the client request will be written. | |
| 1572 """ | |
| 1573 update_response.SetInParent() | |
| 1574 update_sieve = UpdateSieve(update_request, self.account.migration_history) | |
| 1575 | |
| 1576 print CallerInfoToString(update_request.caller_info.source), | |
| 1577 print update_sieve.SummarizeRequest() | |
| 1578 | |
| 1579 update_sieve.CheckMigrationState() | |
| 1580 | |
| 1581 new_timestamp, entries, remaining = self.account.GetChanges(update_sieve) | |
| 1582 | |
| 1583 update_response.changes_remaining = remaining | |
| 1584 sending_nigori_node = False | |
| 1585 for entry in entries: | |
| 1586 if entry.name == 'Nigori': | |
| 1587 sending_nigori_node = True | |
| 1588 reply = update_response.entries.add() | |
| 1589 reply.CopyFrom(entry) | |
| 1590 update_sieve.SaveProgress(new_timestamp, update_response) | |
| 1591 | |
| 1592 if update_request.need_encryption_key or sending_nigori_node: | |
| 1593 update_response.encryption_keys.extend(self.account.GetKeystoreKeys()) | |
| 1594 | |
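| # Note the piggybacking: whenever the nigori node is among the returned | |
| # entries, or the client sets need_encryption_key, the current keystore | |
| # key list rides along in encryption_keys. | |
| | |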
| 1595 def HandleGetOauth2Token(self): | |
| 1596 return (int(self.response_code), | |
| 1597 '{\n' | |
| 1598 ' "refresh_token": "' + self.request_token + '",\n' | |
| 1599 ' "access_token": "' + self.access_token + '",\n' | |
| 1600 ' "expires_in": ' + str(self.expires_in) + ',\n' | |
| 1601 ' "token_type": "' + self.token_type + '"\n' | |
| 1602 '}') | |
| 1603 | |
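| # With the defaults above, the body is (modulo whitespace): | |
| #   { "refresh_token": "rt1", "access_token": "at1", | |
| #     "expires_in": 3600, "token_type": "Bearer" } | |
| | |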
| 1604 def HandleSetOauth2Token(self, response_code, request_token, access_token, | |
| 1605 expires_in, token_type): | |
| 1606 if response_code != 0: | |
| 1607 self.response_code = response_code | |
| 1608 if request_token != '': | |
| 1609 self.request_token = request_token | |
| 1610 if access_token != '': | |
| 1611 self.access_token = access_token | |
| 1612 if expires_in != 0: | |
| 1613 self.expires_in = expires_in | |
| 1614 if token_type != '': | |
| 1615 self.token_type = token_type | |
| 1616 | |
| 1617 return (200, | |
| 1618 '<html><title>Set OAuth2 Token</title>' | |
| 1619 '<H1>This server will now return the OAuth2 Token:</H1>' | |
| 1620 '<p>response_code: ' + str(self.response_code) + '</p>' | |
| 1621 '<p>request_token: ' + self.request_token + '</p>' | |
| 1622 '<p>access_token: ' + self.access_token + '</p>' | |
| 1623 '<p>expires_in: ' + str(self.expires_in) + '</p>' | |
| 1624 '<p>token_type: ' + self.token_type + '</p>' | |
| 1625 '</html>') | |
| 1626 | |
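| # Zero and empty-string arguments are "keep current value" sentinels, so | |
| # e.g. HandleSetOauth2Token(0, '', '', 7200, '') updates only expires_in. | |
| | |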
| 1627 def CustomizeClientCommand(self, sessions_commit_delay_seconds): | |
| 1628 """Customizes the value of the ClientCommand of ServerToClientResponse. | |
| 1629 | |
| 1630 Currently, this only allows for changing the sessions_commit_delay_seconds | |
| 1631 field. | |
| 1632 | |
| 1633 Args: | |
| 1634 sessions_commit_delay_seconds: The desired sync delay time for sessions. | |
| 1635 """ | |
| 1636 if not self._client_command: | |
| 1637 self._client_command = client_commands_pb2.ClientCommand() | |
| 1638 | |
| 1639 self._client_command.sessions_commit_delay_seconds = ( | |
| 1640 sessions_commit_delay_seconds) | |
| 1641 return self._client_command | |
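| | |
| # For example, CustomizeClientCommand(60) makes every subsequent | |
| # ServerToClientResponse carry a ClientCommand whose | |
| # sessions_commit_delay_seconds is 60. | |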