Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(233)

Side by Side Diff: net/tools/testserver/chromiumsync_test.py

Issue 1622012: Python sync server impl, for test (Closed)
Patch Set: Fixed gyp bug ( :) ) Created 10 years, 7 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « net/tools/testserver/chromiumsync.py ('k') | net/tools/testserver/testserver.py » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
(Empty)
1 #!/usr/bin/python2.4
2 # Copyright (c) 2010 The Chromium Authors. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """Tests exercising chromiumsync and SyncDataModel."""
7
8 import unittest
9
10 from google.protobuf import text_format
11
12 import chromiumsync
13 import sync_pb2
14
class SyncDataModelTest(unittest.TestCase):
  """Unit tests for chromiumsync.SyncDataModel."""

  def setUp(self):
    # A fresh, empty in-memory sync model for every test.
    self.model = chromiumsync.SyncDataModel()

  def AddToModel(self, proto):
    """Insert proto directly into the model's entry map, bypassing _SaveEntry
    (so no version stamping or copying happens)."""
    self.model._entries[proto.id_string] = proto
21
22 def testPermanentItemSpecs(self):
23 SPECS = chromiumsync.SyncDataModel._PERMANENT_ITEM_SPECS
24 # parent_tags must be declared before use.
25 declared_specs = set(['0'])
26 for spec in SPECS:
27 self.assertTrue(spec.parent_tag in declared_specs)
28 declared_specs.add(spec.tag)
29 # Every sync datatype should have a permanent folder associated with it.
30 unique_datatypes = set([x.sync_type for x in SPECS])
31 self.assertEqual(unique_datatypes,
32 set(chromiumsync.ALL_TYPES))
33
34 def testSaveEntry(self):
35 proto = sync_pb2.SyncEntity()
36 proto.id_string = 'abcd';
37 proto.version = 0;
38 self.assertFalse(self.model._ItemExists(proto.id_string))
39 self.model._SaveEntry(proto)
40 self.assertEqual(1, proto.version)
41 self.assertTrue(self.model._ItemExists(proto.id_string))
42 self.model._SaveEntry(proto)
43 self.assertEqual(2, proto.version)
44 proto.version = 0
45 self.assertTrue(self.model._ItemExists(proto.id_string))
46 self.assertEqual(2, self.model._entries[proto.id_string].version)
47
  def testWritePosition(self):
    """Tests _WritePosition's assignment of position_in_parent values."""

    def MakeProto(id_string, parent, position):
      # Insert a pre-positioned entry directly, bypassing commit logic.
      proto = sync_pb2.SyncEntity()
      proto.id_string = id_string
      proto.position_in_parent = position
      proto.parent_id_string = parent
      self.AddToModel(proto)

    # Parent 'X' has three children 800 apart; 'Z' mirrors them offset by 7;
    # 'Y' has a single child.
    MakeProto('a', 'X', 1000)
    MakeProto('b', 'X', 1800)
    MakeProto('c', 'X', 2600)
    MakeProto('a1', 'Z', 1007)
    MakeProto('a2', 'Z', 1807)
    MakeProto('a3', 'Z', 2607)
    MakeProto('s', 'Y', 10000)

    def AssertPositionResult(my_id, parent_id, prev_id, expected_position):
      # Run _WritePosition and verify the computed position and parent.
      entry = sync_pb2.SyncEntity()
      entry.id_string = my_id
      self.model._WritePosition(entry, parent_id, prev_id)
      self.assertEqual(expected_position, entry.position_in_parent)
      self.assertEqual(parent_id, entry.parent_id_string)
      # insert_after_item_id should never survive the call.
      self.assertFalse(entry.HasField('insert_after_item_id'))

    # New item in an empty parent gets position 0; with siblings, no
    # predecessor places it 2**20 before the first child, and a last-child
    # predecessor places it 2**20 after the last child.
    AssertPositionResult('new', 'new_parent', '', 0)
    AssertPositionResult('new', 'Y', '', 10000 - (2 ** 20))
    AssertPositionResult('new', 'Y', 's', 10000 + (2 ** 20))
    # An item already in the right place keeps its current position.
    AssertPositionResult('s', 'Y', '', 10000)
    AssertPositionResult('s', 'Y', 's', 10000)
    AssertPositionResult('a1', 'Z', '', 1007)

    # New item among existing siblings: lands between the predecessor and
    # its successor.
    AssertPositionResult('new', 'X', '', 1000 - (2 ** 20))
    AssertPositionResult('new', 'X', 'a', 1100)
    AssertPositionResult('new', 'X', 'b', 1900)
    AssertPositionResult('new', 'X', 'c', 2600 + (2 ** 20))

    # Reparenting an item from 'Z' into 'X' behaves like a new insertion.
    AssertPositionResult('a1', 'X', '', 1000 - (2 ** 20))
    AssertPositionResult('a1', 'X', 'a', 1100)
    AssertPositionResult('a1', 'X', 'b', 1900)
    AssertPositionResult('a1', 'X', 'c', 2600 + (2 ** 20))

    # Repositioning an existing child: if it is already in the requested
    # spot its position is unchanged; otherwise it moves relative to the
    # other siblings.
    AssertPositionResult('a', 'X', '', 1000)
    AssertPositionResult('a', 'X', 'b', 1900)
    AssertPositionResult('a', 'X', 'c', 2600 + (2 ** 20))

    AssertPositionResult('b', 'X', '', 1000 - (2 ** 20))
    AssertPositionResult('b', 'X', 'a', 1800)
    AssertPositionResult('b', 'X', 'c', 2600 + (2 ** 20))

    AssertPositionResult('c', 'X', '', 1000 - (2 ** 20))
    AssertPositionResult('c', 'X', 'a', 1100)
    AssertPositionResult('c', 'X', 'b', 2600)
100
101 def testCreatePermanentItems(self):
102 self.model._CreatePermanentItems(chromiumsync.ALL_TYPES)
103 self.assertEqual(len(chromiumsync.ALL_TYPES) + 2,
104 len(self.model._entries))
105
106 def ExpectedPermanentItemCount(self, sync_type):
107 if sync_type == chromiumsync.BOOKMARK:
108 return 4
109 elif sync_type == chromiumsync.TOP_LEVEL:
110 return 1
111 else:
112 return 2
113
  def testGetChangesFromTimestampZeroForEachType(self):
    """A first GetUpdates (timestamp 0) should create the permanent items
    for the requested types, exactly once."""
    for sync_type in chromiumsync.ALL_TYPES:
      self.model = chromiumsync.SyncDataModel()
      request_types = [sync_type, chromiumsync.TOP_LEVEL]

      version, changes = self.model.GetChangesFromTimestamp(request_types, 0)

      expected_count = self.ExpectedPermanentItemCount(sync_type)
      self.assertEqual(expected_count, version)
      self.assertEqual(expected_count, len(changes))
      # The first change is the entry tagged 'google_chrome'.
      self.assertEqual('google_chrome', changes[0].server_defined_unique_tag)
      for change in changes:
        self.assertTrue(change.HasField('server_defined_unique_tag'))
        self.assertEqual(change.version, change.sync_timestamp)
        self.assertTrue(change.version <= version)

      # Test idempotence: another GetUpdates from ts=0 shouldn't recreate.
      version, changes = self.model.GetChangesFromTimestamp(request_types, 0)
      self.assertEqual(expected_count, version)
      self.assertEqual(expected_count, len(changes))

      # Doing a wider GetUpdates from timestamp zero shouldn't recreate either.
      new_version, changes = self.model.GetChangesFromTimestamp(
          chromiumsync.ALL_TYPES, 0)
      self.assertEqual(len(chromiumsync.SyncDataModel._PERMANENT_ITEM_SPECS),
                       new_version)
      self.assertEqual(new_version, len(changes))
      # A narrow request still reports the new global version but only
      # returns this type's items.
      version, changes = self.model.GetChangesFromTimestamp(request_types, 0)
      self.assertEqual(new_version, version)
      self.assertEqual(expected_count, len(changes))
144
  def testBatchSize(self):
    """Large result sets should be delivered in _BATCH_SIZE chunks."""
    for sync_type in chromiumsync.ALL_TYPES[1:]:
      specifics = chromiumsync.GetDefaultEntitySpecifics(sync_type)
      self.model = chromiumsync.SyncDataModel()
      request_types = [sync_type, chromiumsync.TOP_LEVEL]

      # Create three full batches' worth of entries of this datatype.
      for i in range(self.model._BATCH_SIZE*3):
        entry = sync_pb2.SyncEntity()
        entry.id_string = 'batch test %d' % i
        entry.specifics.CopyFrom(specifics)
        self.model._SaveEntry(entry)
      # Each GetChanges call should advance the version by one full batch.
      version, changes = self.model.GetChangesFromTimestamp(request_types, 0)
      self.assertEqual(self.model._BATCH_SIZE, version)
      version, changes = self.model.GetChangesFromTimestamp(request_types,
                                                            version)
      self.assertEqual(self.model._BATCH_SIZE*2, version)
      version, changes = self.model.GetChangesFromTimestamp(request_types,
                                                            version)
      self.assertEqual(self.model._BATCH_SIZE*3, version)
      # The final call also picks up the permanent items for this type,
      # hence the extra expected_dingleberry in the version arithmetic.
      expected_dingleberry = self.ExpectedPermanentItemCount(sync_type)
      version, changes = self.model.GetChangesFromTimestamp(request_types,
                                                            version)
      self.assertEqual(self.model._BATCH_SIZE*3 + expected_dingleberry,
                       version)

      # Now delete a third of the items.
      for i in xrange(self.model._BATCH_SIZE*3 - 1, 0, -3):
        entry = sync_pb2.SyncEntity()
        entry.id_string = 'batch test %d' % i
        entry.deleted = True
        self.model._SaveEntry(entry)

      # The batch counts shouldn't change.
      version, changes = self.model.GetChangesFromTimestamp(request_types, 0)
      self.assertEqual(self.model._BATCH_SIZE, len(changes))
      version, changes = self.model.GetChangesFromTimestamp(request_types,
                                                            version)
      self.assertEqual(self.model._BATCH_SIZE, len(changes))
      version, changes = self.model.GetChangesFromTimestamp(request_types,
                                                            version)
      self.assertEqual(self.model._BATCH_SIZE, len(changes))
      expected_dingleberry = self.ExpectedPermanentItemCount(sync_type)
      version, changes = self.model.GetChangesFromTimestamp(request_types,
                                                            version)
      self.assertEqual(expected_dingleberry, len(changes))
      # Deletions bumped every third item's version, so the final version
      # reflects four batches plus the permanent items.
      self.assertEqual(self.model._BATCH_SIZE*4 + expected_dingleberry, version)
191
192 def testCommitEachDataType(self):
193 for sync_type in chromiumsync.ALL_TYPES[1:]:
194 specifics = chromiumsync.GetDefaultEntitySpecifics(sync_type)
195 self.model = chromiumsync.SyncDataModel()
196 my_cache_guid = '112358132134'
197 parent = 'foobar'
198 commit_session = {}
199
200 # Start with a GetUpdates from timestamp 0, to populate permanent items.
201 original_version, original_changes = (
202 self.model.GetChangesFromTimestamp([sync_type], 0))
203
204 def DoCommit(original=None, id='', name=None, parent=None, prev=None):
205 proto = sync_pb2.SyncEntity()
206 if original is not None:
207 proto.version = original.version
208 proto.id_string = original.id_string
209 proto.parent_id_string = original.parent_id_string
210 proto.name = original.name
211 else:
212 proto.id_string = id
213 proto.version = 0
214 proto.specifics.CopyFrom(specifics)
215 if name is not None:
216 proto.name = name
217 if parent:
218 proto.parent_id_string = parent.id_string
219 if prev:
220 proto.insert_after_item_id = prev.id_string
221 else:
222 proto.insert_after_item_id = ''
223 proto.folder = True
224 proto.deleted = False
225 result = self.model.CommitEntry(proto, my_cache_guid, commit_session)
226 self.assertTrue(result)
227 return (proto, result)
228
229 # Commit a new item.
230 proto1, result1 = DoCommit(name='namae', id='Foo',
231 parent=original_changes[-1])
232 # Commit an item whose parent is another item (referenced via the
233 # pre-commit ID).
234 proto2, result2 = DoCommit(name='Secondo', id='Bar',
235 parent=proto1)
236 # Commit a sibling of the second item.
237 proto3, result3 = DoCommit(name='Third!', id='Baz',
238 parent=proto1, prev=proto2)
239
240 self.assertEqual(3, len(commit_session))
241 for p, r in [(proto1, result1), (proto2, result2), (proto3, result3)]:
242 self.assertNotEqual(r.id_string, p.id_string)
243 self.assertEqual(r.originator_client_item_id, p.id_string)
244 self.assertEqual(r.originator_cache_guid, my_cache_guid)
245 self.assertTrue(r is not self.model._entries[r.id_string],
246 "Commit result didn't make a defensive copy.")
247 self.assertTrue(p is not self.model._entries[r.id_string],
248 "Commit result didn't make a defensive copy.")
249 self.assertEqual(commit_session.get(p.id_string), r.id_string)
250 self.assertTrue(r.version > original_version)
251 self.assertEqual(result1.parent_id_string, proto1.parent_id_string)
252 self.assertEqual(result2.parent_id_string, result1.id_string)
253 version, changes = self.model.GetChangesFromTimestamp([sync_type],
254 original_version)
255 self.assertEqual(3, len(changes))
256 self.assertEqual(original_version + 3, version)
257 self.assertEqual([result1, result2, result3], changes)
258 for c in changes:
259 self.assertTrue(c is not self.model._entries[c.id_string],
260 "GetChanges didn't make a defensive copy.")
261 self.assertTrue(result2.position_in_parent < result3.position_in_parent)
262 self.assertEqual(0, result2.position_in_parent)
263
264 # Now update the items so that the second item is the parent of the
265 # first; with the first sandwiched between two new items (4 and 5).
266 # Do this in a new commit session, meaning we'll reference items from
267 # the first batch by their post-commit, server IDs.
268 commit_session = {}
269 old_cache_guid = my_cache_guid
270 my_cache_guid = 'A different GUID'
271 proto2b, result2b = DoCommit(original=result2,
272 parent=original_changes[-1])
273 proto4, result4 = DoCommit(id='ID4', name='Four',
274 parent=result2, prev=None)
275 proto1b, result1b = DoCommit(original=result1,
276 parent=result2, prev=proto4)
277 proto5, result5 = DoCommit(id='ID5', name='Five', parent=result2,
278 prev=result1)
279
280 self.assertEqual(2, len(commit_session),
281 'Only new items in second batch should be in the session')
282 for p, r, original in [(proto2b, result2b, proto2),
283 (proto4, result4, proto4),
284 (proto1b, result1b, proto1),
285 (proto5, result5, proto5)]:
286 self.assertEqual(r.originator_client_item_id, original.id_string)
287 if original is not p:
288 self.assertEqual(r.id_string, p.id_string,
289 'Ids should be stable after first commit')
290 self.assertEqual(r.originator_cache_guid, old_cache_guid)
291 else:
292 self.assertNotEqual(r.id_string, p.id_string)
293 self.assertEqual(r.originator_cache_guid, my_cache_guid)
294 self.assertEqual(commit_session.get(p.id_string), r.id_string)
295 self.assertTrue(r is not self.model._entries[r.id_string],
296 "Commit result didn't make a defensive copy.")
297 self.assertTrue(p is not self.model._entries[r.id_string],
298 "Commit didn't make a defensive copy.")
299 self.assertTrue(r.version > p.version)
300 version, changes = self.model.GetChangesFromTimestamp([sync_type],
301 original_version)
302 self.assertEqual(5, len(changes))
303 self.assertEqual(original_version + 7, version)
304 self.assertEqual([result3, result2b, result4, result1b, result5], changes)
305 for c in changes:
306 self.assertTrue(c is not self.model._entries[c.id_string],
307 "GetChanges didn't make a defensive copy.")
308 self.assertTrue(result4.parent_id_string ==
309 result1b.parent_id_string ==
310 result5.parent_id_string ==
311 result2b.id_string)
312 self.assertTrue(result4.position_in_parent <
313 result1b.position_in_parent <
314 result5.position_in_parent)
315
# Run all tests when executed as a script.
if __name__ == '__main__':
  unittest.main()
OLDNEW
« no previous file with comments | « net/tools/testserver/chromiumsync.py ('k') | net/tools/testserver/testserver.py » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698