87
118
f = self.reopen_file(create=True)
121
def test_get_record_stream_empty(self):
    """get_record_stream is a replacement for get_data_stream."""
    f = self.get_file()
    # An empty key list must yield an empty (but iterable) stream.
    entries = f.get_record_stream([], 'unordered', False)
    self.assertEqual([], list(entries))
def assertValidStorageKind(self, storage_kind):
    """Assert that storage_kind is a valid storage_kind.

    :param storage_kind: The kind string reported by a content factory.
    """
    # The closed set of kinds a record stream may legitimately report.
    self.assertSubset([storage_kind],
        ['mpdiff', 'knit-annotated-ft', 'knit-annotated-delta',
         'knit-ft', 'knit-delta', 'fulltext', 'knit-annotated-ft-gz',
         'knit-annotated-delta-gz', 'knit-ft-gz', 'knit-delta-gz'])
def capture_stream(self, f, entries, on_seen, parents):
    """Capture a stream for testing.

    :param f: The versioned file the stream was sourced from.
    :param entries: The record stream to check.
    :param on_seen: Callback invoked with each factory key as it is seen.
    :param parents: Dict mapping version id to its expected parents.
    """
    for factory in entries:
        on_seen(factory.key)
        self.assertValidStorageKind(factory.storage_kind)
        self.assertEqual(f.get_sha1s([factory.key[0]])[0], factory.sha1)
        self.assertEqual(parents[factory.key[0]], factory.parents)
        # Whatever kind the factory advertises, it must serialise to a str.
        self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
            str)
def test_get_record_stream_interface(self):
    """Each item in a stream has to provide a regular interface."""
    f, parents = get_diamond_vf(self.get_file())
    entries = f.get_record_stream(['merged', 'left', 'right', 'base'],
        'unordered', False)
    seen = set()
    self.capture_stream(f, entries, seen.add, parents)
    # Unordered streams make no ordering promise, only coverage.
    self.assertEqual(set([('base',), ('left',), ('right',), ('merged',)]),
        seen)
def test_get_record_stream_interface_ordered(self):
    """Each item in a stream has to provide a regular interface."""
    f, parents = get_diamond_vf(self.get_file())
    entries = f.get_record_stream(['merged', 'left', 'right', 'base'],
        'topological', False)
    seen = []
    self.capture_stream(f, entries, seen.append, parents)
    # Either topological order of the diamond is acceptable: left and
    # right are unordered relative to each other.
    self.assertSubset([tuple(seen)],
        (
         (('base',), ('left',), ('right',), ('merged',)),
         (('base',), ('right',), ('left',), ('merged',)),
        ))
def test_get_record_stream_interface_ordered_with_delta_closure(self):
    """Each item in a stream has to provide a regular interface."""
    f, parents = get_diamond_vf(self.get_file())
    # include_delta_closure=True means every record must be able to
    # expand to a fulltext.
    entries = f.get_record_stream(['merged', 'left', 'right', 'base'],
        'topological', True)
    seen = []
    for factory in entries:
        seen.append(factory.key)
        self.assertValidStorageKind(factory.storage_kind)
        self.assertEqual(f.get_sha1s([factory.key[0]])[0], factory.sha1)
        self.assertEqual(parents[factory.key[0]], factory.parents)
        self.assertEqual(f.get_text(factory.key[0]),
            factory.get_bytes_as('fulltext'))
        self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
            str)
    # Either topological order of the diamond is acceptable.
    self.assertSubset([tuple(seen)],
        (
         (('base',), ('left',), ('right',), ('merged',)),
         (('base',), ('right',), ('left',), ('merged',)),
        ))
def test_get_record_stream_unknown_storage_kind_raises(self):
    """Asking for a storage kind that the stream cannot supply raises."""
    f, parents = get_diamond_vf(self.get_file())
    entries = f.get_record_stream(['merged', 'left', 'right', 'base'],
        'unordered', False)
    # We track the contents because we should be able to try, fail a
    # particular kind and then ask for one that works and continue.
    seen = set()
    for factory in entries:
        seen.add(factory.key)
        self.assertValidStorageKind(factory.storage_kind)
        self.assertEqual(f.get_sha1s([factory.key[0]])[0], factory.sha1)
        self.assertEqual(parents[factory.key[0]], factory.parents)
        # currently no stream emits mpdiff
        self.assertRaises(errors.UnavailableRepresentation,
            factory.get_bytes_as, 'mpdiff')
        # After the failed request, the native kind must still work.
        self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
            str)
    self.assertEqual(set([('base',), ('left',), ('right',), ('merged',)]),
        seen)
def test_get_record_stream_missing_records_are_absent(self):
    """Missing requested versions yield 'absent' records, in any order."""
    f, parents = get_diamond_vf(self.get_file())
    # 'or' is not present in the diamond fixture.
    entries = f.get_record_stream(['merged', 'left', 'right', 'or', 'base'],
        'unordered', False)
    self.assertAbsentRecord(f, parents, entries)
    entries = f.get_record_stream(['merged', 'left', 'right', 'or', 'base'],
        'topological', False)
    self.assertAbsentRecord(f, parents, entries)
def assertAbsentRecord(self, f, parents, entries):
    """Helper for test_get_record_stream_missing_records_are_absent.

    Checks that the missing key 'or' is reported as an absent record
    while all present keys behave normally.
    """
    seen = set()
    for factory in entries:
        seen.add(factory.key)
        if factory.key == ('or',):
            # Absent records carry no content metadata.
            self.assertEqual('absent', factory.storage_kind)
            self.assertEqual(None, factory.sha1)
            self.assertEqual(None, factory.parents)
        else:
            self.assertValidStorageKind(factory.storage_kind)
            self.assertEqual(f.get_sha1s([factory.key[0]])[0], factory.sha1)
            self.assertEqual(parents[factory.key[0]], factory.parents)
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                str)
    self.assertEqual(
        set([('base',), ('left',), ('right',), ('merged',), ('or',)]),
        seen)
def test_filter_absent_records(self):
    """Requested missing records can be filter trivially."""
    f, parents = get_diamond_vf(self.get_file())
    # 'extra' is not present; filter_absent should drop its record.
    entries = f.get_record_stream(['merged', 'left', 'right', 'extra', 'base'],
        'unordered', False)
    seen = set()
    self.capture_stream(f, versionedfile.filter_absent(entries), seen.add,
        parents)
    self.assertEqual(set([('base',), ('left',), ('right',), ('merged',)]),
        seen)
def test_insert_record_stream_empty(self):
    """Inserting an empty record stream should work."""
    f = self.get_file()
    f.insert_record_stream([])
def assertIdenticalVersionedFile(self, left, right):
    """Assert that left and right have the same contents.

    Compares the version set, the parent graph, and every stored text.
    """
    self.assertEqual(set(left.versions()), set(right.versions()))
    self.assertEqual(left.get_parent_map(left.versions()),
        right.get_parent_map(right.versions()))
    for v in left.versions():
        self.assertEqual(left.get_text(v), right.get_text(v))
def test_insert_record_stream_fulltexts(self):
    """Any file should accept a stream of fulltexts."""
    f = self.get_file()
    # Weaves emit fulltext records, so this exercises the fulltext path.
    weave_vf = WeaveFile('source', get_transport(self.get_url('.')),
        create=True, get_scope=self.get_transaction)
    source, _ = get_diamond_vf(weave_vf)
    stream = source.get_record_stream(source.versions(), 'topological',
        False)
    f.insert_record_stream(stream)
    self.assertIdenticalVersionedFile(f, source)
def test_insert_record_stream_fulltexts_noeol(self):
    """Any file should accept a stream of fulltexts."""
    f = self.get_file()
    # Same as test_insert_record_stream_fulltexts but the texts have no
    # trailing newline, exercising the no-eol handling.
    weave_vf = WeaveFile('source', get_transport(self.get_url('.')),
        create=True, get_scope=self.get_transaction)
    source, _ = get_diamond_vf(weave_vf, trailing_eol=False)
    stream = source.get_record_stream(source.versions(), 'topological',
        False)
    f.insert_record_stream(stream)
    self.assertIdenticalVersionedFile(f, source)
def test_insert_record_stream_annotated_knits(self):
    """Any file should accept a stream from plain knits."""
    f = self.get_file()
    # Default factory gives an annotated knit source.
    source = make_file_knit('source', get_transport(self.get_url('.')),
        create=True)
    get_diamond_vf(source)
    stream = source.get_record_stream(source.versions(), 'topological',
        False)
    f.insert_record_stream(stream)
    self.assertIdenticalVersionedFile(f, source)
def test_insert_record_stream_annotated_knits_noeol(self):
    """Any file should accept a stream from plain knits."""
    f = self.get_file()
    source = make_file_knit('source', get_transport(self.get_url('.')),
        create=True)
    # No trailing newline on the texts this time.
    get_diamond_vf(source, trailing_eol=False)
    stream = source.get_record_stream(source.versions(), 'topological',
        False)
    f.insert_record_stream(stream)
    self.assertIdenticalVersionedFile(f, source)
def test_insert_record_stream_plain_knits(self):
    """Any file should accept a stream from plain knits."""
    f = self.get_file()
    source = make_file_knit('source', get_transport(self.get_url('.')),
        create=True, factory=KnitPlainFactory())
    get_diamond_vf(source)
    stream = source.get_record_stream(source.versions(), 'topological',
        False)
    f.insert_record_stream(stream)
    self.assertIdenticalVersionedFile(f, source)
def test_insert_record_stream_plain_knits_noeol(self):
    """Any file should accept a stream from plain knits."""
    f = self.get_file()
    source = make_file_knit('source', get_transport(self.get_url('.')),
        create=True, factory=KnitPlainFactory())
    # No trailing newline on the texts this time.
    get_diamond_vf(source, trailing_eol=False)
    stream = source.get_record_stream(source.versions(), 'topological',
        False)
    f.insert_record_stream(stream)
    self.assertIdenticalVersionedFile(f, source)
def test_insert_record_stream_existing_keys(self):
    """Inserting keys already in a file should not error."""
    f = self.get_file()
    source = make_file_knit('source', get_transport(self.get_url('.')),
        create=True, factory=KnitPlainFactory())
    get_diamond_vf(source)
    # insert some keys into f.
    get_diamond_vf(f, left_only=True)
    stream = source.get_record_stream(source.versions(), 'topological',
        False)
    f.insert_record_stream(stream)
    self.assertIdenticalVersionedFile(f, source)
def test_insert_record_stream_missing_keys(self):
    """Inserting a stream with absent keys should raise an error."""
    f = self.get_file()
    source = make_file_knit('source', get_transport(self.get_url('.')),
        create=True, factory=KnitPlainFactory())
    # 'missing' was never added to source, so its record is absent.
    stream = source.get_record_stream(['missing'], 'topological',
        False)
    self.assertRaises(errors.RevisionNotPresent, f.insert_record_stream,
        stream)
def test_insert_record_stream_out_of_order(self):
    """An out of order stream can either error or work."""
    f, parents = get_diamond_vf(self.get_file())
    origin_entries = f.get_record_stream(['origin'], 'unordered', False)
    end_entries = f.get_record_stream(['merged', 'left'],
        'topological', False)
    start_entries = f.get_record_stream(['right', 'base'],
        'topological', False)
    # Children arrive before their parents: implementations may either
    # cope or raise RevisionNotPresent, but must never corrupt the file.
    entries = chain(origin_entries, end_entries, start_entries)
    target = self.get_file('target')
    try:
        target.insert_record_stream(entries)
    except RevisionNotPresent:
        # Must not have corrupted the file.
        target.check()
    else:
        self.assertIdenticalVersionedFile(f, target)
def test_insert_record_stream_delta_missing_basis_no_corruption(self):
    """Insertion where a needed basis is not included aborts safely."""
    # Annotated source - deltas can be used in any knit.
    source = make_file_knit('source', get_transport(self.get_url('.')),
        create=True)
    get_diamond_vf(source)
    # 'merged' is a delta whose basis is not in the stream.
    entries = source.get_record_stream(['origin', 'merged'], 'unordered', False)
    f = self.get_file()
    self.assertRaises(RevisionNotPresent, f.insert_record_stream, entries)
    # The failed insertion must leave the target intact.
    f.check()
    self.assertFalse(f.has_version('merged'))
90
381
def test_adds_with_parent_texts(self):
91
382
f = self.get_file()
1244
1537
# Expected merge output when both sides insert overlapping content:
# the common 'aaa'/'bbb' lines frame a conflict region with markers.
overlappedInsertExpected = ['aaa', '<<<<<<< ', 'xxx', 'yyy', '=======',
                            'xxx', '>>>>>>> ', 'bbb']
1541
class TestContentFactoryAdaption(TestCaseWithMemoryTransport):
1543
def test_select_adaptor(self):
    """Test expected adapters exist."""
    # One scenario for each lookup combination we expect to use.
    # Each is source_kind, requested_kind, adapter class
    scenarios = [
        ('knit-delta-gz', 'fulltext', _mod_knit.DeltaPlainToFullText),
        ('knit-ft-gz', 'fulltext', _mod_knit.FTPlainToFullText),
        ('knit-annotated-delta-gz', 'knit-delta-gz',
            _mod_knit.DeltaAnnotatedToUnannotated),
        ('knit-annotated-delta-gz', 'fulltext',
            _mod_knit.DeltaAnnotatedToFullText),
        ('knit-annotated-ft-gz', 'knit-ft-gz',
            _mod_knit.FTAnnotatedToUnannotated),
        ('knit-annotated-ft-gz', 'fulltext',
            _mod_knit.FTAnnotatedToFullText),
        ]
    for source, requested, klass in scenarios:
        adapter_factory = versionedfile.adapter_registry.get(
            (source, requested))
        adapter = adapter_factory(None)
        self.assertIsInstance(adapter, klass)
def get_knit(self, annotated=True):
    """Return a fresh delta-compressed knit file for adapter tests.

    :param annotated: If True use the annotated factory, else plain.
    """
    if annotated:
        factory = KnitAnnotateFactory()
    else:
        factory = KnitPlainFactory()
    return make_file_knit('knit', self.get_transport('.'), delta=True,
        create=True, factory=factory)
def helpGetBytes(self, f, ft_adapter, delta_adapter):
    """Grab the interested adapted texts for tests.

    :param f: A versioned file with the diamond fixture inserted.
    :param ft_adapter: Adapter to apply to the fulltext record ('origin').
    :param delta_adapter: Adapter to apply to the delta record ('merged').
    :return: (ft_data, delta_data) as produced by the two adapters.
    """
    # origin is a fulltext
    entries = f.get_record_stream(['origin'], 'unordered', False)
    base = entries.next()
    ft_data = ft_adapter.get_bytes(base, base.get_bytes_as(base.storage_kind))
    # merged is both a delta and multiple parents.
    entries = f.get_record_stream(['merged'], 'unordered', False)
    merged = entries.next()
    delta_data = delta_adapter.get_bytes(merged,
        merged.get_bytes_as(merged.storage_kind))
    return ft_data, delta_data
def test_deannotation_noeol(self):
    """Test converting annotated knits to unannotated knits."""
    # we need a full text, and a delta
    f, parents = get_diamond_vf(self.get_knit(), trailing_eol=False)
    ft_data, delta_data = self.helpGetBytes(f,
        _mod_knit.FTAnnotatedToUnannotated(None),
        _mod_knit.DeltaAnnotatedToUnannotated(None))
    # NOTE(review): the 'origin'/'end origin' payload lines below were
    # reconstructed from the surrounding record format — confirm.
    self.assertEqual(
        'version origin 1 b284f94827db1fa2970d9e2014f080413b547a7e\n'
        'origin\n'
        'end origin\n',
        GzipFile(mode='rb', fileobj=StringIO(ft_data)).read())
    self.assertEqual(
        'version merged 4 32c2e79763b3f90e8ccde37f9710b6629c25a796\n'
        '1,2,3\nleft\nright\nmerged\nend merged\n',
        GzipFile(mode='rb', fileobj=StringIO(delta_data)).read())
def test_deannotation(self):
    """Test converting annotated knits to unannotated knits."""
    # we need a full text, and a delta
    f, parents = get_diamond_vf(self.get_knit())
    ft_data, delta_data = self.helpGetBytes(f,
        _mod_knit.FTAnnotatedToUnannotated(None),
        _mod_knit.DeltaAnnotatedToUnannotated(None))
    # NOTE(review): the 'origin'/'end origin' payload lines below were
    # reconstructed from the surrounding record format — confirm.
    self.assertEqual(
        'version origin 1 00e364d235126be43292ab09cb4686cf703ddc17\n'
        'origin\n'
        'end origin\n',
        GzipFile(mode='rb', fileobj=StringIO(ft_data)).read())
    self.assertEqual(
        'version merged 3 ed8bce375198ea62444dc71952b22cfc2b09226d\n'
        '2,2,2\nright\nmerged\nend merged\n',
        GzipFile(mode='rb', fileobj=StringIO(delta_data)).read())
def test_annotated_to_fulltext_no_eol(self):
    """Test adapting annotated knits to full texts (for -> weaves)."""
    # we need a full text, and a delta
    f, parents = get_diamond_vf(self.get_knit(), trailing_eol=False)
    # Reconstructing a full text requires a backing versioned file, and it
    # must have the base lines requested from it.
    logged_vf = versionedfile.RecordingVersionedFileDecorator(f)
    ft_data, delta_data = self.helpGetBytes(f,
        _mod_knit.FTAnnotatedToFullText(None),
        _mod_knit.DeltaAnnotatedToFullText(logged_vf))
    self.assertEqual('origin', ft_data)
    self.assertEqual('base\nleft\nright\nmerged', delta_data)
    # Only the delta's basis ('left') should have been fetched.
    self.assertEqual([('get_lines', 'left')], logged_vf.calls)
def test_annotated_to_fulltext(self):
    """Test adapting annotated knits to full texts (for -> weaves)."""
    # we need a full text, and a delta
    f, parents = get_diamond_vf(self.get_knit())
    # Reconstructing a full text requires a backing versioned file, and it
    # must have the base lines requested from it.
    logged_vf = versionedfile.RecordingVersionedFileDecorator(f)
    ft_data, delta_data = self.helpGetBytes(f,
        _mod_knit.FTAnnotatedToFullText(None),
        _mod_knit.DeltaAnnotatedToFullText(logged_vf))
    self.assertEqual('origin\n', ft_data)
    self.assertEqual('base\nleft\nright\nmerged\n', delta_data)
    # Only the delta's basis ('left') should have been fetched.
    self.assertEqual([('get_lines', 'left')], logged_vf.calls)
def test_unannotated_to_fulltext(self):
    """Test adapting unannotated knits to full texts.

    This is used for -> weaves, and for -> annotated knits.
    """
    # we need a full text, and a delta
    f, parents = get_diamond_vf(self.get_knit(annotated=False))
    # Reconstructing a full text requires a backing versioned file, and it
    # must have the base lines requested from it.
    logged_vf = versionedfile.RecordingVersionedFileDecorator(f)
    ft_data, delta_data = self.helpGetBytes(f,
        _mod_knit.FTPlainToFullText(None),
        _mod_knit.DeltaPlainToFullText(logged_vf))
    self.assertEqual('origin\n', ft_data)
    self.assertEqual('base\nleft\nright\nmerged\n', delta_data)
    # Only the delta's basis ('left') should have been fetched.
    self.assertEqual([('get_lines', 'left')], logged_vf.calls)
def test_unannotated_to_fulltext_no_eol(self):
    """Test adapting unannotated knits to full texts.

    This is used for -> weaves, and for -> annotated knits.
    """
    # we need a full text, and a delta
    f, parents = get_diamond_vf(self.get_knit(annotated=False),
        trailing_eol=False)
    # Reconstructing a full text requires a backing versioned file, and it
    # must have the base lines requested from it.
    logged_vf = versionedfile.RecordingVersionedFileDecorator(f)
    ft_data, delta_data = self.helpGetBytes(f,
        _mod_knit.FTPlainToFullText(None),
        _mod_knit.DeltaPlainToFullText(logged_vf))
    self.assertEqual('origin', ft_data)
    self.assertEqual('base\nleft\nright\nmerged', delta_data)
    # Only the delta's basis ('left') should have been fetched.
    self.assertEqual([('get_lines', 'left')], logged_vf.calls)