        # All texts should be output.
        self.assertEqual(set(keys), seen)

    def test_clear_cache(self):
        files = self.get_versionedfiles()
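        # Exercise the cache-clearing API; assuming clear_cache() is safe to
        # call even when nothing has been cached yet, this must not raise.
        files.clear_cache()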

    def test_construct(self):
        """Each parameterised test can be constructed on a transport."""
        files = self.get_versionedfiles()
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_long_parent_chain_out_of_order(self):
        """An out of order stream can either error or work."""
        if not self.graph:
            raise TestNotApplicable('ancestry info only relevant with graph.')
        # Create a reasonably long chain of records based on each other, where
        # most will be deltas.
        source = self.get_versionedfiles('source')
        parents = ()
        keys = []
        content = [('same same %d\n' % n) for n in range(500)]
        for letter in 'abcdefghijklmnopqrstuvwxyz':
            key = ('key-' + letter,)
            if self.key_length == 2:
                key = ('prefix',) + key
            content.append('content for ' + letter + '\n')
            source.add_lines(key, parents, content)
            keys.append(key)
            parents = (key,)
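        # Each key now names the previous key as its parent, so in a
        # delta-compressing store every text except keys[0] is expected to be
        # stored as a delta against its predecessor.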
        # Create a stream of these records, excluding the first record that the
        # rest ultimately depend upon, and insert it into a new vf.
        streams = []
        for key in reversed(keys):
            streams.append(source.get_record_stream([key], 'unordered', False))
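        # keys were walked in reverse, so the last stream collected holds the
        # first record in the chain, the one everything else depends upon;
        # slicing it off leaves a stream whose compression parent is absent.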
        deltas = chain(*streams[:-1])
        files = self.get_versionedfiles()
        try:
            files.insert_record_stream(deltas)
        except RevisionNotPresent:
            # Must not have corrupted the file.
            files.check()
        else:
            # Must only report either just the first key as a missing parent,
            # or no key as missing at all (for non-delta scenarios).
            missing = set(files.get_missing_compression_parent_keys())
            missing.discard(keys[0])
            self.assertEqual(set(), missing)

    def get_knit_delta_source(self):
        """Get a source that can produce a stream with knit delta records,
        regardless of this test's scenario.