~bzr-pqm/bzr/bzr.dev

Viewing changes to bzrlib/tests/test_knit.py

  • Committer: Martin Pool
  • Date: 2009-07-01 07:22:00 UTC
  • mto: This revision was merged to the branch mainline in revision 4502.
  • Revision ID: mbp@sourcefrog.net-20090701072200-hh21x94g1g11dll7
ReadVFile copes if readv result isn't an iter; also better errors
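
The ReadVFile change itself is not shown on this page (the diff below only covers bzrlib/tests/test_knit.py), but as a reading aid, the behaviour named in the commit message amounts to something like the sketch below. This is an illustrative sketch only: the helper name and the error message are assumptions, not the actual bzrlib patch.

def _iter_readv_result(result):
    # Hypothetical helper: a readv()-style call may hand back a plain list
    # rather than a generator.  Wrapping it with iter() lets callers that
    # expect an iterator work either way ("copes if readv result isn't an
    # iter"), and a non-iterable result raises a clearer error ("better
    # errors") instead of an opaque failure later on.
    try:
        return iter(result)
    except TypeError:
        raise TypeError('readv() returned %r, which is not iterable'
                        % (result,))

A caller would wrap whatever the transport's readv() returns with a helper of this shape before pulling chunks from it.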

=== modified file 'bzrlib/tests/test_knit.py'
@@ -73,13 +73,13 @@
 
     def _probe(self):
         try:
-            import bzrlib._knit_load_data_pyx
+            import bzrlib._knit_load_data_c
         except ImportError:
             return False
         return True
 
     def feature_name(self):
-        return 'bzrlib._knit_load_data_pyx'
+        return 'bzrlib._knit_load_data_c'
 
 CompiledKnitFeature = _CompiledKnitFeature()
 
@@ -366,25 +366,16 @@
         :return: (versioned_file, reload_counter)
             versioned_file  a KnitVersionedFiles using the packs for access
         """
-        builder = self.make_branch_builder('.', format="1.9")
-        builder.start_series()
-        builder.build_snapshot('rev-1', None, [
-            ('add', ('', 'root-id', 'directory', None)),
-            ('add', ('file', 'file-id', 'file', 'content\nrev 1\n')),
-            ])
-        builder.build_snapshot('rev-2', ['rev-1'], [
-            ('modify', ('file-id', 'content\nrev 2\n')),
-            ])
-        builder.build_snapshot('rev-3', ['rev-2'], [
-            ('modify', ('file-id', 'content\nrev 3\n')),
-            ])
-        builder.finish_series()
-        b = builder.get_branch()
-        b.lock_write()
-        self.addCleanup(b.unlock)
+        tree = self.make_branch_and_memory_tree('tree')
+        tree.lock_write()
+        self.addCleanup(tree.unlock)
+        tree.add([''], ['root-id'])
+        tree.commit('one', rev_id='rev-1')
+        tree.commit('two', rev_id='rev-2')
+        tree.commit('three', rev_id='rev-3')
         # Pack these three revisions into another pack file, but don't remove
         # the originals
-        repo = b.repository
+        repo = tree.branch.repository
         collection = repo._pack_collection
         collection.ensure_loaded()
         orig_packs = collection.packs
@@ -393,7 +384,7 @@
         # forget about the new pack
         collection.reset()
         repo.refresh_data()
-        vf = repo.revisions
+        vf = tree.branch.repository.revisions
         # Set up a reload() function that switches to using the new pack file
         new_index = new_pack.revision_index
         access_tuple = new_pack.access_tuple()
@@ -1316,174 +1307,12 @@
         def reset():
             knit._load_data = orig
         self.addCleanup(reset)
-        from bzrlib._knit_load_data_pyx import _load_data_c
+        from bzrlib._knit_load_data_c import _load_data_c
         knit._load_data = _load_data_c
         allow_writes = lambda: mode == 'w'
         return _KndxIndex(transport, mapper, lambda:None, allow_writes, lambda:True)
 
 
-class Test_KnitAnnotator(TestCaseWithMemoryTransport):
-
-    def make_annotator(self):
-        factory = knit.make_pack_factory(True, True, 1)
-        vf = factory(self.get_transport())
-        return knit._KnitAnnotator(vf)
-
-    def test__expand_fulltext(self):
-        ann = self.make_annotator()
-        rev_key = ('rev-id',)
-        ann._num_compression_children[rev_key] = 1
-        res = ann._expand_record(rev_key, (('parent-id',),), None,
-                           ['line1\n', 'line2\n'], ('fulltext', True))
-        # The content object and text lines should be cached appropriately
-        self.assertEqual(['line1\n', 'line2'], res)
-        content_obj = ann._content_objects[rev_key]
-        self.assertEqual(['line1\n', 'line2\n'], content_obj._lines)
-        self.assertEqual(res, content_obj.text())
-        self.assertEqual(res, ann._text_cache[rev_key])
-
-    def test__expand_delta_comp_parent_not_available(self):
-        # Parent isn't available yet, so we return nothing, but queue up this
-        # node for later processing
-        ann = self.make_annotator()
-        rev_key = ('rev-id',)
-        parent_key = ('parent-id',)
-        record = ['0,1,1\n', 'new-line\n']
-        details = ('line-delta', False)
-        res = ann._expand_record(rev_key, (parent_key,), parent_key,
-                                 record, details)
-        self.assertEqual(None, res)
-        self.assertTrue(parent_key in ann._pending_deltas)
-        pending = ann._pending_deltas[parent_key]
-        self.assertEqual(1, len(pending))
-        self.assertEqual((rev_key, (parent_key,), record, details), pending[0])
-
-    def test__expand_record_tracks_num_children(self):
-        ann = self.make_annotator()
-        rev_key = ('rev-id',)
-        rev2_key = ('rev2-id',)
-        parent_key = ('parent-id',)
-        record = ['0,1,1\n', 'new-line\n']
-        details = ('line-delta', False)
-        ann._num_compression_children[parent_key] = 2
-        ann._expand_record(parent_key, (), None, ['line1\n', 'line2\n'],
-                           ('fulltext', False))
-        res = ann._expand_record(rev_key, (parent_key,), parent_key,
-                                 record, details)
-        self.assertEqual({parent_key: 1}, ann._num_compression_children)
-        # Expanding the second child should remove the content object, and the
-        # num_compression_children entry
-        res = ann._expand_record(rev2_key, (parent_key,), parent_key,
-                                 record, details)
-        self.assertFalse(parent_key in ann._content_objects)
-        self.assertEqual({}, ann._num_compression_children)
-        # We should not cache the content_objects for rev2 and rev, because
-        # they do not have compression children of their own.
-        self.assertEqual({}, ann._content_objects)
-
-    def test__expand_delta_records_blocks(self):
-        ann = self.make_annotator()
-        rev_key = ('rev-id',)
-        parent_key = ('parent-id',)
-        record = ['0,1,1\n', 'new-line\n']
-        details = ('line-delta', True)
-        ann._num_compression_children[parent_key] = 2
-        ann._expand_record(parent_key, (), None,
-                           ['line1\n', 'line2\n', 'line3\n'],
-                           ('fulltext', False))
-        ann._expand_record(rev_key, (parent_key,), parent_key, record, details)
-        self.assertEqual({(rev_key, parent_key): [(1, 1, 1), (3, 3, 0)]},
-                         ann._matching_blocks)
-        rev2_key = ('rev2-id',)
-        record = ['0,1,1\n', 'new-line\n']
-        details = ('line-delta', False)
-        ann._expand_record(rev2_key, (parent_key,), parent_key, record, details)
-        self.assertEqual([(1, 1, 2), (3, 3, 0)],
-                         ann._matching_blocks[(rev2_key, parent_key)])
-
-    def test__get_parent_ann_uses_matching_blocks(self):
-        ann = self.make_annotator()
-        rev_key = ('rev-id',)
-        parent_key = ('parent-id',)
-        parent_ann = [(parent_key,)]*3
-        block_key = (rev_key, parent_key)
-        ann._annotations_cache[parent_key] = parent_ann
-        ann._matching_blocks[block_key] = [(0, 1, 1), (3, 3, 0)]
-        # We should not try to access any parent_lines content, because we know
-        # we already have the matching blocks
-        par_ann, blocks = ann._get_parent_annotations_and_matches(rev_key,
-                                        ['1\n', '2\n', '3\n'], parent_key)
-        self.assertEqual(parent_ann, par_ann)
-        self.assertEqual([(0, 1, 1), (3, 3, 0)], blocks)
-        self.assertEqual({}, ann._matching_blocks)
-
-    def test__process_pending(self):
-        ann = self.make_annotator()
-        rev_key = ('rev-id',)
-        p1_key = ('p1-id',)
-        p2_key = ('p2-id',)
-        record = ['0,1,1\n', 'new-line\n']
-        details = ('line-delta', False)
-        p1_record = ['line1\n', 'line2\n']
-        ann._num_compression_children[p1_key] = 1
-        res = ann._expand_record(rev_key, (p1_key,p2_key), p1_key,
-                                 record, details)
-        self.assertEqual(None, res)
-        # self.assertTrue(p1_key in ann._pending_deltas)
-        self.assertEqual({}, ann._pending_annotation)
-        # Now insert p1, and we should be able to expand the delta
-        res = ann._expand_record(p1_key, (), None, p1_record,
-                                 ('fulltext', False))
-        self.assertEqual(p1_record, res)
-        ann._annotations_cache[p1_key] = [(p1_key,)]*2
-        res = ann._process_pending(p1_key)
-        self.assertEqual([], res)
-        self.assertFalse(p1_key in ann._pending_deltas)
-        self.assertTrue(p2_key in ann._pending_annotation)
-        self.assertEqual({p2_key: [(rev_key, (p1_key, p2_key))]},
-                         ann._pending_annotation)
-        # Now fill in parent 2, and pending annotation should be satisfied
-        res = ann._expand_record(p2_key, (), None, [], ('fulltext', False))
-        ann._annotations_cache[p2_key] = []
-        res = ann._process_pending(p2_key)
-        self.assertEqual([rev_key], res)
-        self.assertEqual({}, ann._pending_annotation)
-        self.assertEqual({}, ann._pending_deltas)
-
-    def test_record_delta_removes_basis(self):
-        ann = self.make_annotator()
-        ann._expand_record(('parent-id',), (), None,
-                           ['line1\n', 'line2\n'], ('fulltext', False))
-        ann._num_compression_children['parent-id'] = 2
-
-    def test_annotate_special_text(self):
-        ann = self.make_annotator()
-        vf = ann._vf
-        rev1_key = ('rev-1',)
-        rev2_key = ('rev-2',)
-        rev3_key = ('rev-3',)
-        spec_key = ('special:',)
-        vf.add_lines(rev1_key, [], ['initial content\n'])
-        vf.add_lines(rev2_key, [rev1_key], ['initial content\n',
-                                            'common content\n',
-                                            'content in 2\n'])
-        vf.add_lines(rev3_key, [rev1_key], ['initial content\n',
-                                            'common content\n',
-                                            'content in 3\n'])
-        spec_text = ('initial content\n'
-                     'common content\n'
-                     'content in 2\n'
-                     'content in 3\n')
-        ann.add_special_text(spec_key, [rev2_key, rev3_key], spec_text)
-        anns, lines = ann.annotate(spec_key)
-        self.assertEqual([(rev1_key,),
-                          (rev2_key, rev3_key),
-                          (rev2_key,),
-                          (rev3_key,),
-                         ], anns)
-        self.assertEqualDiff(spec_text, ''.join(lines))
-
-
 class KnitTests(TestCaseWithTransport):
     """Class containing knit test helper routines."""
 
@@ -2230,7 +2059,7 @@
         # self.assertEqual([("annotate", key_basis)], basis.calls)
         self.assertEqual([('get_parent_map', set([key_basis])),
             ('get_parent_map', set([key_basis])),
-            ('get_record_stream', [key_basis], 'topological', True)],
+            ('get_record_stream', [key_basis], 'unordered', True)],
             basis.calls)
 
     def test_check(self):
@@ -2342,9 +2171,9 @@
         # ask which fallbacks have which parents.
         self.assertEqual([
             ("get_parent_map", set([key_basis, key_basis_2, key_missing])),
-            # topological is requested from the fallback, because that is what
-            # was requested at the top level.
-            ("get_record_stream", [key_basis_2, key_basis], 'topological', True)],
+            # unordered is asked for by the underlying worker as it still
+            # buffers everything while answering - which is a problem!
+            ("get_record_stream", [key_basis_2, key_basis], 'unordered', True)],
             calls)
 
     def test_get_record_stream_unordered_deltas(self):
@@ -2571,6 +2400,6 @@
         last_call = basis.calls[-1]
         self.assertEqual('get_record_stream', last_call[0])
         self.assertEqual(set([key_left, key_right]), set(last_call[1]))
-        self.assertEqual('topological', last_call[2])
+        self.assertEqual('unordered', last_call[2])
         self.assertEqual(True, last_call[3])
 