~bzr-pqm/bzr/bzr.dev


Viewing changes to bzrlib/plugins/multiparent.py

  • Committer: Aaron Bentley
  • Date: 2008-03-03 16:52:41 UTC
  • mfrom: (3144.3.11 fix-conflict-handling)
  • mto: This revision was merged to the branch mainline in revision 3250.
  • Revision ID: aaron@aaronbentley.com-20080303165241-0k2c7ggs6kr9q6hf
Merge with fix-conflict-handling

"""Implementation of multiparent diffs for versionedfile-like storage

Provides mp-regen and mp-extract commands.
Focus is on comparing size/performance to knits.
"""

from bzrlib.lazy_import import lazy_import

lazy_import(globals(), """
import (
        errno,
        os,
        sys,
        time,
        )

from bzrlib import (
    commands,
    urlutils
    )
from bzrlib.workingtree import WorkingTree
from bzrlib.tests import TestUtil

from bzrlib.plugins.multiparent.multiparent import (
    MultiVersionedFile,
    MultiMemoryVersionedFile,
    )
""")

class cmd_mp_regen(commands.Command):
    """Generate a multiparent versionedfile"""

    hidden = True

    takes_args = ['file?']

    takes_options = [commands.Option('sync-snapshots',
                                     help='Snapshots follow source.'),
                     commands.Option('snapshot-interval', type=int,
                                     help='Take snapshots every x revisions.'),
                     commands.Option('outfile', type=unicode,
                                     help='Write pseudo-knit to this file.'),
                     commands.Option('memory', help='Use memory, not disk.'),
                     commands.Option('extract', help='Test extract time.'),
                     commands.Option('single', help='Use a single parent.'),
                     commands.Option('verify', help='Verify added texts.'),
                     commands.Option('cache', help='Aggressively cache.'),
                     commands.Option('size', help='Aggressive size.'),
                     commands.Option('build', help='Aggressive build.'),
                    ]

    def run(self, file=None, sync_snapshots=False, snapshot_interval=26,
            lsprof_timed=False, dump=False, extract=False, single=False,
            verify=False, outfile=None, memory=False, cache=False,
            size=False, build=False):
        file_weave = get_file_weave(file)
        url = file_weave.transport.abspath(file_weave.filename)
        sys.stderr.write('Importing: %s\n' %
            urlutils.local_path_from_url(url))
        if sync_snapshots:
            sys.stderr.write('Snapshots follow input\n')
        else:
            sys.stderr.write('Snapshot interval: %d\n' % snapshot_interval)
        if not memory:
            if outfile is None:
                filename = 'pknit'
            else:
                filename = outfile
            vf = MultiVersionedFile(filename, snapshot_interval)
        else:
            vf = MultiMemoryVersionedFile(snapshot_interval)
        vf.destroy()
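        # Decide which revisions will be stored as snapshots (fulltexts):
        # mirror the source knit's fulltexts, defer the choice to the
        # size/build selection below, or let the versionedfile pick for itself.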
        old_snapshots = set(r for r in file_weave.versions() if
                        file_weave._index.get_method(r) == 'fulltext')
        if sync_snapshots:
            to_sync = old_snapshots
        elif size or build:
            assert memory
            to_sync = set()
        else:
            to_sync = vf.select_snapshots(file_weave)
        sys.stderr.write("%d fulltext(s)\n" % len(old_snapshots))
        sys.stderr.write("%d planned snapshots\n" % len(to_sync))

        try:
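            # Import every text; optionally re-pick snapshots by total size
            # or by build ranking.  On any failure, discard the partial
            # pseudo-knit.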
            vf.import_versionedfile(file_weave, to_sync, single_parent=single,
                                    verify=verify, no_cache=not cache)
            if size:
                snapshots = vf.select_by_size(len(old_snapshots))
                for version_id in snapshots:
                    vf.make_snapshot(version_id)
            if build:
                ranking = vf.get_build_ranking()
                snapshots = ranking[:len(old_snapshots) -
                    len(vf._snapshots)]
                for version_id in snapshots:
                    vf.make_snapshot(version_id)
        except:
            vf.destroy()
            raise
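        # Report how many snapshots were actually taken; if the diffs were
        # built in memory and an output file was requested, copy them into a
        # file-backed pseudo-knit before saving.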
        try:
            sys.stderr.write("%d actual snapshots\n" % len(vf._snapshots))
            if not cache:
                vf.clear_cache()
            if memory:
                if outfile is not None:
                    vf_file = MultiVersionedFile(outfile)
                    vf_file.import_diffs(vf)
            else:
                vf_file = vf
        finally:
            if outfile is None:
                vf.destroy()
            else:
                vf_file.save()

class cmd_mp_extract(commands.Command):
    """Test extraction time of multiparent knits"""

    hidden = True

    takes_options = [
        commands.Option('lsprof-timed', help='Use lsprof.'),
        commands.Option('parallel', help='Extract multiple versions at once.'),
        commands.Option('count', help='Number of cycles to do.', type=int),
        ]

    takes_args = ['filename', 'vfile?']

    def run(self, filename, vfile=None, lsprof_timed=False, count=1000,
            parallel=False):
        vf = MultiVersionedFile(filename)
        vf.load()
        snapshots = [r for r in vf.versions() if vf.get_diff(r).is_snapshot()]
        print '%d snapshots' % len(snapshots)
        revisions = list(vf.versions())
        revisions = revisions[-count:]
        print 'Testing extract time of %d revisions' % len(revisions)
        if parallel:
            revisions_list = [revisions]
        else:
            revisions_list = [[r] for r in revisions]
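        # Time extraction of each batch from the multiparent pseudo-knit,
        # reloading it for every batch so nothing is served from a warm cache.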
        start = time.clock()
        for revisions in revisions_list:
            vf = MultiVersionedFile(filename)
            vf.load()
            vf.get_line_list(revisions)
        sys.stderr.write('%s\n' % (time.clock() - start))
        if lsprof_timed:
            from bzrlib.lsprof import profile
            vf.clear_cache()
            ret, stats = profile(vf.get_line_list, [revisions_list[-1][-1]])
            stats.sort()
            stats.pprint()
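        # Time the same extractions from the original weave/knit for
        # comparison.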
        start = time.clock()
        for revisions in revisions_list:
            file_weave = get_file_weave(vfile)
            file_weave.get_line_list(revisions)
        sys.stderr.write('%s\n' % (time.clock() - start))


def get_file_weave(filename=None, wt=None):
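    """Return the versionedfile to import: the repository's inventory weave
    when no filename is given, otherwise the weave of the versioned file at
    that path in its working tree's branch.
    """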
    if filename is None:
        wt, path = WorkingTree.open_containing('.')
        return wt.branch.repository.get_inventory_weave()
    else:
        wt, path = WorkingTree.open_containing(filename)
        file_id = wt.path2id(path)
        bt = wt.branch.repository.revision_tree(wt.last_revision())
        return bt.get_weave(file_id)


commands.register_command(cmd_mp_regen)
commands.register_command(cmd_mp_extract)
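
A minimal sketch of reading back a pseudo-knit written by mp-regen, using only
the MultiVersionedFile calls exercised above; the 'foo.pknit' name stands in
for whatever was passed to --outfile and is purely illustrative:

    from bzrlib.plugins.multiparent.multiparent import MultiVersionedFile

    # Open the pseudo-knit saved by `bzr mp-regen ... --outfile foo.pknit`.
    vf = MultiVersionedFile('foo.pknit')
    vf.load()

    # Rebuild the full text of the most recent revision, as mp-extract does.
    last = list(vf.versions())[-1]
    lines = vf.get_line_list([last])[0]
    print ''.join(lines)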