~bzr-pqm/bzr/bzr.dev

« back to all changes in this revision

Viewing changes to bzrlib/plugins/multiparent.py

  • Committer: Canonical.com Patch Queue Manager
  • Date: 2007-07-19 16:09:34 UTC
  • mfrom: (2520.4.135 bzr.mpbundle)
  • Revision ID: pqm@pqm.ubuntu.com-20070719160934-d51fyijw69oto88p
Add new bundle and merge-directive formats

Show diffs side-by-side

added added

removed removed

Lines of Context:
 
1
"""Implementation of multiparent diffs for versionedfile-like storage
 
2
 
 
3
Provides mp-regen and mp-extract commands.
 
4
Focus is on comparing size/performance to knits.
 
5
"""
 
6
 
 
7
from bzrlib.lazy_import import lazy_import
 
8
 
 
9
lazy_import(globals(), """
 
10
import (
 
11
        errno,
 
12
        os,
 
13
        sys,
 
14
        time,
 
15
        )
 
16
 
 
17
from bzrlib import (
 
18
    commands,
 
19
    urlutils
 
20
    )
 
21
from bzrlib.workingtree import WorkingTree
 
22
from bzrlib.tests import TestUtil
 
23
 
 
24
from bzrlib.plugins.multiparent.multiparent import (
 
25
    MultiVersionedFile,
 
26
    MultiMemoryVersionedFile,
 
27
    )
 
28
""")
 
29
 
 
30
class cmd_mp_regen(commands.Command):
    """Generate a multiparent versionedfile.

    Imports every version of the selected file (or the inventory weave when
    no file is given) into a MultiVersionedFile so its size/performance can
    be compared against knits.  Which revisions are stored as full-text
    snapshots is controlled by the options; statistics go to stderr.
    """

    hidden = True

    takes_args = ['file?']

    takes_options = [commands.Option('sync-snapshots',
                                     help='Snapshots follow source.'),
                     commands.Option('snapshot-interval', type=int,
                                     help='Take snapshots every x revisions.'),
                     commands.Option('outfile', type=unicode,
                                     help='Write pseudo-knit to this file.'),
                     commands.Option('memory', help='Use memory, not disk.'),
                     commands.Option('extract', help='Test extract time.'),
                     commands.Option('single', help='Use a single parent.'),
                     commands.Option('verify', help='Verify added texts.'),
                     commands.Option('cache', help='Aggressively cache.'),
                     commands.Option('size', help='Aggressive size.'),
                     commands.Option('build', help='Aggressive build.'),
                    ]

    def run(self, file=None, sync_snapshots=False, snapshot_interval=26,
            lsprof_timed=False, dump=False, extract=False, single=False,
            verify=False, outfile=None, memory=False, cache=False,
            size=False, build=False):
        # Note: lsprof_timed, dump and extract are accepted for interface
        # compatibility but are not used by this command.
        file_weave = get_file_weave(file)
        url = file_weave.transport.abspath(file_weave.filename)
        print >> sys.stderr, 'Importing: %s' % \
            urlutils.local_path_from_url(url)
        if sync_snapshots:
            print >> sys.stderr, 'Snapshots follow input'
        else:
            print >> sys.stderr, 'Snapshot interval: %d' % snapshot_interval
        # Build the target versionedfile: disk-backed unless --memory.
        if not memory:
            if outfile is None:
                filename = 'pknit'
            else:
                filename = outfile
            vf = MultiVersionedFile(filename, snapshot_interval)
        else:
            vf = MultiMemoryVersionedFile(snapshot_interval)
        vf.destroy()
        # Revisions the source weave/knit stores as full texts.
        old_snapshots = set(r for r in file_weave.versions() if
                        file_weave._index.get_method(r) == 'fulltext')
        if sync_snapshots:
            to_sync = old_snapshots
        elif size or build:
            # --size/--build pick their snapshots after import, so start
            # with none.  These modes only make sense with --memory.
            # (NOTE(review): assert is stripped under -O; a real error
            # would be more robust, but the exception type would change.)
            assert memory
            to_sync = set()
        else:
            to_sync = vf.select_snapshots(file_weave)
        print >> sys.stderr, "%d fulltext(s)" % len(old_snapshots)
        print >> sys.stderr, "%d planned snapshots" % len(to_sync)

        try:
            vf.import_versionedfile(file_weave, to_sync, single_parent=single,
                                    verify=verify, no_cache=not cache)
            if size:
                # Re-snapshot the versions whose diffs are largest.
                snapshots = vf.select_by_size(len(old_snapshots))
                for version_id in snapshots:
                    vf.make_snapshot(version_id)
            if build:
                # Re-snapshot the versions ranked most expensive to build,
                # up to the source's original snapshot count.
                ranking = vf.get_build_ranking()
                snapshots = ranking[:len(old_snapshots) -\
                    len(vf._snapshots)]
                for version_id in snapshots:
                    vf.make_snapshot(version_id)
        except:
            # Don't leave a partially-imported file behind.
            vf.destroy()
            raise
        try:
            print >> sys.stderr, "%d actual snapshots" % len(vf._snapshots)
            if not cache:
                vf.clear_cache()
            if memory:
                if outfile is not None:
                    # Copy the in-memory diffs into a disk-backed file so
                    # the finally clause can save it.  (Bug fix: this call
                    # previously ran even when no outfile was given,
                    # raising NameError on the unbound vf_file.)
                    vf_file = MultiVersionedFile(outfile)
                    vf_file.import_diffs(vf)
            else:
                vf_file = vf
        finally:
            if outfile is None:
                vf.destroy()
            else:
                vf_file.save()
 
117
 
 
118
class cmd_mp_extract(commands.Command):
 
119
    """Test extraction time multiparent knits"""
 
120
 
 
121
    hidden = True
 
122
 
 
123
    takes_options = [
 
124
        commands.Option('lsprof-timed', help='Use lsprof.'),
 
125
        commands.Option('parallel', help='Extract multiple versions at once.'),
 
126
        commands.Option('count', help='Number of cycles to do.', type=int),
 
127
        ]
 
128
 
 
129
    takes_args = ['filename', 'vfile?']
 
130
 
 
131
    def run(self, filename, vfile=None, lsprof_timed=False, count=1000,
 
132
            parallel=False):
 
133
        vf = MultiVersionedFile(filename)
 
134
        vf.load()
 
135
        snapshots = [r for r in vf.versions() if vf.get_diff(r).is_snapshot()]
 
136
        print '%d snapshots' % len(snapshots)
 
137
        revisions = list(vf.versions())
 
138
        revisions = revisions[-count:]
 
139
        print 'Testing extract time of %d revisions' % len(revisions)
 
140
        if parallel:
 
141
            revisions_list = [revisions]
 
142
        else:
 
143
            revisions_list = [[r] for r in revisions]
 
144
        start = time.clock()
 
145
        for revisions in revisions_list:
 
146
            vf = MultiVersionedFile(filename)
 
147
            vf.load()
 
148
            vf.get_line_list(revisions)
 
149
        print >> sys.stderr, time.clock() - start
 
150
        if lsprof_timed:
 
151
            from bzrlib.lsprof import profile
 
152
            vf.clear_cache()
 
153
            ret, stats = profile(vf.get_line_list, revisions_list[-1][-1])
 
154
            stats.sort()
 
155
            stats.pprint()
 
156
        start = time.clock()
 
157
        for revisions in revisions_list:
 
158
            file_weave = get_file_weave(vfile)
 
159
            file_weave.get_line_list(revisions)
 
160
        print >> sys.stderr, time.clock() - start
 
161
 
 
162
 
 
163
def get_file_weave(filename=None, wt=None):
    """Return a versioned file for *filename*.

    With no filename, returns the inventory weave of the repository
    containing the current directory; otherwise returns the weave of the
    named file at the branch's last revision.

    NOTE(review): the `wt` parameter is never read; kept for interface
    compatibility with existing callers.
    """
    if filename is None:
        tree, _ = WorkingTree.open_containing('.')
        return tree.branch.repository.get_inventory_weave()
    tree, relpath = WorkingTree.open_containing(filename)
    file_id = tree.path2id(relpath)
    rev_tree = tree.branch.repository.revision_tree(tree.last_revision())
    return rev_tree.get_weave(file_id)
 
172
 
 
173
 
 
174
# Make the benchmark commands available to the bzr command dispatcher.
commands.register_command(cmd_mp_regen)
commands.register_command(cmd_mp_extract)