~bzr-pqm/bzr/bzr.dev

Changes to bzrlib/transform.py

Merge bzr.dev 4187, and revert the change to fix refcycle issues.

I apparently didn't run the smart fetch tests, which show that we access inv+chk pages as a fulltext and then insert the stream, which expects to get the block as a compressed block. :(
I need to rethink how to do it, possibly with weakrefs.


This also brings in CommitBuilder.record_iter_changes() and the updates to btree_index
and backing indices.
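
The weakref idea floated in the note above could look roughly like the sketch below. This is purely illustrative and uses made-up Block/FulltextCache names rather than any real bzrlib classes: the point is that a weak-keyed cache lets an expanded fulltext be reused, while the stream code can still ask the block for its compressed bytes and the cache never pins the block or forms a reference cycle.

import weakref
import zlib


class Block(object):
    """Stand-in for a compressed block; not a bzrlib class."""

    def __init__(self, raw_bytes):
        self.compressed_bytes = zlib.compress(raw_bytes)


class FulltextCache(object):
    """Cache expanded texts per block without keeping the block alive."""

    def __init__(self):
        # WeakKeyDictionary entries vanish once their block is collected,
        # so caching a fulltext cannot pin the block or create a cycle.
        self._by_block = weakref.WeakKeyDictionary()

    def fulltext_for(self, block):
        try:
            return self._by_block[block]
        except KeyError:
            text = zlib.decompress(block.compressed_bytes)
            self._by_block[block] = text
            return text


cache = FulltextCache()
block = Block(b'some inv+chk page bytes')
assert cache.fulltext_for(block) == b'some inv+chk page bytes'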

@@ -36,6 +36,7 @@
                            ReusingTransform, NotVersionedError, CantMoveRoot,
                            ExistingLimbo, ImmortalLimbo, NoFinalPath,
                            UnableCreateSymlink)
+from bzrlib.filters import filtered_output_bytes, ContentFilterContext
 from bzrlib.inventory import InventoryEntry
 from bzrlib.osutils import (
     delete_any,
@@ -2113,7 +2114,8 @@
                     executable = tree.is_executable(file_id, tree_path)
                     if executable:
                         tt.set_executability(executable, trans_id)
-                    deferred_contents.append((file_id, trans_id))
+                    trans_data = (trans_id, tree_path)
+                    deferred_contents.append((file_id, trans_data))
                 else:
                     file_trans_id[file_id] = new_by_entry(tt, entry, parent_id,
                                                           tree)
@@ -2158,10 +2160,10 @@
                          in iter if not (c or e[0] != e[1]))
         new_desired_files = []
         count = 0
-        for file_id, trans_id in desired_files:
+        for file_id, (trans_id, tree_path) in desired_files:
             accelerator_path = unchanged.get(file_id)
             if accelerator_path is None:
-                new_desired_files.append((file_id, trans_id))
+                new_desired_files.append((file_id, (trans_id, tree_path)))
                 continue
             pb.update('Adding file contents', count + offset, total)
             if hardlink:
@@ -2169,13 +2171,25 @@
                                    trans_id)
             else:
                 contents = accelerator_tree.get_file(file_id, accelerator_path)
+                if tree.supports_content_filtering():
+                    filters = tree._content_filter_stack(tree_path)
+                    contents = filtered_output_bytes(contents, filters,
+                        ContentFilterContext(tree_path, tree))
                 try:
                     tt.create_file(contents, trans_id)
                 finally:
-                    contents.close()
+                    try:
+                        contents.close()
+                    except AttributeError:
+                        # after filtering, contents may no longer be file-like
+                        pass
             count += 1
         offset += count
-    for count, (trans_id, contents) in enumerate(tree.iter_files_bytes(
-                                                 new_desired_files)):
+    for count, ((trans_id, tree_path), contents) in enumerate(
+            tree.iter_files_bytes(new_desired_files)):
+        if tree.supports_content_filtering():
+            filters = tree._content_filter_stack(tree_path)
+            contents = filtered_output_bytes(contents, filters,
+                ContentFilterContext(tree_path, tree))
         tt.create_file(contents, trans_id)
         pb.update('Adding file contents', count + offset, total)
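
The shape of the change above: deferred_contents now carries (trans_id, tree_path) instead of a bare trans_id, so that when the bytes finally arrive, whether from the accelerator tree or from tree.iter_files_bytes(), the per-path content-filter stack can be applied before tt.create_file(). Once filtered, the contents are no longer guaranteed to be a real file object, which is why close() is now wrapped in try/except AttributeError. A minimal, self-contained sketch of that pattern, using made-up filter helpers rather than the real bzrlib.filters API:

def lf_to_crlf(chunks):
    # Example filter: rewrite line endings; returns a plain list of chunks.
    return [chunk.replace(b'\n', b'\r\n') for chunk in chunks]


def filtered_output(chunks, filters):
    # Apply each filter in turn; the result is a plain sequence, not a
    # file-like object, so callers can no longer rely on .close().
    for f in filters:
        chunks = f(chunks)
    return chunks


def create_file_from(contents, write):
    # Mirrors the guarded close() in the diff: filtered contents may be a
    # plain sequence, unfiltered contents a real file object.
    try:
        for chunk in contents:
            write(chunk)
    finally:
        try:
            contents.close()
        except AttributeError:
            # after filtering, contents may no longer be file-like
            pass


out = []
create_file_from(filtered_output([b'a\n', b'b\n'], [lf_to_crlf]), out.append)
assert b''.join(out) == b'a\r\nb\r\n'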