src/wibble/wibble.c
>>> i.id2path('2326')
'src/wibble/wibble.c'
TODO: Maybe also keep the full path of the entry, and the children?
But those depend on its position within a particular inventory, and
it would be nice not to need to hold the backpointer here.
# TODO: split InventoryEntry into subclasses for files,
# directories, etc etc.
__slots__ = ['text_sha1', 'text_size', 'file_id', 'name', 'kind',
'text_id', 'parent_id', 'children',
'text_version', 'entry_version', ]
def __init__(self, file_id, name, kind, parent_id, text_id=None):
# Constants returned by describe_change()
# TODO: These should probably move to some kind of FileChangeDescription
# class; that's like what's inside a TreeDelta but we want to be able to
# generate them just for one file at a time.
MODIFIED_AND_RENAMED = 'modified and renamed'
__slots__ = ['file_id', 'revision', 'parent_id', 'name']
# Attributes that all InventoryEntry instances are expected to have, but
# that don't vary for all kinds of entry. (e.g. symlink_target is only
# relevant to InventoryLink, so there's no reason to make every
# InventoryFile instance allocate space to hold a value for it.)
# Attributes that only vary for files: executable, text_sha1, text_size,
# Attributes that only vary for symlinks: symlink_target
symlink_target = None
# Attributes that only vary for tree-references: reference_revision
reference_revision = None
def detect_changes(self, old_entry):
"""Return a (text_modified, meta_modified) from this to old_entry.
_read_tree_state must have been called on self and old_entry prior to
calling detect_changes.
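# Illustrative sketch, not part of bzrlib: detect_changes() compares the
# already-populated tree state of two entries. The ids and sha1 values
# below are invented.
#
#   >>> old = InventoryFile('f-id', 'hello.c', ROOT_ID)
#   >>> old.text_sha1, old.executable = 'aaaa', False
#   >>> new = InventoryFile('f-id', 'hello.c', ROOT_ID)
#   >>> new.text_sha1, new.executable = 'bbbb', True
#   >>> new.detect_changes(old)          # (text_modified, meta_modified)
#   (True, True)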
def _diff(self, text_diff, from_label, tree, to_label, to_entry, to_tree,
output_to, reverse=False):
"""Perform a diff between two entries of the same kind."""
def parent_candidates(self, previous_inventories):
"""Find possible per-file graph parents.
This is currently defined by:
- Select the last changed revision in the parent inventory.
- Deal with a short-lived bug in bzr 0.8's development: two entries
that have the same last-changed revision but different 'x' bit settings are
reconciled by clearing the executable bit (see the code below).
# revision:ie mapping for each ie found in previous_inventories.
# identify candidate head revision ids.
for inv in previous_inventories:
if inv.has_id(self.file_id):
ie = inv[self.file_id]
if ie.revision in candidates:
# same revision value in two different inventories:
# correct possible inconsistencies:
# * there was a bug in revision updates with 'x' bit
if candidates[ie.revision].executable != ie.executable:
candidates[ie.revision].executable = False
ie.executable = False
except AttributeError:
# add this revision as a candidate.
candidates[ie.revision] = ie
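# Illustrative sketch, not part of bzrlib: parent_candidates() collects one
# candidate entry per distinct last-changed revision found in the parent
# inventories. The inventories and revision ids named here are invented.
#
#   >>> candidates = ie.parent_candidates([basis_inv, other_inv])
#   >>> sorted(candidates)               # keys are last-changed revision ids
#   ['rev-1', 'rev-2']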
"""Return true if the object this entry represents has textual data.
Note that textual data includes binary content.
Also note that all entries get weave files created for them.
This attribute is primarily used when upgrading from old trees that
did not have the weave index for all inventory entries.
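# Illustrative sketch, not part of bzrlib: only file entries carry text;
# directories and symlinks report False. The ids below are invented.
#
#   >>> InventoryFile('f-id', 'hello.c', ROOT_ID).has_text()
#   True
#   >>> InventoryDirectory('d-id', 'src', ROOT_ID).has_text()
#   False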
def __init__(self, file_id, name, parent_id):
"""Create an InventoryEntry
The filename must be a single component, relative to the
parent directory; it cannot be a whole path or relative name.
>>> e = InventoryEntry('123', 'hello.c', 'file', ROOT_ID)
>>> e = InventoryFile('123', 'hello.c', ROOT_ID)
>>> e = InventoryEntry('123', 'src/hello.c', 'file', ROOT_ID)
>>> e = InventoryFile('123', 'src/hello.c', ROOT_ID)
Traceback (most recent call last):
BzrCheckError: InventoryEntry name 'src/hello.c' is invalid
InvalidEntryName: Invalid entry name: src/hello.c
assert isinstance(name, basestring), name
if '/' in name or '\\' in name:
raise BzrCheckError('InventoryEntry name %r is invalid' % name)
self.text_version = None
self.entry_version = None
self.text_sha1 = None
self.text_size = None
raise errors.InvalidEntryName(name=name)
self.file_id = file_id
self.text_id = text_id
self.parent_id = parent_id
if kind == 'directory':
raise BzrError("unhandled entry kind %r" % kind)
def kind_character(self):
"""Return a short kind indicator useful for appending to names."""
raise errors.BzrError('unknown kind %r' % self.kind)
known_kinds = ('file', 'directory', 'symlink')
def sorted_children(self):
l = self.children.items()
other = InventoryEntry(self.file_id, self.name, self.kind,
self.parent_id, text_id=self.text_id)
other.text_sha1 = self.text_sha1
other.text_size = self.text_size
return sorted(self.children.items())
def versionable_kind(kind):
return (kind in ('file', 'directory', 'symlink', 'tree-reference'))
def check(self, checker, rev_id, inv):
"""Check this inventory entry is intact.
This is a template method, override _check for kind specific
:param checker: Check object providing context for the checks;
can be used to find out what parts of the repository have already
:param rev_id: Revision id from which this InventoryEntry was loaded.
Not necessarily the last-changed revision for this file.
:param inv: Inventory from which the entry was loaded.
if self.parent_id is not None:
if not inv.has_id(self.parent_id):
raise errors.BzrCheckError(
'missing parent {%s} in inventory for revision {%s}' % (
self.parent_id, rev_id))
checker._add_entry_to_text_key_references(inv, self)
self._check(checker, rev_id)
def _check(self, checker, rev_id):
"""Check this inventory entry for kind specific errors."""
checker._report_items.append(
'unknown entry kind %r in revision {%s}' % (self.kind, rev_id))
"""Clone this inventory entry."""
raise NotImplementedError
def describe_change(old_entry, new_entry):
"""Describe the change between old_entry and this.
This smells of being an InterInventoryEntry situation, but as it's
the first one, we're making it a static method for now.
An entry with a different parent, or different name is considered
to be renamed. Reparenting is an internal detail.
Note that renaming the parent does not trigger a rename for the
# TODO: Perhaps return an object rather than just a string
if old_entry is new_entry:
# also the case of both being None
elif old_entry is None:
elif new_entry is None:
if old_entry.kind != new_entry.kind:
text_modified, meta_modified = new_entry.detect_changes(old_entry)
if text_modified or meta_modified:
# TODO 20060511 (mbp, rbc) factor out 'detect_rename' here.
if old_entry.parent_id != new_entry.parent_id:
elif old_entry.name != new_entry.name:
if renamed and not modified:
return InventoryEntry.RENAMED
if modified and not renamed:
if modified and renamed:
return InventoryEntry.MODIFIED_AND_RENAMED
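# Illustrative sketch, not part of bzrlib: describe_change() reduces a pair
# of entries to a short label. The entries named here are invented.
#
#   >>> InventoryEntry.describe_change(entry, entry)
#   'unchanged'
#   >>> InventoryEntry.describe_change(entry, edited_entry)
#   'modified'
#   >>> InventoryEntry.describe_change(entry, edited_and_moved_entry)
#   'modified and renamed'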
return ("%s(%r, %r, parent_id=%r, revision=%r)"
% (self.__class__.__name__,
def __eq__(self, other):
# For the case when objects are cached
if not isinstance(other, InventoryEntry):
return NotImplemented
return ((self.file_id == other.file_id)
and (self.name == other.name)
and (other.symlink_target == self.symlink_target)
and (self.text_sha1 == other.text_sha1)
and (self.text_size == other.text_size)
and (self.text_id == other.text_id)
and (self.parent_id == other.parent_id)
and (self.kind == other.kind)
and (self.revision == other.revision)
and (self.executable == other.executable)
and (self.reference_revision == other.reference_revision)
def __ne__(self, other):
return not (self == other)
raise ValueError('not hashable')
def _unchanged(self, previous_ie):
"""Has this entry changed relative to previous_ie.
This method should be overridden in child classes.
# different inv parent
if previous_ie.parent_id != self.parent_id:
elif previous_ie.name != self.name:
elif previous_ie.kind != self.kind:
def _read_tree_state(self, path, work_tree):
"""Populate fields in the inventory entry from the given tree.
Note that this should be modified to be a noop on virtual trees
as all entries created there are prepopulated.
# TODO: Rather than running this manually, we should check the
# working sha1 and other expensive properties when they're
# first requested, or preload them if they're already known
pass # nothing to do by default
def _forget_tree_state(self):
class InventoryDirectory(InventoryEntry):
"""A directory in an inventory."""
__slots__ = ['children']
def _check(self, checker, rev_id):
"""See InventoryEntry._check"""
# In non rich root repositories we do not expect a file graph for the
if self.name == '' and not checker.rich_roots:
# Directories are stored as an empty file, but the file should exist
# to provide a per-fileid log. The hash of every directory content is
# "da..." below (the sha1sum of '').
checker.add_pending_item(rev_id,
('texts', self.file_id, self.revision), 'text',
'da39a3ee5e6b4b0d3255bfef95601890afd80709')
other = InventoryDirectory(self.file_id, self.name, self.parent_id)
other.revision = self.revision
# note that children are *not* copied; they're pulled across when
# others are added
return ("%s(%r, %r, kind=%r, parent_id=%r)"
% (self.__class__.__name__,
def __eq__(self, other):
if not isinstance(other, InventoryEntry):
return NotImplemented
return (self.file_id == other.file_id) \
and (self.name == other.name) \
and (self.text_sha1 == other.text_sha1) \
and (self.text_size == other.text_size) \
and (self.text_id == other.text_id) \
and (self.parent_id == other.parent_id) \
and (self.kind == other.kind) \
and (self.text_version == other.text_version) \
and (self.entry_version == other.entry_version)
def __ne__(self, other):
return not (self == other)
raise ValueError('not hashable')
class RootEntry(InventoryEntry):
def __init__(self, file_id):
self.file_id = file_id
def __init__(self, file_id, name, parent_id):
super(InventoryDirectory, self).__init__(file_id, name, parent_id)
self.children = {}
self.kind = 'root_directory'
self.parent_id = None
def __eq__(self, other):
if not isinstance(other, RootEntry):
return NotImplemented
return (self.file_id == other.file_id) \
and (self.children == other.children)
class Inventory(object):
"""Inventory of versioned files in a tree.
This describes which file_id is present at each point in the tree,
and possibly the SHA-1 or other information about the file.
def kind_character(self):
"""See InventoryEntry.kind_character."""
class InventoryFile(InventoryEntry):
"""A file in an inventory."""
__slots__ = ['text_sha1', 'text_size', 'text_id', 'executable']
def __init__(self, file_id, name, parent_id):
super(InventoryFile, self).__init__(file_id, name, parent_id)
self.text_sha1 = None
self.text_size = None
self.executable = False
def _check(self, checker, tree_revision_id):
"""See InventoryEntry._check"""
# TODO: check size too.
checker.add_pending_item(tree_revision_id,
('texts', self.file_id, self.revision), 'text',
if self.text_size is None:
checker._report_items.append(
'fileid {%s} in {%s} has None for text_size' % (self.file_id,
other = InventoryFile(self.file_id, self.name, self.parent_id)
other.executable = self.executable
other.text_id = self.text_id
other.text_sha1 = self.text_sha1
other.text_size = self.text_size
other.revision = self.revision
def detect_changes(self, old_entry):
"""See InventoryEntry.detect_changes."""
text_modified = (self.text_sha1 != old_entry.text_sha1)
meta_modified = (self.executable != old_entry.executable)
return text_modified, meta_modified
def _diff(self, text_diff, from_label, tree, to_label, to_entry, to_tree,
output_to, reverse=False):
"""See InventoryEntry._diff."""
from bzrlib.diff import DiffText
from_file_id = self.file_id
to_file_id = to_entry.file_id
to_file_id, from_file_id = from_file_id, to_file_id
tree, to_tree = to_tree, tree
from_label, to_label = to_label, from_label
differ = DiffText(tree, to_tree, output_to, 'utf-8', '', '',
return differ.diff_text(from_file_id, to_file_id, from_label, to_label)
"""See InventoryEntry.has_text."""
def kind_character(self):
"""See InventoryEntry.kind_character."""
def _read_tree_state(self, path, work_tree):
"""See InventoryEntry._read_tree_state."""
self.text_sha1 = work_tree.get_file_sha1(self.file_id, path=path)
# FIXME: 20050930 probe for the text size when getting sha1
# in _read_tree_state
self.executable = work_tree.is_executable(self.file_id, path=path)
return ("%s(%r, %r, parent_id=%r, sha1=%r, len=%s, revision=%s)"
% (self.__class__.__name__,
def _forget_tree_state(self):
self.text_sha1 = None
def _unchanged(self, previous_ie):
"""See InventoryEntry._unchanged."""
compatible = super(InventoryFile, self)._unchanged(previous_ie)
if self.text_sha1 != previous_ie.text_sha1:
# FIXME: 20050930 probe for the text size when getting sha1
# in _read_tree_state
self.text_size = previous_ie.text_size
if self.executable != previous_ie.executable:
class InventoryLink(InventoryEntry):
"""A symbolic link in an inventory."""
__slots__ = ['symlink_target']
def __init__(self, file_id, name, parent_id):
super(InventoryLink, self).__init__(file_id, name, parent_id)
self.symlink_target = None
def _check(self, checker, tree_revision_id):
"""See InventoryEntry._check"""
if self.symlink_target is None:
checker._report_items.append(
'symlink {%s} has no target in revision {%s}'
% (self.file_id, tree_revision_id))
# Symlinks are stored as ''
checker.add_pending_item(tree_revision_id,
('texts', self.file_id, self.revision), 'text',
'da39a3ee5e6b4b0d3255bfef95601890afd80709')
other = InventoryLink(self.file_id, self.name, self.parent_id)
other.symlink_target = self.symlink_target
other.revision = self.revision
def detect_changes(self, old_entry):
"""See InventoryEntry.detect_changes."""
# FIXME: which _modified field should we use ? RBC 20051003
text_modified = (self.symlink_target != old_entry.symlink_target)
trace.mutter(" symlink target changed")
meta_modified = False
return text_modified, meta_modified
def _diff(self, text_diff, from_label, tree, to_label, to_entry, to_tree,
output_to, reverse=False):
"""See InventoryEntry._diff."""
from bzrlib.diff import DiffSymlink
old_target = self.symlink_target
if to_entry is not None:
new_target = to_entry.symlink_target
new_target, old_target = old_target, new_target
differ = DiffSymlink(old_tree, new_tree, output_to)
return differ.diff_symlink(old_target, new_target)
def kind_character(self):
"""See InventoryEntry.kind_character."""
def _read_tree_state(self, path, work_tree):
"""See InventoryEntry._read_tree_state."""
self.symlink_target = work_tree.get_symlink_target(self.file_id)
def _forget_tree_state(self):
self.symlink_target = None
def _unchanged(self, previous_ie):
"""See InventoryEntry._unchanged."""
compatible = super(InventoryLink, self)._unchanged(previous_ie)
if self.symlink_target != previous_ie.symlink_target:
class TreeReference(InventoryEntry):
__slots__ = ['reference_revision']
kind = 'tree-reference'
def __init__(self, file_id, name, parent_id, revision=None,
reference_revision=None):
InventoryEntry.__init__(self, file_id, name, parent_id)
self.revision = revision
self.reference_revision = reference_revision
return TreeReference(self.file_id, self.name, self.parent_id,
self.revision, self.reference_revision)
def _read_tree_state(self, path, work_tree):
"""Populate fields in the inventory entry from the given tree.
self.reference_revision = work_tree.get_reference_revision(
def _forget_tree_state(self):
self.reference_revision = None
def _unchanged(self, previous_ie):
"""See InventoryEntry._unchanged."""
compatible = super(TreeReference, self)._unchanged(previous_ie)
if self.reference_revision != previous_ie.reference_revision:
class CommonInventory(object):
"""Basic inventory logic, defined in terms of primitives like has_id.
An inventory is the metadata about the contents of a tree.
This is broadly a map from file_id to entries such as directories, files,
symlinks and tree references. Each entry maintains its own metadata like
SHA1 and length for files, or children for a directory.
Entries can be looked up either by path or by file_id.
The inventory represents a typical unix file tree, with
directories containing files and subdirectories. We never store
the full path to a file, because renaming a directory implicitly
moves all of its contents. This class internally maintains a
InventoryEntry objects must not be modified after they are
inserted, other than through the Inventory API.
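# Illustrative sketch, not part of bzrlib: entries can be reached either by
# path or by file id. The file ids used here are invented.
#
#   >>> inv = Inventory('TREE_ROOT')
#   >>> _ = inv.add(InventoryDirectory('src-id', 'src', 'TREE_ROOT'))
#   >>> _ = inv.add(InventoryFile('foo-id', 'foo.c', 'src-id'))
#   >>> inv.path2id('src/foo.c')
#   'foo-id'
#   >>> inv.id2path('foo-id')
#   'src/foo.c'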
@deprecated_method(deprecated_in((2, 4, 0)))
def __contains__(self, file_id):
"""True if this entry contains a file with given id.
>>> inv = Inventory()
>>> inv.add(InventoryFile('123', 'foo.c', ROOT_ID))
InventoryFile('123', 'foo.c', parent_id='TREE_ROOT', sha1=None, len=None, revision=None)
>>> inv.has_id('123')
>>> inv.has_id('456')
Note that this method along with __iter__ are not encouraged for use as
they are less clear than specific query methods - they may be removed
return self.has_id(file_id)
def has_filename(self, filename):
return bool(self.path2id(filename))
def id2path(self, file_id):
"""Return as a string the path to file_id.
>>> e = i.add(InventoryDirectory('src-id', 'src', ROOT_ID))
>>> e = i.add(InventoryFile('foo-id', 'foo.c', parent_id='src-id'))
>>> print i.id2path('foo-id')
:raises NoSuchId: If file_id is not present in the inventory.
# get all names, skipping root
return '/'.join(reversed(
[parent.name for parent in
self._iter_file_id_parents(file_id)][:-1]))
def iter_entries(self, from_dir=None, recursive=True):
"""Return (path, entry) pairs, in order by name.
:param from_dir: if None, start from the root,
otherwise start from this directory (either file-id or entry)
:param recursive: recurse into directories or not
if self.root is None:
elif isinstance(from_dir, basestring):
from_dir = self[from_dir]
# unrolling the recursive calls changed the time from
# 440ms/663ms (inline/total) to 116ms/116ms
children = from_dir.children.items()
for name, ie in children:
children = collections.deque(children)
stack = [(u'', children)]
from_dir_relpath, children = stack[-1]
name, ie = children.popleft()
# we know that from_dir_relpath never ends in a slash
# and 'f' doesn't begin with one, we can do a string op, rather
# than the checks of pathjoin(), though this means that all paths
path = from_dir_relpath + '/' + name
if ie.kind != 'directory':
# But do this child first
new_children = ie.children.items()
new_children = collections.deque(new_children)
stack.append((path, new_children))
# Break out of inner loop, so that we start outer loop with child
# if we finished all children, pop it off the stack
def _preload_cache(self):
"""Populate any caches, we are about to access all items.
The default implementation does nothing, because CommonInventory doesn't
def iter_entries_by_dir(self, from_dir=None, specific_file_ids=None,
yield_parents=False):
"""Iterate over the entries in a directory first order.
This returns all entries for a directory before returning
the entries for children of a directory. This is not
lexicographically sorted order, and is a hybrid between
depth-first and breadth-first.
:param yield_parents: If True, yield the parents from the root leading
down to specific_file_ids that have been requested. This has no
impact if specific_file_ids is None.
:return: This yields (path, entry) pairs
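# Illustrative sketch, not part of bzrlib: directory-first ordering yields
# everything in a directory before descending into its subdirectories.
# The tree layout below is invented, and the root is assumed to appear as u''.
#
#   >>> [path for path, entry in inv.iter_entries_by_dir()]
#   [u'', u'doc', u'src', u'doc/intro.txt', u'src/foo.c']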
if specific_file_ids and not isinstance(specific_file_ids, set):
specific_file_ids = set(specific_file_ids)
# TODO? Perhaps this should return the from_dir so that the root is
# yielded? or maybe an option?
if from_dir is None and specific_file_ids is None:
# They are iterating from the root, and have not specified any
# specific entries to look at. All current callers fully consume the
# iterator, so we can safely assume we are accessing all entries
self._preload_cache()
if self.root is None:
# Optimize a common case
if (not yield_parents and specific_file_ids is not None and
len(specific_file_ids) == 1):
file_id = list(specific_file_ids)[0]
if self.has_id(file_id):
yield self.id2path(file_id), self[file_id]
if (specific_file_ids is None or yield_parents or
self.root.file_id in specific_file_ids):
elif isinstance(from_dir, basestring):
from_dir = self[from_dir]
if specific_file_ids is not None:
# TODO: jam 20070302 This could really be done as a loop rather
# than a bunch of recursive calls.
def add_ancestors(file_id):
if not byid.has_id(file_id):
parent_id = byid[file_id].parent_id
if parent_id is None:
if parent_id not in parents:
parents.add(parent_id)
add_ancestors(parent_id)
for file_id in specific_file_ids:
add_ancestors(file_id)
stack = [(u'', from_dir)]
cur_relpath, cur_dir = stack.pop()
for child_name, child_ie in sorted(cur_dir.children.iteritems()):
child_relpath = cur_relpath + child_name
if (specific_file_ids is None or
child_ie.file_id in specific_file_ids or
(yield_parents and child_ie.file_id in parents)):
yield child_relpath, child_ie
if child_ie.kind == 'directory':
if parents is None or child_ie.file_id in parents:
child_dirs.append((child_relpath+'/', child_ie))
stack.extend(reversed(child_dirs))
def _make_delta(self, old):
"""Make an inventory delta from two inventories."""
adds = new_ids - old_ids
deletes = old_ids - new_ids
common = old_ids.intersection(new_ids)
for file_id in deletes:
delta.append((old.id2path(file_id), None, file_id, None))
delta.append((None, self.id2path(file_id), file_id, self[file_id]))
for file_id in common:
if old[file_id] != self[file_id]:
delta.append((old.id2path(file_id), self.id2path(file_id),
file_id, self[file_id]))
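# Illustrative sketch, not part of bzrlib: each item of the resulting delta
# is (old_path, new_path, file_id, new_entry). Paths and ids are invented.
#
#   ('README', None, 'readme-id', None)              # only in the old inventory
#   (None, 'doc/intro.txt', 'intro-id', <entry>)     # only in the new inventory
#   ('foo.c', 'src/foo.c', 'foo-id', <entry>)        # in both, but changed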
def make_entry(self, kind, name, parent_id, file_id=None):
"""Simple thunk to bzrlib.inventory.make_entry."""
return make_entry(kind, name, parent_id, file_id)
"""Return list of (path, ie) for all entries except the root.
This may be faster than iter_entries.
def descend(dir_ie, dir_path):
kids = dir_ie.children.items()
for name, ie in kids:
child_path = osutils.pathjoin(dir_path, name)
accum.append((child_path, ie))
if ie.kind == 'directory':
descend(ie, child_path)
if self.root is not None:
descend(self.root, u'')
def path2id(self, relpath):
"""Walk down through directories to return entry of last component.
:param relpath: may be either a list of path components, or a single
string, in which case it is automatically split.
This returns the entry of the last component in the path,
which may be either a file or a directory.
Returns None IFF the path is not found.
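# Illustrative sketch, not part of bzrlib: for an inventory that contains
# 'src/foo.c' under the invented id 'foo-id', both forms of relpath resolve
# to the same entry.
#
#   >>> inv.path2id('src/foo.c')
#   'foo-id'
#   >>> inv.path2id(['src', 'foo.c'])
#   'foo-id'
#   >>> inv.path2id('no/such/file') is None
#   True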
if isinstance(relpath, basestring):
names = osutils.splitpath(relpath)
except errors.NoSuchId:
# root doesn't exist yet so nothing else can
children = getattr(parent, 'children', None)
return parent.file_id
def filter(self, specific_fileids):
"""Get an inventory view filtered against a set of file-ids.
Children of directories and parents are included.
The result may or may not reference the underlying inventory
so it should be treated as immutable.
interesting_parents = set()
for fileid in specific_fileids:
interesting_parents.update(self.get_idpath(fileid))
except errors.NoSuchId:
# This fileid is not in the inventory - that's ok
entries = self.iter_entries()
if self.root is None:
return Inventory(root_id=None)
other = Inventory(entries.next()[1].file_id)
other.root.revision = self.root.revision
other.revision_id = self.revision_id
directories_to_expand = set()
for path, entry in entries:
file_id = entry.file_id
if (file_id in specific_fileids
or entry.parent_id in directories_to_expand):
if entry.kind == 'directory':
directories_to_expand.add(file_id)
elif file_id not in interesting_parents:
other.add(entry.copy())
def get_idpath(self, file_id):
"""Return a list of file_ids for the path to an entry.
The list contains one element for each directory followed by
the id of the file itself. So the length of the returned list
is equal to the depth of the file in the tree, counting the
root directory as depth 1.
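# Illustrative sketch, not part of bzrlib: the idpath runs from the root
# down to the entry itself, so a file at 'src/foo.c' (invented ids) gives:
#
#   >>> inv.get_idpath('foo-id')
#   ['TREE_ROOT', 'src-id', 'foo-id']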
for parent in self._iter_file_id_parents(file_id):
p.insert(0, parent.file_id)
class Inventory(CommonInventory):
"""Mutable dict based in-memory inventory.
We never store the full path to a file, because renaming a directory
implicitly moves all of its contents. This class internally maintains a
lookup tree that allows the children under a directory to be
returned quickly.
InventoryEntry objects must not be modified after they are
inserted, other than through the Inventory API.
>>> inv = Inventory()
>>> inv.add(InventoryEntry('123-123', 'hello.c', 'file', ROOT_ID))
InventoryEntry('123-123', 'hello.c', kind='file', parent_id='TREE_ROOT')
>>> inv.add(InventoryFile('123-123', 'hello.c', ROOT_ID))
InventoryFile('123-123', 'hello.c', parent_id='TREE_ROOT', sha1=None, len=None, revision=None)
>>> inv['123-123'].name
May be treated as an iterator or set to look up file ids:
>>> bool(inv.path2id('hello.c'))
May also look up by name:
>>> [x[0] for x in inv.iter_entries()]
>>> inv = Inventory('TREE_ROOT-12345678-12345678')
>>> inv.add(InventoryEntry('123-123', 'hello.c', 'file', ROOT_ID))
InventoryEntry('123-123', 'hello.c', kind='file', parent_id='TREE_ROOT-12345678-12345678')
Id's may be looked up from paths:
>>> inv.path2id('hello.c')
>>> inv.has_id('123-123')
There are iterators over the contents:
>>> [entry[0] for entry in inv.iter_entries()]
def __init__(self, root_id=ROOT_ID):
def __init__(self, root_id=ROOT_ID, revision_id=None):
"""Create or read an inventory.
If a working directory is specified, the inventory is read
The inventory is created with a default root directory, with
# We are letting Branch(init=True) create a unique inventory
# root id. Rather than generating a random one here.
# root_id = bzrlib.branch.gen_file_id('TREE_ROOT')
self.root = RootEntry(root_id)
if root_id is not None:
self._set_root(InventoryDirectory(root_id, u'', None))
self.revision_id = revision_id
# More than one page of output is not useful anymore to debug
contents = repr(self._byid)
if len(contents) > max_len:
contents = contents[:(max_len-len(closing))] + closing
return "<Inventory object at %x, contents=%r>" % (id(self), contents)
def apply_delta(self, delta):
"""Apply a delta to this inventory.
See the inventory developers documentation for the theory behind
If delta application fails the inventory is left in an indeterminate
state and must not be used.
:param delta: A list of changes to apply. After all the changes are
applied the final inventory must be internally consistent, but it
is ok to supply changes which, if only half-applied would have an
invalid result - such as supplying two changes which rename two
files, 'A' and 'B' with each other : [('A', 'B', 'A-id', a_entry),
('B', 'A', 'B-id', b_entry)].
Each change is a tuple, of the form (old_path, new_path, file_id,
When new_path is None, the change indicates the removal of an entry
from the inventory and new_entry will be ignored (using None is
appropriate). If new_path is not None, then new_entry must be an
InventoryEntry instance, which will be incorporated into the
inventory (and replace any existing entry with the same file id).
When old_path is None, the change indicates the addition of
a new entry to the inventory.
When neither new_path nor old_path are None, the change is a
modification to an entry, such as a rename, reparent, kind change
The children attribute of new_entry is ignored. This is because
this method preserves children automatically across alterations to
the parent of the children, and cases where the parent id of a
child is changing require the child to be passed in as a separate
change regardless. E.g. in the recursive deletion of a directory -
the directory's children must be included in the delta, or the
final inventory will be invalid.
Note that a file_id must only appear once within a given delta.
An AssertionError is raised otherwise.
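# Illustrative sketch, not part of bzrlib: a delta that removes one file,
# adds another and renames a third. All paths and ids are invented.
#
#   >>> inv.apply_delta([
#   ...     ('old.txt', None, 'old-id', None),
#   ...     (None, 'new.txt', 'new-id',
#   ...      InventoryFile('new-id', 'new.txt', inv.root.file_id)),
#   ...     ('a.txt', 'b.txt', 'a-id',
#   ...      InventoryFile('a-id', 'b.txt', inv.root.file_id)),
#   ... ])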
# Check that the delta is legal. It would be nice if this could be
# done within the loops below but it's safer to validate the delta
# before starting to mutate the inventory, as there isn't a rollback
list(_check_delta_unique_ids(_check_delta_unique_new_paths(
_check_delta_unique_old_paths(_check_delta_ids_match_entry(
_check_delta_ids_are_valid(
_check_delta_new_path_entry_both_or_None(
# Remove all affected items which were in the original inventory,
# starting with the longest paths, thus ensuring parents are examined
# after their children, which means that everything we examine has no
# modified children remaining by the time we examine it.
for old_path, file_id in sorted(((op, f) for op, np, f, e in delta
if op is not None), reverse=True):
# Preserve unaltered children of file_id for later reinsertion.
file_id_children = getattr(self[file_id], 'children', {})
if len(file_id_children):
children[file_id] = file_id_children
if self.id2path(file_id) != old_path:
raise errors.InconsistentDelta(old_path, file_id,
"Entry was at wrong other path %r." % self.id2path(file_id))
# Remove file_id and the unaltered children. If file_id is not
# being deleted it will be reinserted back later.
self.remove_recursive_id(file_id)
# Insert all affected which should be in the new inventory, reattaching
# their children if they had any. This is done from shortest path to
# longest, ensuring that items which were modified and whose parents in
# the resulting inventory were also modified, are inserted after their
for new_path, f, new_entry in sorted((np, f, e) for op, np, f, e in
delta if np is not None):
if new_entry.kind == 'directory':
# Pop the children here to allow detection of children whose
# parents were deleted and which were not reattached to a new
replacement = InventoryDirectory(new_entry.file_id,
new_entry.name, new_entry.parent_id)
replacement.revision = new_entry.revision
replacement.children = children.pop(replacement.file_id, {})
new_entry = replacement
except errors.DuplicateFileId:
raise errors.InconsistentDelta(new_path, new_entry.file_id,
"New id is already present in target.")
except AttributeError:
raise errors.InconsistentDelta(new_path, new_entry.file_id,
"Parent is not a directory.")
if self.id2path(new_entry.file_id) != new_path:
raise errors.InconsistentDelta(new_path, new_entry.file_id,
"New path is not consistent with parent path.")
# Get the parent id that was deleted
parent_id, children = children.popitem()
raise errors.InconsistentDelta("<deleted>", parent_id,
"The file id was deleted but its children were not deleted.")
def create_by_apply_delta(self, inventory_delta, new_revision_id,
propagate_caches=False):
"""See CHKInventory.create_by_apply_delta()"""
new_inv = self.copy()
new_inv.apply_delta(inventory_delta)
new_inv.revision_id = new_revision_id
def _set_root(self, ie):
self._byid = {self.root.file_id: self.root}
# TODO: jam 20051218 Should copy also copy the revision_id?
entries = self.iter_entries()
if self.root is None:
return Inventory(root_id=None)
other = Inventory(entries.next()[1].file_id)
other.root.revision = self.root.revision
# copy recursively so we know directories will be added before
# their children. There are more efficient ways than this...
for path, entry in entries:
other.add(entry.copy())
def __iter__(self):
"""Iterate over all file-ids."""
return iter(self._byid)
def iter_just_entries(self):
"""Iterate over all entries.
Unlike iter_entries(), just the entries are returned (not (path, ie))
and the order of entries is undefined.
XXX: We may not want to merge this into bzr.dev.
if self.root is None:
for _, ie in self._byid.iteritems():
def __len__(self):
"""Returns number of entries."""
return len(self._byid)
def iter_entries(self, from_dir=None):
"""Return (path, entry) pairs, in order by name."""
elif isinstance(from_dir, basestring):
from_dir = self._byid[from_dir]
kids = from_dir.children.items()
for name, ie in kids:
if ie.kind == 'directory':
for cn, cie in self.iter_entries(from_dir=ie.file_id):
yield os.path.join(name, cn), cie
"""Return list of (path, ie) for all entries except the root.
This may be faster than iter_entries.
def descend(dir_ie, dir_path):
kids = dir_ie.children.items()
for name, ie in kids:
child_path = os.path.join(dir_path, name)
accum.append((child_path, ie))
if ie.kind == 'directory':
descend(ie, child_path)
descend(self.root, '')
def directories(self):
"""Return (path, entry) pairs for all directories, including the root.
def descend(parent_ie, parent_path):
accum.append((parent_path, parent_ie))
kids = [(ie.name, ie) for ie in parent_ie.children.itervalues() if ie.kind == 'directory']
for name, child_ie in kids:
child_path = os.path.join(parent_path, name)
descend(child_ie, child_path)
descend(self.root, '')
def __contains__(self, file_id):
"""True if this entry contains a file with given id.
>>> inv = Inventory()
>>> inv.add(InventoryEntry('123', 'foo.c', 'file', ROOT_ID))
InventoryEntry('123', 'foo.c', kind='file', parent_id='TREE_ROOT')
return file_id in self._byid
def __getitem__(self, file_id):
"""Return the entry for given file_id.
>>> inv = Inventory()
>>> inv.add(InventoryEntry('123123', 'hello.c', 'file', ROOT_ID))
InventoryEntry('123123', 'hello.c', kind='file', parent_id='TREE_ROOT')
>>> inv.add(InventoryFile('123123', 'hello.c', ROOT_ID))
InventoryFile('123123', 'hello.c', parent_id='TREE_ROOT', sha1=None, len=None, revision=None)
>>> inv['123123'].name
return self._byid[file_id]
except KeyError:
raise BzrError("can't look up file_id None")
raise BzrError("file_id {%s} not in inventory" % file_id)
# really we're passing an inventory, not a tree...
raise errors.NoSuchId(self, file_id)
def get_file_kind(self, file_id):
return self._byid[file_id].kind
del old_parent.children[file_ie.name]
new_parent.children[new_name] = file_ie
file_ie.name = new_name
file_ie.parent_id = new_parent_id
def is_root(self, file_id):
return self.root is not None and file_id == self.root.file_id
class CHKInventory(CommonInventory):
"""An inventory persisted in a CHK store.
By design, a CHKInventory is immutable so many of the methods
supported by Inventory - add, rename, apply_delta, etc - are *not*
supported. To create a new CHKInventory, use create_by_apply_delta()
or from_inventory(), say.
Internally, a CHKInventory has one or two CHKMaps:
* id_to_entry - a map from (file_id,) => InventoryEntry as bytes
* parent_id_basename_to_file_id - a map from (parent_id, basename_utf8)
The second map is optional and not present in early CHKRepositories.
No caching is performed: every method call or item access will perform
requests to the storage layer. As such, keep references to objects you
def __init__(self, search_key_name):
CommonInventory.__init__(self)
self._fileid_to_entry_cache = {}
self._fully_cached = False
self._path_to_fileid_cache = {}
self._search_key_name = search_key_name
def __eq__(self, other):
"""Compare two sets by comparing their contents."""
if not isinstance(other, CHKInventory):
return NotImplemented
this_key = self.id_to_entry.key()
other_key = other.id_to_entry.key()
this_pid_key = self.parent_id_basename_to_file_id.key()
other_pid_key = other.parent_id_basename_to_file_id.key()
if None in (this_key, this_pid_key, other_key, other_pid_key):
return this_key == other_key and this_pid_key == other_pid_key
def _entry_to_bytes(self, entry):
"""Serialise entry as a single bytestring.
:param Entry: An inventory entry.
:return: A bytestring for the entry.
ENTRY ::= FILE | DIR | SYMLINK | TREE
FILE ::= "file: " COMMON SEP SHA SEP SIZE SEP EXECUTABLE
DIR ::= "dir: " COMMON
SYMLINK ::= "symlink: " COMMON SEP TARGET_UTF8
TREE ::= "tree: " COMMON REFERENCE_REVISION
COMMON ::= FILE_ID SEP PARENT_ID SEP NAME_UTF8 SEP REVISION
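# Illustrative sketch, not part of bzrlib: an empty, non-executable file
# entry serialised with the grammar above (ids and revision are invented;
# the sha1 is that of the empty string).
#
#   "file: foo-id\nTREE_ROOT\nfoo.c\nrev-1\n"
#   "da39a3ee5e6b4b0d3255bfef95601890afd80709\n0\nN"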
if entry.parent_id is not None:
parent_str = entry.parent_id
name_str = entry.name.encode("utf8")
if entry.kind == 'file':
if entry.executable:
return "file: %s\n%s\n%s\n%s\n%s\n%d\n%s" % (
entry.file_id, parent_str, name_str, entry.revision,
entry.text_sha1, entry.text_size, exec_str)
elif entry.kind == 'directory':
return "dir: %s\n%s\n%s\n%s" % (
entry.file_id, parent_str, name_str, entry.revision)
elif entry.kind == 'symlink':
return "symlink: %s\n%s\n%s\n%s\n%s" % (
entry.file_id, parent_str, name_str, entry.revision,
entry.symlink_target.encode("utf8"))
elif entry.kind == 'tree-reference':
return "tree: %s\n%s\n%s\n%s\n%s" % (
entry.file_id, parent_str, name_str, entry.revision,
entry.reference_revision)
raise ValueError("unknown kind %r" % entry.kind)
def _expand_fileids_to_parents_and_children(self, file_ids):
"""Give a more holistic view starting with the given file_ids.
For any file_id which maps to a directory, we will include all children
of that directory. We will also include all directories which are
parents of the given file_ids, but we will not include their children.
fringle # fringle-id
if given [foo-id] we will include
TREE_ROOT as interesting parents
foo-id, baz-id, frob-id, fringle-id
# TODO: Pre-pass over the list of fileids to see if anything is already
# deserialized in self._fileid_to_entry_cache
directories_to_expand = set()
children_of_parent_id = {}
# It is okay if some of the fileids are missing
for entry in self._getitems(file_ids):
if entry.kind == 'directory':
directories_to_expand.add(entry.file_id)
interesting.add(entry.parent_id)
children_of_parent_id.setdefault(entry.parent_id, set()
).add(entry.file_id)
# Now, interesting has all of the direct parents, but not the
# parents of those parents. It also may have some duplicates with
remaining_parents = interesting.difference(file_ids)
# When we hit the TREE_ROOT, we'll get an interesting parent of None,
# but we don't actually want to recurse into that
interesting.add(None) # this will auto-filter it in the loop
remaining_parents.discard(None)
while remaining_parents:
next_parents = set()
for entry in self._getitems(remaining_parents):
next_parents.add(entry.parent_id)
children_of_parent_id.setdefault(entry.parent_id, set()
).add(entry.file_id)
# Remove any search tips we've already processed
remaining_parents = next_parents.difference(interesting)
interesting.update(remaining_parents)
# We should probably also .difference(directories_to_expand)
interesting.update(file_ids)
interesting.discard(None)
while directories_to_expand:
# Expand directories by looking in the
# parent_id_basename_to_file_id map
keys = [StaticTuple(f,).intern() for f in directories_to_expand]
directories_to_expand = set()
items = self.parent_id_basename_to_file_id.iteritems(keys)
next_file_ids = set([item[1] for item in items])
next_file_ids = next_file_ids.difference(interesting)
interesting.update(next_file_ids)
for entry in self._getitems(next_file_ids):
if entry.kind == 'directory':
directories_to_expand.add(entry.file_id)
children_of_parent_id.setdefault(entry.parent_id, set()
).add(entry.file_id)
return interesting, children_of_parent_id
def filter(self, specific_fileids):
"""Get an inventory view filtered against a set of file-ids.
Children of directories and parents are included.
The result may or may not reference the underlying inventory
so it should be treated as immutable.
parent_to_children) = self._expand_fileids_to_parents_and_children(
# There is some overlap here, but we assume that all interesting items
# are in the _fileid_to_entry_cache because we had to read them to
# determine if they were a dir we wanted to recurse, or just a file
# This should give us all the entries we'll want to add, so start
other = Inventory(self.root_id)
other.root.revision = self.root.revision
other.revision_id = self.revision_id
if not interesting or not parent_to_children:
# empty filter, or filtering entries that don't exist
# (if even 1 existed, then we would have populated
# parent_to_children with at least the tree root.)
cache = self._fileid_to_entry_cache
remaining_children = collections.deque(parent_to_children[self.root_id])
while remaining_children:
file_id = remaining_children.popleft()
if ie.kind == 'directory':
ie = ie.copy() # We create a copy to depopulate the .children attribute
# TODO: depending on the uses of 'other' we should probably always
# '.copy()' to prevent someone from mutating other and
# invalidating our internal cache
if file_id in parent_to_children:
remaining_children.extend(parent_to_children[file_id])
def _bytes_to_utf8name_key(bytes):
"""Get the file_id, revision_id key out of bytes."""
# We don't normally care about name, except for times when we want
# to filter out empty names because of non rich-root...
sections = bytes.split('\n')
kind, file_id = sections[0].split(': ')
return (sections[2], intern(file_id), intern(sections[3]))
def _bytes_to_entry(self, bytes):
"""Deserialise a serialised entry."""
sections = bytes.split('\n')
if sections[0].startswith("file: "):
result = InventoryFile(sections[0][6:],
sections[2].decode('utf8'),
result.text_sha1 = sections[4]
result.text_size = int(sections[5])
result.executable = sections[6] == "Y"
elif sections[0].startswith("dir: "):
result = CHKInventoryDirectory(sections[0][5:],
sections[2].decode('utf8'),
elif sections[0].startswith("symlink: "):
result = InventoryLink(sections[0][9:],
sections[2].decode('utf8'),
result.symlink_target = sections[4].decode('utf8')
elif sections[0].startswith("tree: "):
result = TreeReference(sections[0][6:],
sections[2].decode('utf8'),
result.reference_revision = sections[4]
raise ValueError("Not a serialised entry %r" % bytes)
result.file_id = intern(result.file_id)
result.revision = intern(sections[3])
if result.parent_id == '':
result.parent_id = None
self._fileid_to_entry_cache[result.file_id] = result
def create_by_apply_delta(self, inventory_delta, new_revision_id,
propagate_caches=False):
"""Create a new CHKInventory by applying inventory_delta to this one.
See the inventory developers documentation for the theory behind
:param inventory_delta: The inventory delta to apply. See
Inventory.apply_delta for details.
:param new_revision_id: The revision id of the resulting CHKInventory.
:param propagate_caches: If True, the caches for this inventory are
copied to and updated for the result.
:return: The new CHKInventory.
split = osutils.split
result = CHKInventory(self._search_key_name)
if propagate_caches:
# Just propagate the path-to-fileid cache for now
result._path_to_fileid_cache = dict(self._path_to_fileid_cache.iteritems())
search_key_func = chk_map.search_key_registry.get(self._search_key_name)
self.id_to_entry._ensure_root()
maximum_size = self.id_to_entry._root_node.maximum_size
result.revision_id = new_revision_id
result.id_to_entry = chk_map.CHKMap(
self.id_to_entry._store,
self.id_to_entry.key(),
search_key_func=search_key_func)
result.id_to_entry._ensure_root()
result.id_to_entry._root_node.set_maximum_size(maximum_size)
# Change to apply to the parent_id_basename delta. The dict maps
# (parent_id, basename) -> (old_key, new_value). We use a dict because
# when a path has its id replaced (e.g. the root is changed, or someone
# does bzr mv a b, bzr mv c a, we should output a single change to this
# map rather than two.
parent_id_basename_delta = {}
if self.parent_id_basename_to_file_id is not None:
result.parent_id_basename_to_file_id = chk_map.CHKMap(
self.parent_id_basename_to_file_id._store,
self.parent_id_basename_to_file_id.key(),
search_key_func=search_key_func)
result.parent_id_basename_to_file_id._ensure_root()
self.parent_id_basename_to_file_id._ensure_root()
result_p_id_root = result.parent_id_basename_to_file_id._root_node
p_id_root = self.parent_id_basename_to_file_id._root_node
result_p_id_root.set_maximum_size(p_id_root.maximum_size)
result_p_id_root._key_width = p_id_root._key_width
result.parent_id_basename_to_file_id = None
result.root_id = self.root_id
id_to_entry_delta = []
# inventory_delta is only traversed once, so we just update the
# Check for repeated file ids
inventory_delta = _check_delta_unique_ids(inventory_delta)
# Repeated old paths
inventory_delta = _check_delta_unique_old_paths(inventory_delta)
# Check for repeated new paths
inventory_delta = _check_delta_unique_new_paths(inventory_delta)
# Check for entries that don't match the fileid
inventory_delta = _check_delta_ids_match_entry(inventory_delta)
# Check for nonsense fileids
inventory_delta = _check_delta_ids_are_valid(inventory_delta)
# Check for new_path <-> entry consistency
inventory_delta = _check_delta_new_path_entry_both_or_None(
# All changed entries need to have their parents be directories and be
# at the right path. This set contains (path, id) tuples.
# When we delete an item, all the children of it must be either deleted
# or altered in their own right. As we batch process the change via
# CHKMap.apply_delta, we build a set of things to use to validate the
for old_path, new_path, file_id, entry in inventory_delta:
result.root_id = file_id
if new_path is None:
if propagate_caches:
del result._path_to_fileid_cache[old_path]
deletes.add(file_id)
new_key = StaticTuple(file_id,)
new_value = result._entry_to_bytes(entry)
# Update caches. It's worth doing this whether
# we're propagating the old caches or not.
result._path_to_fileid_cache[new_path] = file_id
parents.add((split(new_path)[0], entry.parent_id))
if old_path is None:
old_key = StaticTuple(file_id,)
if self.id2path(file_id) != old_path:
raise errors.InconsistentDelta(old_path, file_id,
"Entry was at wrong other path %r." %
self.id2path(file_id))
altered.add(file_id)
id_to_entry_delta.append(StaticTuple(old_key, new_key, new_value))
if result.parent_id_basename_to_file_id is not None:
# parent_id, basename changes
if old_path is None:
old_entry = self[file_id]
old_key = self._parent_id_basename_key(old_entry)
if new_path is None:
new_key = self._parent_id_basename_key(entry)
# If the two keys are the same, the value will be unchanged
# as it's always the file id for this entry.
if old_key != new_key:
# Transform a change into explicit delete/add preserving
# a possible match on the key from a different file id.
if old_key is not None:
parent_id_basename_delta.setdefault(
old_key, [None, None])[0] = old_key
if new_key is not None:
parent_id_basename_delta.setdefault(
new_key, [None, None])[1] = new_value
# validate that deletes are complete.
for file_id in deletes:
entry = self[file_id]
if entry.kind != 'directory':
# This loop could potentially be better by using the id_basename
# map to just get the child file ids.
for child in entry.children.values():
if child.file_id not in altered:
raise errors.InconsistentDelta(self.id2path(child.file_id),
child.file_id, "Child not deleted or reparented when "
result.id_to_entry.apply_delta(id_to_entry_delta)
if parent_id_basename_delta:
# Transform the parent_id_basename delta data into a linear delta
# with only one record for a given key. Optimally this would allow
# re-keying, but it's simpler to just output that as a delete+add
# to spend less time calculating the delta.
for key, (old_key, value) in parent_id_basename_delta.iteritems():
if value is not None:
delta_list.append((old_key, key, value))
delta_list.append((old_key, None, None))
result.parent_id_basename_to_file_id.apply_delta(delta_list)
parents.discard(('', None))
for parent_path, parent in parents:
if result[parent].kind != 'directory':
raise errors.InconsistentDelta(result.id2path(parent), parent,
'Not a directory, but given children')
except errors.NoSuchId:
raise errors.InconsistentDelta("<unknown>", parent,
"Parent is not present in resulting inventory.")
if result.path2id(parent_path) != parent:
raise errors.InconsistentDelta(parent_path, parent,
"Parent has wrong path %r." % result.path2id(parent_path))
def deserialise(klass, chk_store, bytes, expected_revision_id):
"""Deserialise a CHKInventory.
:param chk_store: A CHK capable VersionedFiles instance.
:param bytes: The serialised bytes.
:param expected_revision_id: The revision ID we think this inventory is
:return: A CHKInventory
lines = bytes.split('\n')
raise AssertionError('bytes to deserialize must end with an eol')
if lines[0] != 'chkinventory:':
raise ValueError("not a serialised CHKInventory: %r" % bytes)
allowed_keys = frozenset(['root_id', 'revision_id', 'search_key_name',
'parent_id_basename_to_file_id',
for line in lines[1:]:
key, value = line.split(': ', 1)
if key not in allowed_keys:
raise errors.BzrError('Unknown key in inventory: %r\n%r'
raise errors.BzrError('Duplicate key in inventory: %r\n%r'
revision_id = intern(info['revision_id'])
root_id = intern(info['root_id'])
search_key_name = intern(info.get('search_key_name', 'plain'))
parent_id_basename_to_file_id = intern(info.get(
'parent_id_basename_to_file_id', None))
if not parent_id_basename_to_file_id.startswith('sha1:'):
raise ValueError('parent_id_basename_to_file_id should be a sha1'
' key not %r' % (parent_id_basename_to_file_id,))
id_to_entry = info['id_to_entry']
if not id_to_entry.startswith('sha1:'):
raise ValueError('id_to_entry should be a sha1'
' key not %r' % (id_to_entry,))
result = CHKInventory(search_key_name)
result.revision_id = revision_id
result.root_id = root_id
search_key_func = chk_map.search_key_registry.get(
result._search_key_name)
if parent_id_basename_to_file_id is not None:
result.parent_id_basename_to_file_id = chk_map.CHKMap(
chk_store, StaticTuple(parent_id_basename_to_file_id,),
search_key_func=search_key_func)
result.parent_id_basename_to_file_id = None
result.id_to_entry = chk_map.CHKMap(chk_store,
StaticTuple(id_to_entry,),
search_key_func=search_key_func)
if (result.revision_id,) != expected_revision_id:
raise ValueError("Mismatched revision id and expected: %r, %r" %
(result.revision_id, expected_revision_id))
def from_inventory(klass, chk_store, inventory, maximum_size=0, search_key_name='plain'):
"""Create a CHKInventory from an existing inventory.
The content of inventory is copied into the chk_store, and a
CHKInventory referencing that is returned.
:param chk_store: A CHK capable VersionedFiles instance.
:param inventory: The inventory to copy.
:param maximum_size: The CHKMap node size limit.
:param search_key_name: The identifier for the search key function
result = klass(search_key_name)
result.revision_id = inventory.revision_id
result.root_id = inventory.root.file_id
entry_to_bytes = result._entry_to_bytes
parent_id_basename_key = result._parent_id_basename_key
id_to_entry_dict = {}
parent_id_basename_dict = {}
for path, entry in inventory.iter_entries():
key = StaticTuple(entry.file_id,).intern()
id_to_entry_dict[key] = entry_to_bytes(entry)
p_id_key = parent_id_basename_key(entry)
parent_id_basename_dict[p_id_key] = entry.file_id
result._populate_from_dicts(chk_store, id_to_entry_dict,
parent_id_basename_dict, maximum_size=maximum_size)
def _populate_from_dicts(self, chk_store, id_to_entry_dict,
parent_id_basename_dict, maximum_size):
search_key_func = chk_map.search_key_registry.get(self._search_key_name)
root_key = chk_map.CHKMap.from_dict(chk_store, id_to_entry_dict,
maximum_size=maximum_size, key_width=1,
search_key_func=search_key_func)
self.id_to_entry = chk_map.CHKMap(chk_store, root_key,
root_key = chk_map.CHKMap.from_dict(chk_store,
parent_id_basename_dict,
maximum_size=maximum_size, key_width=2,
search_key_func=search_key_func)
self.parent_id_basename_to_file_id = chk_map.CHKMap(chk_store,
def _parent_id_basename_key(self, entry):
1876
"""Create a key for a entry in a parent_id_basename_to_file_id index."""
1877
if entry.parent_id is not None:
1878
parent_id = entry.parent_id
1881
return StaticTuple(parent_id, entry.name.encode('utf8')).intern()
1883
def __getitem__(self, file_id):
1884
"""map a single file_id -> InventoryEntry."""
1886
raise errors.NoSuchId(self, file_id)
1887
result = self._fileid_to_entry_cache.get(file_id, None)
1888
if result is not None:
1891
return self._bytes_to_entry(
1892
self.id_to_entry.iteritems([StaticTuple(file_id,)]).next()[1])
1893
except StopIteration:
1894
# really we're passing an inventory, not a tree...
1895
raise errors.NoSuchId(self, file_id)

    def _getitems(self, file_ids):
        """Similar to __getitem__, but lets you query for multiple.

        The returned order is undefined. And currently if an item doesn't
        exist, it isn't included in the output.
        """
        result = []
        remaining = []
        for file_id in file_ids:
            entry = self._fileid_to_entry_cache.get(file_id, None)
            if entry is None:
                remaining.append(file_id)
            else:
                result.append(entry)
        file_keys = [StaticTuple(f,).intern() for f in remaining]
        for file_key, value in self.id_to_entry.iteritems(file_keys):
            entry = self._bytes_to_entry(value)
            result.append(entry)
            self._fileid_to_entry_cache[entry.file_id] = entry
        return result

    def has_id(self, file_id):
        # Perhaps have an explicit 'contains' method on CHKMap ?
        if self._fileid_to_entry_cache.get(file_id, None) is not None:
            return True
        return len(list(
            self.id_to_entry.iteritems([StaticTuple(file_id,)]))) == 1

    def is_root(self, file_id):
        return file_id == self.root_id

    def _iter_file_id_parents(self, file_id):
        """Yield the parents of file_id up to the root."""
        while file_id is not None:
            try:
                ie = self[file_id]
            except KeyError:
                raise errors.NoSuchId(tree=self, file_id=file_id)
            yield ie
            file_id = ie.parent_id

    def __iter__(self):
        """Iterate over all file-ids."""
        for key, _ in self.id_to_entry.iteritems():
            yield key[-1]

    def iter_just_entries(self):
        """Iterate over all entries.

        Unlike iter_entries(), just the entries are returned (not (path, ie))
        and the order of entries is undefined.

        XXX: We may not want to merge this into bzr.dev.
        """
        for key, entry in self.id_to_entry.iteritems():
            file_id = key[0]
            ie = self._fileid_to_entry_cache.get(file_id, None)
            if ie is None:
                ie = self._bytes_to_entry(entry)
                self._fileid_to_entry_cache[file_id] = ie
            yield ie

    def _preload_cache(self):
        """Make sure all file-ids are in _fileid_to_entry_cache."""
        if self._fully_cached:
            return # No need to do it again
        # The optimal sort order is to use iteritems() directly
        cache = self._fileid_to_entry_cache
        for key, entry in self.id_to_entry.iteritems():
            file_id = key[0]
            if file_id not in cache:
                ie = self._bytes_to_entry(entry)
                cache[file_id] = ie

        last_parent_id = last_parent_ie = None
        pid_items = self.parent_id_basename_to_file_id.iteritems()
        for key, child_file_id in pid_items:
            if key == ('', ''): # This is the root
                if child_file_id != self.root_id:
                    raise ValueError('Data inconsistency detected.'
                        ' We expected data with key ("","") to match'
                        ' the root id, but %s != %s'
                        % (child_file_id, self.root_id))
                continue
            parent_id, basename = key
            ie = cache[child_file_id]
            if parent_id == last_parent_id:
                parent_ie = last_parent_ie
            else:
                parent_ie = cache[parent_id]
            if parent_ie.kind != 'directory':
                raise ValueError('Data inconsistency detected.'
                    ' An entry in the parent_id_basename_to_file_id map'
                    ' has parent_id {%s} but the kind of that object'
                    ' is %r not "directory"' % (parent_id, parent_ie.kind))
            if parent_ie._children is None:
                parent_ie._children = {}
            basename = basename.decode('utf-8')
            if basename in parent_ie._children:
                existing_ie = parent_ie._children[basename]
                if existing_ie != ie:
                    raise ValueError('Data inconsistency detected.'
                        ' Two entries with basename %r were found'
                        ' in the parent entry {%s}'
                        % (basename, parent_id))
            if basename != ie.name:
                raise ValueError('Data inconsistency detected.'
                    ' In the parent_id_basename_to_file_id map, file_id'
                    ' {%s} is listed as having basename %r, but in the'
                    ' id_to_entry map it is %r'
                    % (child_file_id, basename, ie.name))
            parent_ie._children[basename] = ie
        self._fully_cached = True

    def iter_changes(self, basis):
        """Generate a Tree.iter_changes change list between this and basis.

        :param basis: Another CHKInventory.
        :return: An iterator over the changes between self and basis, as per
            tree.iter_changes().
        """
        # We want: (file_id, (path_in_source, path_in_target),
        # changed_content, versioned, parent, name, kind,
        # executable)
        for key, basis_value, self_value in \
            self.id_to_entry.iter_changes(basis.id_to_entry):
            file_id = key[0]
            if basis_value is not None:
                basis_entry = basis._bytes_to_entry(basis_value)
                path_in_source = basis.id2path(file_id)
                basis_parent = basis_entry.parent_id
                basis_name = basis_entry.name
                basis_executable = basis_entry.executable
            else:
                path_in_source = None
                basis_parent = None
                basis_name = None
                basis_executable = None
            if self_value is not None:
                self_entry = self._bytes_to_entry(self_value)
                path_in_target = self.id2path(file_id)
                self_parent = self_entry.parent_id
                self_name = self_entry.name
                self_executable = self_entry.executable
            else:
                path_in_target = None
                self_parent = None
                self_name = None
                self_executable = None
            if basis_value is None:
                # added
                kind = (None, self_entry.kind)
                versioned = (False, True)
            elif self_value is None:
                # deleted
                kind = (basis_entry.kind, None)
                versioned = (True, False)
            else:
                kind = (basis_entry.kind, self_entry.kind)
                versioned = (True, True)
            changed_content = False
            if kind[0] != kind[1]:
                changed_content = True
            elif kind[0] == 'file':
                if (self_entry.text_size != basis_entry.text_size or
                    self_entry.text_sha1 != basis_entry.text_sha1):
                    changed_content = True
            elif kind[0] == 'symlink':
                if self_entry.symlink_target != basis_entry.symlink_target:
                    changed_content = True
            elif kind[0] == 'tree-reference':
                if (self_entry.reference_revision !=
                    basis_entry.reference_revision):
                    changed_content = True
            parent = (basis_parent, self_parent)
            name = (basis_name, self_name)
            executable = (basis_executable, self_executable)
            if (not changed_content
                and parent[0] == parent[1]
                and name[0] == name[1]
                and executable[0] == executable[1]):
                # Could happen when only the revision changed for a directory
                # for instance.
                continue
            yield (file_id, (path_in_source, path_in_target), changed_content,
                versioned, parent, name, kind, executable)
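
        # Consuming the change tuples (illustrative sketch; `new_inv` and
        # `basis_inv` are assumed to be two CHKInventory objects):
        #
        #   for (file_id, (old_path, new_path), changed_content, versioned,
        #        parent, name, kind, executable) in new_inv.iter_changes(basis_inv):
        #       if changed_content:
        #           print 'content of %s changed (%s -> %s)' % (file_id,
        #               old_path, new_path)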

    def __len__(self):
        """Return the number of entries in the inventory."""
        return len(self.id_to_entry)

    def _make_delta(self, old):
        """Make an inventory delta from two inventories."""
        if type(old) != CHKInventory:
            return CommonInventory._make_delta(self, old)
        delta = []
        for key, old_value, self_value in \
            self.id_to_entry.iter_changes(old.id_to_entry):
            file_id = key[0]
            if old_value is not None:
                old_path = old.id2path(file_id)
            else:
                old_path = None
            if self_value is not None:
                entry = self._bytes_to_entry(self_value)
                self._fileid_to_entry_cache[file_id] = entry
                new_path = self.id2path(file_id)
            else:
                entry = None
                new_path = None
            delta.append((old_path, new_path, file_id, entry))
        return delta

    def path2id(self, relpath):
        """See CommonInventory.path2id()."""
        # TODO: perhaps support negative hits?
        result = self._path_to_fileid_cache.get(relpath, None)
        if result is not None:
            return result
        if isinstance(relpath, basestring):
            names = osutils.splitpath(relpath)
        else:
            names = relpath
        current_id = self.root_id
        if current_id is None:
            return None
        parent_id_index = self.parent_id_basename_to_file_id
        cur_path = None
        for basename in names:
            if cur_path is None:
                cur_path = basename
            else:
                cur_path = cur_path + '/' + basename
            basename_utf8 = basename.encode('utf8')
            file_id = self._path_to_fileid_cache.get(cur_path, None)
            if file_id is None:
                key_filter = [StaticTuple(current_id, basename_utf8)]
                items = parent_id_index.iteritems(key_filter)
                for (parent_id, name_utf8), file_id in items:
                    if parent_id != current_id or name_utf8 != basename_utf8:
                        raise errors.BzrError("corrupt inventory lookup! "
                            "%r %r %r %r" % (parent_id, current_id, name_utf8,
                            basename_utf8))
                if file_id is None:
                    return None
                else:
                    self._path_to_fileid_cache[cur_path] = file_id
            current_id = file_id
        return current_id
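
        # Illustrative lookup (sketch): path2id resolves one component at a time,
        # issuing at most one parent_id_basename_to_file_id query per element and
        # caching every resolved prefix, so a call like
        #
        #   chk_inv.path2id('lib/foo/bar.c')
        #
        # fills _path_to_fileid_cache for 'lib', 'lib/foo' and the full path.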

    def to_lines(self):
        """Serialise the inventory to lines."""
        lines = ["chkinventory:\n"]
        if self._search_key_name != 'plain':
            # custom ordering grouping things that don't change together
            lines.append('search_key_name: %s\n' % (self._search_key_name,))
            lines.append("root_id: %s\n" % self.root_id)
            lines.append('parent_id_basename_to_file_id: %s\n' %
                (self.parent_id_basename_to_file_id.key()[0],))
            lines.append("revision_id: %s\n" % self.revision_id)
            lines.append("id_to_entry: %s\n" % (self.id_to_entry.key()[0],))
        else:
            lines.append("revision_id: %s\n" % self.revision_id)
            lines.append("root_id: %s\n" % self.root_id)
            if self.parent_id_basename_to_file_id is not None:
                lines.append('parent_id_basename_to_file_id: %s\n' %
                    (self.parent_id_basename_to_file_id.key()[0],))
            lines.append("id_to_entry: %s\n" % (self.id_to_entry.key()[0],))
        return lines

    @property
    def root(self):
        """Get the root entry."""
        return self[self.root_id]


class CHKInventoryDirectory(InventoryDirectory):
    """A directory in an inventory."""

    __slots__ = ['_children', '_chk_inventory']

    def __init__(self, file_id, name, parent_id, chk_inventory):
        # Don't call InventoryDirectory.__init__ - it isn't right for this
        # class.
        InventoryEntry.__init__(self, file_id, name, parent_id)
        self._children = None
        self._chk_inventory = chk_inventory

    @property
    def children(self):
        """Access the list of children of this directory.

        With a parent_id_basename_to_file_id index this loads just the
        children; without one it would have to load the entire index, which
        is bad. A more sophisticated proxy object might be nice, to allow
        partial loading of children as well when specific names are accessed.
        (So path traversal can be written in the obvious way but not examine
        siblings.)
        """
        if self._children is not None:
            return self._children
        # No longer supported
        if self._chk_inventory.parent_id_basename_to_file_id is None:
            raise AssertionError("Inventories without"
                " parent_id_basename_to_file_id are no longer supported")
        result = {}
        # XXX: Todo - use proxy objects for the children rather than loading
        # all when the attribute is referenced.
        parent_id_index = self._chk_inventory.parent_id_basename_to_file_id
        child_keys = set()
        for (parent_id, name_utf8), file_id in parent_id_index.iteritems(
            key_filter=[StaticTuple(self.file_id,)]):
            child_keys.add(StaticTuple(file_id,))
        cached = set()
        for file_id_key in child_keys:
            entry = self._chk_inventory._fileid_to_entry_cache.get(
                file_id_key[0], None)
            if entry is not None:
                result[entry.name] = entry
                cached.add(file_id_key)
        child_keys.difference_update(cached)
        # populate; todo: do by name
        id_to_entry = self._chk_inventory.id_to_entry
        for file_id_key, bytes in id_to_entry.iteritems(child_keys):
            entry = self._chk_inventory._bytes_to_entry(bytes)
            result[entry.name] = entry
            self._chk_inventory._fileid_to_entry_cache[file_id_key[0]] = entry
        self._children = result
        return result


entry_factory = {
    'directory': InventoryDirectory,
    'file': InventoryFile,
    'symlink': InventoryLink,
    'tree-reference': TreeReference,
}


def make_entry(kind, name, parent_id, file_id=None):
    """Create an inventory entry.

    :param kind: the type of inventory entry to create.
    :param name: the basename of the entry.
    :param parent_id: the parent_id of the entry.
    :param file_id: the file_id to use. If None, one will be created.
    """
    if file_id is None:
        file_id = generate_ids.gen_file_id(name)
    name = ensure_normalized_name(name)
    try:
        factory = entry_factory[kind]
    except KeyError:
        raise errors.BadFileKindError(name, kind)
    return factory(file_id, name, parent_id)
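

# Illustrative sketch (not part of the original module): building a small
# in-memory Inventory with make_entry. The file ids used here are made-up
# examples; when file_id is omitted, gen_file_id invents one from the name.
def _example_build_inventory():
    inv = Inventory('example-root-id')
    inv.add(make_entry('directory', 'src', 'example-root-id', 'src-id'))
    inv.add(make_entry('file', 'main.c', 'src-id'))
    return inv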


def ensure_normalized_name(name):
    """Normalize name.

    :raises InvalidNormalization: When name is not normalized, and cannot be
        accessed on this platform by the normalized path.
    :return: The NFC normalised version of name.
    """
    #------- This has been copied to bzrlib.dirstate.DirState.add, please
    # keep them synchronised.
    # we don't import normalized_filename directly because we want to be
    # able to change the implementation at runtime for tests.
    norm_name, can_access = osutils.normalized_filename(name)
    if norm_name != name:
        if can_access:
            return norm_name
        else:
            # TODO: jam 20060701 This would probably be more useful
            # if the error was raised with the full path
            raise errors.InvalidNormalization(name)
    return name


_NAME_RE = lazy_regex.lazy_compile(r'^[^/\\]+$')


def is_valid_name(name):
    return bool(_NAME_RE.match(name))
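

# Hypothetical helper (not in the original module): validate a proposed entry
# name before handing it to make_entry, mirroring the checks the module itself
# applies. is_valid_name rejects any name containing '/' or '\\'.
def _example_checked_entry(kind, name, parent_id):
    if not is_valid_name(name):
        raise errors.BzrError("invalid inventory entry name: %r" % (name,))
    return make_entry(kind, name, parent_id)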


def _check_delta_unique_ids(delta):
    """Decorate a delta and check that the file ids in it are unique.

    :return: A generator over delta.
    """
    ids = set()
    for item in delta:
        length = len(ids) + 1
        ids.add(item[2])
        if len(ids) != length:
            raise errors.InconsistentDelta(item[0] or item[1], item[2],
                "repeated file_id")
        yield item


def _check_delta_unique_new_paths(delta):
    """Decorate a delta and check that the new paths in it are unique.

    :return: A generator over delta.
    """
    paths = set()
    for item in delta:
        length = len(paths) + 1
        path = item[1]
        if path is not None:
            paths.add(path)
            if len(paths) != length:
                raise errors.InconsistentDelta(path, item[2], "repeated path")
        yield item


def _check_delta_unique_old_paths(delta):
    """Decorate a delta and check that the old paths in it are unique.

    :return: A generator over delta.
    """
    paths = set()
    for item in delta:
        length = len(paths) + 1
        path = item[0]
        if path is not None:
            paths.add(path)
            if len(paths) != length:
                raise errors.InconsistentDelta(path, item[2], "repeated path")
        yield item


def _check_delta_ids_are_valid(delta):
    """Decorate a delta and check that the ids in it are valid.

    :return: A generator over delta.
    """
    for item in delta:
        entry = item[3]
        if item[2] is None:
            raise errors.InconsistentDelta(item[0] or item[1], item[2],
                "entry with file_id None %r" % entry)
        if type(item[2]) != str:
            raise errors.InconsistentDelta(item[0] or item[1], item[2],
                "entry with non bytes file_id %r" % entry)
        yield item


def _check_delta_ids_match_entry(delta):
    """Decorate a delta and check that the ids in it match the entry.file_id.

    :return: A generator over delta.
    """
    for item in delta:
        entry = item[3]
        if entry is not None:
            if entry.file_id != item[2]:
                raise errors.InconsistentDelta(item[0] or item[1], item[2],
                    "mismatched id with %r" % entry)
        yield item


def _check_delta_new_path_entry_both_or_None(delta):
    """Decorate a delta and check that the new_path and entry are paired.

    :return: A generator over delta.
    """
    for item in delta:
        new_path = item[1]
        entry = item[3]
        if new_path is None and entry is not None:
            raise errors.InconsistentDelta(item[0], item[1],
                "Entry with no new_path")
        if new_path is not None and entry is None:
            raise errors.InconsistentDelta(new_path, item[1],
                "new_path with no entry")
        yield item


def mutable_inventory_from_tree(tree):
    """Create a new inventory that has the same contents as a specified tree.

    :param tree: Revision tree to create inventory from
    """
    entries = tree.iter_entries_by_dir()
    inv = Inventory(None, tree.get_revision_id())
    for path, inv_entry in entries:
        inv.add(inv_entry.copy())
    return inv
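

# Illustrative sketch (assumption: `branch` is a standard bzrlib Branch): obtain
# a revision tree for the branch tip and turn it into a mutable Inventory that
# can then be edited with add()/make_entry() before being used elsewhere.
def _example_mutable_inventory(branch):
    rev_tree = branch.repository.revision_tree(branch.last_revision())
    return mutable_inventory_from_tree(rev_tree)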