src/wibble/wibble.c
>>> i.id2path('2326')
'src/wibble/wibble.c'
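# A minimal usage sketch (hypothetical ids, not part of the original doctest):
# path2id and id2path are inverse lookups on the same tree shape.
#
#   inv = Inventory()
#   inv.add(InventoryDirectory('src-id', 'src', ROOT_ID))
#   inv.add(InventoryFile('wibble-id', 'wibble.c', 'src-id'))
#   inv.id2path('wibble-id')      # -> 'src/wibble.c'
#   inv.path2id('src/wibble.c')   # -> 'wibble-id'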
TODO: Maybe also keep the full path of the entry, and the children?
But those depend on its position within a particular inventory, and
it would be nice not to need to hold the backpointer here.
# TODO: split InventoryEntry into subclasses for files,
# directories, etc etc.
# Constants returned by describe_change()
# TODO: These should probably move to some kind of FileChangeDescription
# class; that's like what's inside a TreeDelta but we want to be able to
# generate them just for one file at a time.
MODIFIED_AND_RENAMED = 'modified and renamed'
__slots__ = ['file_id', 'revision', 'parent_id', 'name']
# Attributes that all InventoryEntry instances are expected to have, but
# that don't vary for all kinds of entry. (e.g. symlink_target is only
# relevant to InventoryLink, so there's no reason to make every
# InventoryFile instance allocate space to hold a value for it.)
# Attributes that only vary for files: executable, text_sha1, text_size,
def __init__(self, file_id, name, kind, parent_id, text_id=None):
# Attributes that only vary for symlinks: symlink_target
symlink_target = None
# Attributes that only vary for tree-references: reference_revision
reference_revision = None
152
def detect_changes(self, old_entry):
153
"""Return a (text_modified, meta_modified) from this to old_entry.
155
_read_tree_state must have been called on self and old_entry prior to
156
calling detect_changes.
160
def _diff(self, text_diff, from_label, tree, to_label, to_entry, to_tree,
161
output_to, reverse=False):
162
"""Perform a diff between two entries of the same kind."""
164
def parent_candidates(self, previous_inventories):
165
"""Find possible per-file graph parents.
167
This is currently defined by:
168
- Select the last changed revision in the parent inventory.
169
- To deal with a short-lived bug in bzr 0.8's development, two entries
that have the same last-changed revision but different 'x' bit settings are
173
# revision:ie mapping for each ie found in previous_inventories.
175
# identify candidate head revision ids.
176
for inv in previous_inventories:
177
if inv.has_id(self.file_id):
178
ie = inv[self.file_id]
179
if ie.revision in candidates:
180
# same revision value in two different inventories:
181
# correct possible inconsistencies:
182
# * there was a bug in revision updates with 'x' bit
185
if candidates[ie.revision].executable != ie.executable:
186
candidates[ie.revision].executable = False
187
ie.executable = False
188
except AttributeError:
191
# add this revision as a candidate.
192
candidates[ie.revision] = ie
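# Hedged usage sketch (hypothetical names): the dict built above maps the
# revision in which an entry last changed to that entry, one per head found
# in the supplied inventories.
#
#   candidates = ie.parent_candidates(previous_inventories)
#   for revision_id, head_ie in candidates.items():
#       ...  # pick per-file graph parents from these heads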
196
"""Return true if the object this entry represents has textual data.
198
Note that textual data includes binary content.
200
Also note that all entries get weave files created for them.
201
This attribute is primarily used when upgrading from old trees that
202
did not have the weave index for all inventory entries.
206
def __init__(self, file_id, name, parent_id):
99
207
"""Create an InventoryEntry
101
209
The filename must be a single component, relative to the
102
210
parent directory; it cannot be a whole path or relative name.
104
>>> e = InventoryEntry('123', 'hello.c', 'file', ROOT_ID)
212
>>> e = InventoryFile('123', 'hello.c', ROOT_ID)
109
>>> e = InventoryEntry('123', 'src/hello.c', 'file', ROOT_ID)
217
>>> e = InventoryFile('123', 'src/hello.c', ROOT_ID)
110
218
Traceback (most recent call last):
111
BzrCheckError: InventoryEntry name 'src/hello.c' is invalid
219
InvalidEntryName: Invalid entry name: src/hello.c
113
221
if '/' in name or '\\' in name:
114
raise BzrCheckError('InventoryEntry name %r is invalid' % name)
222
raise errors.InvalidEntryName(name=name)
116
223
self.file_id = file_id
119
self.text_id = text_id
120
226
self.parent_id = parent_id
121
if kind == 'directory':
126
raise BzrError("unhandled entry kind %r" % kind)
228
def kind_character(self):
229
"""Return a short kind indicator useful for appending to names."""
230
raise errors.BzrError('unknown kind %r' % self.kind)
232
known_kinds = ('file', 'directory', 'symlink')
130
234
def sorted_children(self):
131
l = self.children.items()
137
other = InventoryEntry(self.file_id, self.name, self.kind,
138
self.parent_id, text_id=self.text_id)
139
other.text_sha1 = self.text_sha1
140
other.text_size = self.text_size
235
return sorted(self.children.items())
238
def versionable_kind(kind):
239
return (kind in ('file', 'directory', 'symlink', 'tree-reference'))
241
def check(self, checker, rev_id, inv):
242
"""Check this inventory entry is intact.
244
This is a template method, override _check for kind specific
247
:param checker: Check object providing context for the checks;
248
can be used to find out what parts of the repository have already
250
:param rev_id: Revision id from which this InventoryEntry was loaded.
251
Not necessarily the last-changed revision for this file.
252
:param inv: Inventory from which the entry was loaded.
254
if self.parent_id is not None:
255
if not inv.has_id(self.parent_id):
256
raise errors.BzrCheckError(
257
'missing parent {%s} in inventory for revision {%s}' % (
258
self.parent_id, rev_id))
259
checker._add_entry_to_text_key_references(inv, self)
260
self._check(checker, rev_id)
262
def _check(self, checker, rev_id):
263
"""Check this inventory entry for kind specific errors."""
264
checker._report_items.append(
265
'unknown entry kind %r in revision {%s}' % (self.kind, rev_id))
268
"""Clone this inventory entry."""
269
raise NotImplementedError
272
def describe_change(old_entry, new_entry):
273
"""Describe the change between old_entry and this.
275
This smells of being an InterInventoryEntry situation, but as it's
the first one, we're making it a static method for now.
278
An entry with a different parent, or different name is considered
279
to be renamed. Reparenting is an internal detail.
280
Note that renaming the parent does not trigger a rename for the
283
# TODO: Perhaps return an object rather than just a string
284
if old_entry is new_entry:
285
# also the case of both being None
287
elif old_entry is None:
289
elif new_entry is None:
291
if old_entry.kind != new_entry.kind:
293
text_modified, meta_modified = new_entry.detect_changes(old_entry)
294
if text_modified or meta_modified:
298
# TODO 20060511 (mbp, rbc) factor out 'detect_rename' here.
299
if old_entry.parent_id != new_entry.parent_id:
301
elif old_entry.name != new_entry.name:
305
if renamed and not modified:
306
return InventoryEntry.RENAMED
307
if modified and not renamed:
309
if modified and renamed:
310
return InventoryEntry.MODIFIED_AND_RENAMED
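# Hedged usage sketch: callers compare the returned string against the
# constants defined on InventoryEntry (e.g. RENAMED, MODIFIED_AND_RENAMED)
# to decide how to report the change.
#
#   change = InventoryEntry.describe_change(old_ie, new_ie)
#   if change == InventoryEntry.MODIFIED_AND_RENAMED:
#       ...  # report both a content change and a rename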
314
return ("%s(%r, %r, parent_id=%r, revision=%r)"
315
% (self.__class__.__name__,
321
def __eq__(self, other):
323
# For the case when objects are cached
325
if not isinstance(other, InventoryEntry):
326
return NotImplemented
328
return ((self.file_id == other.file_id)
329
and (self.name == other.name)
330
and (other.symlink_target == self.symlink_target)
331
and (self.text_sha1 == other.text_sha1)
332
and (self.text_size == other.text_size)
333
and (self.text_id == other.text_id)
334
and (self.parent_id == other.parent_id)
335
and (self.kind == other.kind)
336
and (self.revision == other.revision)
337
and (self.executable == other.executable)
338
and (self.reference_revision == other.reference_revision)
341
def __ne__(self, other):
342
return not (self == other)
345
raise ValueError('not hashable')
347
def _unchanged(self, previous_ie):
348
"""Has this entry changed relative to previous_ie.
350
This method should be overridden in child classes.
353
# different inv parent
354
if previous_ie.parent_id != self.parent_id:
357
elif previous_ie.name != self.name:
359
elif previous_ie.kind != self.kind:
363
def _read_tree_state(self, path, work_tree):
364
"""Populate fields in the inventory entry from the given tree.
366
Note that this should be modified to be a noop on virtual trees
367
as all entries created there are prepopulated.
369
# TODO: Rather than running this manually, we should check the
370
# working sha1 and other expensive properties when they're
371
# first requested, or preload them if they're already known
372
pass # nothing to do by default
374
def _forget_tree_state(self):
378
class InventoryDirectory(InventoryEntry):
379
"""A directory in an inventory."""
381
__slots__ = ['children']
385
def _check(self, checker, rev_id):
386
"""See InventoryEntry._check"""
387
# In non rich root repositories we do not expect a file graph for the
389
if self.name == '' and not checker.rich_roots:
391
# Directories are stored as an empty file, but the file should exist
392
# to provide a per-fileid log. The hash of every directory content is
393
# "da..." below (the sha1sum of '').
394
checker.add_pending_item(rev_id,
395
('texts', self.file_id, self.revision), 'text',
396
'da39a3ee5e6b4b0d3255bfef95601890afd80709')
399
other = InventoryDirectory(self.file_id, self.name, self.parent_id)
400
other.revision = self.revision
141
401
# note that children are *not* copied; they're pulled across when
142
402
# others are added
147
return ("%s(%r, %r, kind=%r, parent_id=%r)"
148
% (self.__class__.__name__,
155
def to_element(self):
156
"""Convert to XML element"""
157
from bzrlib.xml import Element
161
e.set('name', self.name)
162
e.set('file_id', self.file_id)
163
e.set('kind', self.kind)
165
if self.text_size is not None:
e.set('text_size', '%d' % self.text_size)
168
for f in ['text_id', 'text_sha1']:
173
# to be conservative, we don't externalize the root pointers
174
# for now, leaving them as null in the xml form. in a future
175
# version it will be implied by nested elements.
176
if self.parent_id != ROOT_ID:
177
assert isinstance(self.parent_id, basestring)
178
e.set('parent_id', self.parent_id)
185
def from_element(cls, elt):
186
assert elt.tag == 'entry'
188
## original format inventories don't have a parent_id for
189
## nodes in the root directory, but it's cleaner to use one
191
parent_id = elt.get('parent_id')
192
if parent_id is None:
195
self = cls(elt.get('file_id'), elt.get('name'), elt.get('kind'), parent_id)
196
self.text_id = elt.get('text_id')
197
self.text_sha1 = elt.get('text_sha1')
199
## mutter("read inventoryentry: %r" % (elt.attrib))
201
v = elt.get('text_size')
202
self.text_size = v and int(v)
207
from_element = classmethod(from_element)
209
def __eq__(self, other):
210
if not isinstance(other, InventoryEntry):
211
return NotImplemented
213
return (self.file_id == other.file_id) \
214
and (self.name == other.name) \
215
and (self.text_sha1 == other.text_sha1) \
216
and (self.text_size == other.text_size) \
217
and (self.text_id == other.text_id) \
218
and (self.parent_id == other.parent_id) \
219
and (self.kind == other.kind)
222
def __ne__(self, other):
223
return not (self == other)
226
raise ValueError('not hashable')
230
class RootEntry(InventoryEntry):
231
def __init__(self, file_id):
232
self.file_id = file_id
405
def __init__(self, file_id, name, parent_id):
406
super(InventoryDirectory, self).__init__(file_id, name, parent_id)
233
407
self.children = {}
234
self.kind = 'root_directory'
235
self.parent_id = None
238
def __eq__(self, other):
239
if not isinstance(other, RootEntry):
240
return NotImplemented
242
return (self.file_id == other.file_id) \
243
and (self.children == other.children)
247
class Inventory(object):
248
"""Inventory of versioned files in a tree.
250
This describes which file_id is present at each point in the tree,
251
and possibly the SHA-1 or other information about the file.
409
def kind_character(self):
410
"""See InventoryEntry.kind_character."""
414
class InventoryFile(InventoryEntry):
415
"""A file in an inventory."""
417
__slots__ = ['text_sha1', 'text_size', 'text_id', 'executable']
421
def __init__(self, file_id, name, parent_id):
422
super(InventoryFile, self).__init__(file_id, name, parent_id)
423
self.text_sha1 = None
424
self.text_size = None
426
self.executable = False
428
def _check(self, checker, tree_revision_id):
429
"""See InventoryEntry._check"""
430
# TODO: check size too.
431
checker.add_pending_item(tree_revision_id,
432
('texts', self.file_id, self.revision), 'text',
434
if self.text_size is None:
435
checker._report_items.append(
436
'fileid {%s} in {%s} has None for text_size' % (self.file_id,
440
other = InventoryFile(self.file_id, self.name, self.parent_id)
441
other.executable = self.executable
442
other.text_id = self.text_id
443
other.text_sha1 = self.text_sha1
444
other.text_size = self.text_size
445
other.revision = self.revision
448
def detect_changes(self, old_entry):
449
"""See InventoryEntry.detect_changes."""
450
text_modified = (self.text_sha1 != old_entry.text_sha1)
451
meta_modified = (self.executable != old_entry.executable)
452
return text_modified, meta_modified
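# Hedged sketch (hypothetical entries): the two flags are independent --
# text_modified tracks a sha1 change, meta_modified an executable-bit change.
#
#   text_changed, exec_changed = new_file_ie.detect_changes(old_file_ie)
#   if exec_changed and not text_changed:
#       ...  # only the 'x' bit flipped; the content is identical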
454
def _diff(self, text_diff, from_label, tree, to_label, to_entry, to_tree,
455
output_to, reverse=False):
456
"""See InventoryEntry._diff."""
457
from bzrlib.diff import DiffText
458
from_file_id = self.file_id
460
to_file_id = to_entry.file_id
464
to_file_id, from_file_id = from_file_id, to_file_id
465
tree, to_tree = to_tree, tree
466
from_label, to_label = to_label, from_label
467
differ = DiffText(tree, to_tree, output_to, 'utf-8', '', '',
469
return differ.diff_text(from_file_id, to_file_id, from_label, to_label)
472
"""See InventoryEntry.has_text."""
475
def kind_character(self):
476
"""See InventoryEntry.kind_character."""
479
def _read_tree_state(self, path, work_tree):
480
"""See InventoryEntry._read_tree_state."""
481
self.text_sha1 = work_tree.get_file_sha1(self.file_id, path=path)
482
# FIXME: 20050930 probe for the text size when getting sha1
483
# in _read_tree_state
484
self.executable = work_tree.is_executable(self.file_id, path=path)
487
return ("%s(%r, %r, parent_id=%r, sha1=%r, len=%s, revision=%s)"
488
% (self.__class__.__name__,
496
def _forget_tree_state(self):
497
self.text_sha1 = None
499
def _unchanged(self, previous_ie):
500
"""See InventoryEntry._unchanged."""
501
compatible = super(InventoryFile, self)._unchanged(previous_ie)
502
if self.text_sha1 != previous_ie.text_sha1:
505
# FIXME: 20050930 probe for the text size when getting sha1
506
# in _read_tree_state
507
self.text_size = previous_ie.text_size
508
if self.executable != previous_ie.executable:
513
class InventoryLink(InventoryEntry):
514
"""A file in an inventory."""
516
__slots__ = ['symlink_target']
520
def __init__(self, file_id, name, parent_id):
521
super(InventoryLink, self).__init__(file_id, name, parent_id)
522
self.symlink_target = None
524
def _check(self, checker, tree_revision_id):
525
"""See InventoryEntry._check"""
526
if self.symlink_target is None:
527
checker._report_items.append(
528
'symlink {%s} has no target in revision {%s}'
529
% (self.file_id, tree_revision_id))
530
# Symlinks are stored as ''
531
checker.add_pending_item(tree_revision_id,
532
('texts', self.file_id, self.revision), 'text',
533
'da39a3ee5e6b4b0d3255bfef95601890afd80709')
536
other = InventoryLink(self.file_id, self.name, self.parent_id)
537
other.symlink_target = self.symlink_target
538
other.revision = self.revision
541
def detect_changes(self, old_entry):
542
"""See InventoryEntry.detect_changes."""
543
# FIXME: which _modified field should we use ? RBC 20051003
544
text_modified = (self.symlink_target != old_entry.symlink_target)
546
trace.mutter(" symlink target changed")
547
meta_modified = False
548
return text_modified, meta_modified
550
def _diff(self, text_diff, from_label, tree, to_label, to_entry, to_tree,
551
output_to, reverse=False):
552
"""See InventoryEntry._diff."""
553
from bzrlib.diff import DiffSymlink
554
old_target = self.symlink_target
555
if to_entry is not None:
556
new_target = to_entry.symlink_target
565
new_target, old_target = old_target, new_target
566
differ = DiffSymlink(old_tree, new_tree, output_to)
567
return differ.diff_symlink(old_target, new_target)
569
def kind_character(self):
570
"""See InventoryEntry.kind_character."""
573
def _read_tree_state(self, path, work_tree):
574
"""See InventoryEntry._read_tree_state."""
575
self.symlink_target = work_tree.get_symlink_target(self.file_id)
577
def _forget_tree_state(self):
578
self.symlink_target = None
580
def _unchanged(self, previous_ie):
581
"""See InventoryEntry._unchanged."""
582
compatible = super(InventoryLink, self)._unchanged(previous_ie)
583
if self.symlink_target != previous_ie.symlink_target:
588
class TreeReference(InventoryEntry):
590
__slots__ = ['reference_revision']
592
kind = 'tree-reference'
594
def __init__(self, file_id, name, parent_id, revision=None,
595
reference_revision=None):
596
InventoryEntry.__init__(self, file_id, name, parent_id)
597
self.revision = revision
598
self.reference_revision = reference_revision
601
return TreeReference(self.file_id, self.name, self.parent_id,
602
self.revision, self.reference_revision)
604
def _read_tree_state(self, path, work_tree):
605
"""Populate fields in the inventory entry from the given tree.
607
self.reference_revision = work_tree.get_reference_revision(
610
def _forget_tree_state(self):
611
self.reference_revision = None
613
def _unchanged(self, previous_ie):
614
"""See InventoryEntry._unchanged."""
615
compatible = super(TreeReference, self)._unchanged(previous_ie)
616
if self.reference_revision != previous_ie.reference_revision:
621
class CommonInventory(object):
622
"""Basic inventory logic, defined in terms of primitives like has_id.
624
An inventory is the metadata about the contents of a tree.
626
This is broadly a map from file_id to entries such as directories, files,
627
symlinks and tree references. Each entry maintains its own metadata like
628
SHA1 and length for files, or children for a directory.
252
630
Entries can be looked up either by path or by file_id.
254
The inventory represents a typical unix file tree, with
255
directories containing files and subdirectories. We never store
256
the full path to a file, because renaming a directory implicitly
257
moves all of its contents. This class internally maintains a
632
InventoryEntry objects must not be modified after they are
633
inserted, other than through the Inventory API.
636
def has_filename(self, filename):
637
return bool(self.path2id(filename))
639
def id2path(self, file_id):
640
"""Return as a string the path to file_id.
643
>>> e = i.add(InventoryDirectory('src-id', 'src', ROOT_ID))
644
>>> e = i.add(InventoryFile('foo-id', 'foo.c', parent_id='src-id'))
645
>>> print i.id2path('foo-id')
648
:raises NoSuchId: If file_id is not present in the inventory.
650
# get all names, skipping root
651
return '/'.join(reversed(
652
[parent.name for parent in
653
self._iter_file_id_parents(file_id)][:-1]))
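# Hedged walk-through (hypothetical ids): _iter_file_id_parents yields the
# entry itself and then each parent up to the root, so for a file at
# src/foo.c the names come out as ['foo.c', 'src', '']; dropping the root
# with [:-1], reversing, and joining with '/' yields 'src/foo.c'.
#
#   names = [p.name for p in inv._iter_file_id_parents('foo-id')]
#   '/'.join(reversed(names[:-1]))   # -> 'src/foo.c'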
655
def iter_entries(self, from_dir=None, recursive=True):
656
"""Return (path, entry) pairs, in order by name.
658
:param from_dir: if None, start from the root,
659
otherwise start from this directory (either file-id or entry)
660
:param recursive: recurse into directories or not
663
if self.root is None:
667
elif isinstance(from_dir, basestring):
668
from_dir = self[from_dir]
670
# unrolling the recursive calls changed the time from
# 440ms/663ms (inline/total) to 116ms/116ms
672
children = from_dir.children.items()
675
for name, ie in children:
678
children = collections.deque(children)
679
stack = [(u'', children)]
681
from_dir_relpath, children = stack[-1]
684
name, ie = children.popleft()
686
# we know that from_dir_relpath never ends in a slash
687
# and 'f' doesn't begin with one, we can do a string op, rather
688
# than the checks of pathjoin(), though this means that all paths
690
path = from_dir_relpath + '/' + name
694
if ie.kind != 'directory':
697
# But do this child first
698
new_children = ie.children.items()
700
new_children = collections.deque(new_children)
701
stack.append((path, new_children))
702
# Break out of inner loop, so that we start outer loop with child
705
# if we finished all children, pop it off the stack
708
def _preload_cache(self):
709
"""Populate any caches, we are about to access all items.
711
The default implementation does nothing, because CommonInventory doesn't
716
def iter_entries_by_dir(self, from_dir=None, specific_file_ids=None,
717
yield_parents=False):
718
"""Iterate over the entries in a directory first order.
720
This returns all entries for a directory before returning
721
the entries for children of a directory. This is not
722
lexicographically sorted order, and is a hybrid between
723
depth-first and breadth-first.
725
:param yield_parents: If True, yield the parents from the root leading
726
down to specific_file_ids that have been requested. This has no
727
impact if specific_file_ids is None.
728
:return: This yields (path, entry) pairs
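# Hedged illustration (hypothetical tree): for a tree holding src/ and
# src/hello.c this yields ('', root_ie), ('src', src_ie),
# ('src/hello.c', hello_ie) -- each directory before its children.  With
# specific_file_ids={'hello-id'} only ('src/hello.c', hello_ie) is yielded,
# unless yield_parents=True, which also yields the parents leading to it.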
730
if specific_file_ids and not isinstance(specific_file_ids, set):
731
specific_file_ids = set(specific_file_ids)
732
# TODO? Perhaps this should return the from_dir so that the root is
733
# yielded? or maybe an option?
734
if from_dir is None and specific_file_ids is None:
735
# They are iterating from the root, and have not specified any
736
# specific entries to look at. All current callers fully consume the
737
# iterator, so we can safely assume we are accessing all entries
738
self._preload_cache()
740
if self.root is None:
742
# Optimize a common case
743
if (not yield_parents and specific_file_ids is not None and
744
len(specific_file_ids) == 1):
745
file_id = list(specific_file_ids)[0]
746
if self.has_id(file_id):
747
yield self.id2path(file_id), self[file_id]
750
if (specific_file_ids is None or yield_parents or
751
self.root.file_id in specific_file_ids):
753
elif isinstance(from_dir, basestring):
754
from_dir = self[from_dir]
756
if specific_file_ids is not None:
757
# TODO: jam 20070302 This could really be done as a loop rather
758
# than a bunch of recursive calls.
761
def add_ancestors(file_id):
762
if not byid.has_id(file_id):
764
parent_id = byid[file_id].parent_id
765
if parent_id is None:
767
if parent_id not in parents:
768
parents.add(parent_id)
769
add_ancestors(parent_id)
770
for file_id in specific_file_ids:
771
add_ancestors(file_id)
775
stack = [(u'', from_dir)]
777
cur_relpath, cur_dir = stack.pop()
780
for child_name, child_ie in sorted(cur_dir.children.iteritems()):
782
child_relpath = cur_relpath + child_name
784
if (specific_file_ids is None or
785
child_ie.file_id in specific_file_ids or
786
(yield_parents and child_ie.file_id in parents)):
787
yield child_relpath, child_ie
789
if child_ie.kind == 'directory':
790
if parents is None or child_ie.file_id in parents:
791
child_dirs.append((child_relpath+'/', child_ie))
792
stack.extend(reversed(child_dirs))
794
def _make_delta(self, old):
795
"""Make an inventory delta from two inventories."""
798
adds = new_ids - old_ids
799
deletes = old_ids - new_ids
800
common = old_ids.intersection(new_ids)
802
for file_id in deletes:
803
delta.append((old.id2path(file_id), None, file_id, None))
805
delta.append((None, self.id2path(file_id), file_id, self[file_id]))
806
for file_id in common:
807
if old[file_id] != self[file_id]:
808
delta.append((old.id2path(file_id), self.id2path(file_id),
809
file_id, self[file_id]))
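# Hedged sketch of the rows built above (hypothetical values): each delta
# element is (old_path, new_path, file_id, new_entry), with None marking a
# missing side:
#
#   ('doomed.txt', None, 'doomed-id', None)        # only in old
#   (None, 'fresh.txt', 'fresh-id', fresh_entry)   # only in new
#   ('a.txt', 'b.txt', 'a-id', changed_entry)      # present in both, changed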
812
def make_entry(self, kind, name, parent_id, file_id=None):
813
"""Simple thunk to bzrlib.inventory.make_entry."""
814
return make_entry(kind, name, parent_id, file_id)
817
"""Return list of (path, ie) for all entries except the root.
819
This may be faster than iter_entries.
822
def descend(dir_ie, dir_path):
823
kids = dir_ie.children.items()
825
for name, ie in kids:
826
child_path = osutils.pathjoin(dir_path, name)
827
accum.append((child_path, ie))
828
if ie.kind == 'directory':
829
descend(ie, child_path)
831
if self.root is not None:
832
descend(self.root, u'')
835
def path2id(self, relpath):
836
"""Walk down through directories to return entry of last component.
838
:param relpath: may be either a list of path components, or a single
839
string, in which case it is automatically split.
841
This returns the entry of the last component in the path,
842
which may be either a file or a directory.
844
Returns None IFF the path is not found.
846
if isinstance(relpath, basestring):
847
names = osutils.splitpath(relpath)
853
except errors.NoSuchId:
854
# root doesn't exist yet so nothing else can
860
children = getattr(parent, 'children', None)
869
return parent.file_id
871
def filter(self, specific_fileids):
872
"""Get an inventory view filtered against a set of file-ids.
874
Children of directories and parents are included.
876
The result may or may not reference the underlying inventory
877
so it should be treated as immutable.
879
interesting_parents = set()
880
for fileid in specific_fileids:
882
interesting_parents.update(self.get_idpath(fileid))
883
except errors.NoSuchId:
884
# This fileid is not in the inventory - that's ok
886
entries = self.iter_entries()
887
if self.root is None:
888
return Inventory(root_id=None)
889
other = Inventory(entries.next()[1].file_id)
890
other.root.revision = self.root.revision
891
other.revision_id = self.revision_id
892
directories_to_expand = set()
893
for path, entry in entries:
894
file_id = entry.file_id
895
if (file_id in specific_fileids
896
or entry.parent_id in directories_to_expand):
897
if entry.kind == 'directory':
898
directories_to_expand.add(file_id)
899
elif file_id not in interesting_parents:
901
other.add(entry.copy())
904
def get_idpath(self, file_id):
905
"""Return a list of file_ids for the path to an entry.
907
The list contains one element for each directory followed by
908
the id of the file itself. So the length of the returned list
909
is equal to the depth of the file in the tree, counting the
910
root directory as depth 1.
913
for parent in self._iter_file_id_parents(file_id):
914
p.insert(0, parent.file_id)
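# Hedged example (hypothetical ids): for a file at src/foo.c this returns
# ['TREE_ROOT', 'src-id', 'foo-id'] -- the root first and the entry itself
# last, so the list length equals the entry's depth with the root as 1.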
918
class Inventory(CommonInventory):
919
"""Mutable dict based in-memory inventory.
921
We never store the full path to a file, because renaming a directory
922
implicitly moves all of its contents. This class internally maintains a
258
923
lookup tree that allows the children under a directory to be
259
924
returned quickly.
261
InventoryEntry objects must not be modified after they are
262
inserted, other than through the Inventory API.
264
926
>>> inv = Inventory()
265
>>> inv.add(InventoryEntry('123-123', 'hello.c', 'file', ROOT_ID))
927
>>> inv.add(InventoryFile('123-123', 'hello.c', ROOT_ID))
928
InventoryFile('123-123', 'hello.c', parent_id='TREE_ROOT', sha1=None, len=None, revision=None)
266
929
>>> inv['123-123'].name
269
May be treated as an iterator or set to look up file ids:
271
>>> bool(inv.path2id('hello.c'))
276
May also look up by name:
278
>>> [x[0] for x in inv.iter_entries()]
280
>>> inv = Inventory('TREE_ROOT-12345678-12345678')
281
>>> inv.add(InventoryEntry('123-123', 'hello.c', 'file', ROOT_ID))
932
Id's may be looked up from paths:
934
>>> inv.path2id('hello.c')
936
>>> inv.has_id('123-123')
939
There are iterators over the contents:
941
>>> [entry[0] for entry in inv.iter_entries()]
283
def __init__(self, root_id=ROOT_ID):
945
def __init__(self, root_id=ROOT_ID, revision_id=None):
284
946
"""Create or read an inventory.
286
948
If a working directory is specified, the inventory is read
290
952
The inventory is created with a default root directory, with
293
# We are letting Branch(init=True) create a unique inventory
# root id, rather than generating a random one here.
296
# root_id = bzrlib.branch.gen_file_id('TREE_ROOT')
297
self.root = RootEntry(root_id)
955
if root_id is not None:
956
self._set_root(InventoryDirectory(root_id, u'', None))
960
self.revision_id = revision_id
963
# More than one page of output is no longer useful for debugging
966
contents = repr(self._byid)
967
if len(contents) > max_len:
968
contents = contents[:(max_len-len(closing))] + closing
969
return "<Inventory object at %x, contents=%r>" % (id(self), contents)
971
def apply_delta(self, delta):
972
"""Apply a delta to this inventory.
974
See the inventory developers documentation for the theory behind
977
If delta application fails the inventory is left in an indeterminate
978
state and must not be used.
980
:param delta: A list of changes to apply. After all the changes are
981
applied the final inventory must be internally consistent, but it
982
is ok to supply changes which, if only half-applied would have an
983
invalid result - such as supplying two changes which rename two
984
files, 'A' and 'B' with each other : [('A', 'B', 'A-id', a_entry),
985
('B', 'A', 'B-id', b_entry)].
987
Each change is a tuple, of the form (old_path, new_path, file_id,
990
When new_path is None, the change indicates the removal of an entry
991
from the inventory and new_entry will be ignored (using None is
992
appropriate). If new_path is not None, then new_entry must be an
993
InventoryEntry instance, which will be incorporated into the
994
inventory (and replace any existing entry with the same file id).
996
When old_path is None, the change indicates the addition of
997
a new entry to the inventory.
999
When neither new_path nor old_path are None, the change is a
1000
modification to an entry, such as a rename, reparent, kind change
1003
The children attribute of new_entry is ignored. This is because
1004
this method preserves children automatically across alterations to
1005
the parent of the children, and cases where the parent id of a
1006
child is changing require the child to be passed in as a separate
1007
change regardless. E.g. in the recursive deletion of a directory -
1008
the directory's children must be included in the delta, or the
1009
final inventory will be invalid.
1011
Note that a file_id must only appear once within a given delta.
1012
An AssertionError is raised otherwise.
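# Hedged example (hypothetical ids/entries): renaming hello.c to hello2.c
# without changing its file id is a single change tuple, where renamed_entry
# is an InventoryEntry carrying the new name:
#
#   inv.apply_delta([('hello.c', 'hello2.c', 'hello-id', renamed_entry)])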
1014
# Check that the delta is legal. It would be nice if this could be
1015
# done within the loops below but it's safer to validate the delta
1016
# before starting to mutate the inventory, as there isn't a rollback
1018
list(_check_delta_unique_ids(_check_delta_unique_new_paths(
1019
_check_delta_unique_old_paths(_check_delta_ids_match_entry(
1020
_check_delta_ids_are_valid(
1021
_check_delta_new_path_entry_both_or_None(
1025
# Remove all affected items which were in the original inventory,
1026
# starting with the longest paths, thus ensuring parents are examined
1027
# after their children, which means that everything we examine has no
1028
# modified children remaining by the time we examine it.
1029
for old_path, file_id in sorted(((op, f) for op, np, f, e in delta
1030
if op is not None), reverse=True):
1031
# Preserve unaltered children of file_id for later reinsertion.
1032
file_id_children = getattr(self[file_id], 'children', {})
1033
if len(file_id_children):
1034
children[file_id] = file_id_children
1035
if self.id2path(file_id) != old_path:
1036
raise errors.InconsistentDelta(old_path, file_id,
1037
"Entry was at wrong other path %r." % self.id2path(file_id))
1038
# Remove file_id and the unaltered children. If file_id is not
1039
# being deleted it will be reinserted back later.
1040
self.remove_recursive_id(file_id)
1041
# Insert all affected which should be in the new inventory, reattaching
1042
# their children if they had any. This is done from shortest path to
1043
# longest, ensuring that items which were modified and whose parents in
1044
# the resulting inventory were also modified, are inserted after their
1046
for new_path, f, new_entry in sorted((np, f, e) for op, np, f, e in
1047
delta if np is not None):
1048
if new_entry.kind == 'directory':
1049
# Pop the child, to allow detection of children whose
# parents were deleted and which were not reattached to a new
1052
replacement = InventoryDirectory(new_entry.file_id,
1053
new_entry.name, new_entry.parent_id)
1054
replacement.revision = new_entry.revision
1055
replacement.children = children.pop(replacement.file_id, {})
1056
new_entry = replacement
1059
except errors.DuplicateFileId:
1060
raise errors.InconsistentDelta(new_path, new_entry.file_id,
1061
"New id is already present in target.")
1062
except AttributeError:
1063
raise errors.InconsistentDelta(new_path, new_entry.file_id,
1064
"Parent is not a directory.")
1065
if self.id2path(new_entry.file_id) != new_path:
1066
raise errors.InconsistentDelta(new_path, new_entry.file_id,
1067
"New path is not consistent with parent path.")
1069
# Get the parent id that was deleted
1070
parent_id, children = children.popitem()
1071
raise errors.InconsistentDelta("<deleted>", parent_id,
1072
"The file id was deleted but its children were not deleted.")
1074
def create_by_apply_delta(self, inventory_delta, new_revision_id,
1075
propagate_caches=False):
1076
"""See CHKInventory.create_by_apply_delta()"""
1077
new_inv = self.copy()
1078
new_inv.apply_delta(inventory_delta)
1079
new_inv.revision_id = new_revision_id
1082
def _set_root(self, ie):
298
1084
self._byid = {self.root.file_id: self.root}
1087
# TODO: jam 20051218 Should copy also copy the revision_id?
1088
entries = self.iter_entries()
1089
if self.root is None:
1090
return Inventory(root_id=None)
1091
other = Inventory(entries.next()[1].file_id)
1092
other.root.revision = self.root.revision
1093
# copy recursively so we know directories will be added before
1094
# their children. There are more efficient ways than this...
1095
for path, entry in entries:
1096
other.add(entry.copy())
301
1099
def __iter__(self):
1100
"""Iterate over all file-ids."""
302
1101
return iter(self._byid)
1103
def iter_just_entries(self):
1104
"""Iterate over all entries.
1106
Unlike iter_entries(), just the entries are returned (not (path, ie))
1107
and the order of entries is undefined.
1109
XXX: We may not want to merge this into bzr.dev.
1111
if self.root is None:
1113
for _, ie in self._byid.iteritems():
305
1116
def __len__(self):
306
1117
"""Returns number of entries."""
307
1118
return len(self._byid)
310
def iter_entries(self, from_dir=None):
311
"""Return (path, entry) pairs, in order by name."""
315
elif isinstance(from_dir, basestring):
316
from_dir = self._byid[from_dir]
318
kids = from_dir.children.items()
320
for name, ie in kids:
322
if ie.kind == 'directory':
323
for cn, cie in self.iter_entries(from_dir=ie.file_id):
324
yield os.path.join(name, cn), cie
328
"""Return list of (path, ie) for all entries except the root.
330
This may be faster than iter_entries.
333
def descend(dir_ie, dir_path):
334
kids = dir_ie.children.items()
336
for name, ie in kids:
337
child_path = os.path.join(dir_path, name)
338
accum.append((child_path, ie))
339
if ie.kind == 'directory':
340
descend(ie, child_path)
342
descend(self.root, '')
346
def directories(self):
347
"""Return (path, entry) pairs for all directories, including the root.
350
def descend(parent_ie, parent_path):
351
accum.append((parent_path, parent_ie))
353
kids = [(ie.name, ie) for ie in parent_ie.children.itervalues() if ie.kind == 'directory']
356
for name, child_ie in kids:
357
child_path = os.path.join(parent_path, name)
358
descend(child_ie, child_path)
359
descend(self.root, '')
364
def __contains__(self, file_id):
365
"""True if this entry contains a file with given id.
367
>>> inv = Inventory()
368
>>> inv.add(InventoryEntry('123', 'foo.c', 'file', ROOT_ID))
374
return file_id in self._byid
377
1120
def __getitem__(self, file_id):
378
1121
"""Return the entry for given file_id.
380
1123
>>> inv = Inventory()
381
>>> inv.add(InventoryEntry('123123', 'hello.c', 'file', ROOT_ID))
1124
>>> inv.add(InventoryFile('123123', 'hello.c', ROOT_ID))
1125
InventoryFile('123123', 'hello.c', parent_id='TREE_ROOT', sha1=None, len=None, revision=None)
382
1126
>>> inv['123123'].name
386
1130
return self._byid[file_id]
387
1131
except KeyError:
389
raise BzrError("can't look up file_id None")
391
raise BzrError("file_id {%s} not in inventory" % file_id)
1132
# really we're passing an inventory, not a tree...
1133
raise errors.NoSuchId(self, file_id)
394
1135
def get_file_kind(self, file_id):
395
1136
return self._byid[file_id].kind
634
1334
del old_parent.children[file_ie.name]
635
1335
new_parent.children[new_name] = file_ie
637
1337
file_ie.name = new_name
638
1338
file_ie.parent_id = new_parent_id
643
_NAME_RE = re.compile(r'^[^/\\]+$')
1340
def is_root(self, file_id):
1341
return self.root is not None and file_id == self.root.file_id
1344
class CHKInventory(CommonInventory):
1345
"""An inventory persisted in a CHK store.
1347
By design, a CHKInventory is immutable so many of the methods
1348
supported by Inventory - add, rename, apply_delta, etc - are *not*
1349
supported. To create a new CHKInventory, use create_by_apply_delta()
1350
or from_inventory(), say.
1352
Internally, a CHKInventory has one or two CHKMaps:
1354
* id_to_entry - a map from (file_id,) => InventoryEntry as bytes
1355
* parent_id_basename_to_file_id - a map from (parent_id, basename_utf8)
1358
The second map is optional and not present in early CHK repositories.
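# Hedged sketch of the two mappings (illustrative keys and values only):
#
#   id_to_entry:                   ('hello-id',)             -> entry bytes
#   parent_id_basename_to_file_id: ('TREE_ROOT', 'hello.c')  -> 'hello-id'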
1360
No caching is performed: every method call or item access will perform
1361
requests to the storage layer. As such, keep references to objects you
1365
def __init__(self, search_key_name):
1366
CommonInventory.__init__(self)
1367
self._fileid_to_entry_cache = {}
1368
self._fully_cached = False
1369
self._path_to_fileid_cache = {}
1370
self._search_key_name = search_key_name
1373
def __eq__(self, other):
1374
"""Compare two sets by comparing their contents."""
1375
if not isinstance(other, CHKInventory):
1376
return NotImplemented
1378
this_key = self.id_to_entry.key()
1379
other_key = other.id_to_entry.key()
1380
this_pid_key = self.parent_id_basename_to_file_id.key()
1381
other_pid_key = other.parent_id_basename_to_file_id.key()
1382
if None in (this_key, this_pid_key, other_key, other_pid_key):
1384
return this_key == other_key and this_pid_key == other_pid_key
1386
def _entry_to_bytes(self, entry):
1387
"""Serialise entry as a single bytestring.
1389
:param Entry: An inventory entry.
1390
:return: A bytestring for the entry.
1393
ENTRY ::= FILE | DIR | SYMLINK | TREE
1394
FILE ::= "file: " COMMON SEP SHA SEP SIZE SEP EXECUTABLE
1395
DIR ::= "dir: " COMMON
1396
SYMLINK ::= "symlink: " COMMON SEP TARGET_UTF8
1397
TREE ::= "tree: " COMMON REFERENCE_REVISION
1398
COMMON ::= FILE_ID SEP PARENT_ID SEP NAME_UTF8 SEP REVISION
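# Hedged example of a "file:" record under this grammar (hypothetical ids,
# SEP rendered as \n, sha1 elided), matching the formatting code below:
#
#   file: hello-id\nTREE_ROOT\nhello.c\nrev-1\n<sha1>\n12\nN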
1401
if entry.parent_id is not None:
1402
parent_str = entry.parent_id
1405
name_str = entry.name.encode("utf8")
1406
if entry.kind == 'file':
1407
if entry.executable:
1411
return "file: %s\n%s\n%s\n%s\n%s\n%d\n%s" % (
1412
entry.file_id, parent_str, name_str, entry.revision,
1413
entry.text_sha1, entry.text_size, exec_str)
1414
elif entry.kind == 'directory':
1415
return "dir: %s\n%s\n%s\n%s" % (
1416
entry.file_id, parent_str, name_str, entry.revision)
1417
elif entry.kind == 'symlink':
1418
return "symlink: %s\n%s\n%s\n%s\n%s" % (
1419
entry.file_id, parent_str, name_str, entry.revision,
1420
entry.symlink_target.encode("utf8"))
1421
elif entry.kind == 'tree-reference':
1422
return "tree: %s\n%s\n%s\n%s\n%s" % (
1423
entry.file_id, parent_str, name_str, entry.revision,
1424
entry.reference_revision)
1426
raise ValueError("unknown kind %r" % entry.kind)
1428
def _expand_fileids_to_parents_and_children(self, file_ids):
1429
"""Give a more wholistic view starting with the given file_ids.
1431
For any file_id which maps to a directory, we will include all children
1432
of that directory. We will also include all directories which are
1433
parents of the given file_ids, but we will not include their children.
1440
fringle # fringle-id
1444
if given [foo-id] we will include
1445
TREE_ROOT as interesting parents
1447
foo-id, baz-id, frob-id, fringle-id
1451
# TODO: Pre-pass over the list of fileids to see if anything is already
1452
# deserialized in self._fileid_to_entry_cache
1454
directories_to_expand = set()
1455
children_of_parent_id = {}
1456
# It is okay if some of the fileids are missing
1457
for entry in self._getitems(file_ids):
1458
if entry.kind == 'directory':
1459
directories_to_expand.add(entry.file_id)
1460
interesting.add(entry.parent_id)
1461
children_of_parent_id.setdefault(entry.parent_id, set()
1462
).add(entry.file_id)
1464
# Now, interesting has all of the direct parents, but not the
1465
# parents of those parents. It also may have some duplicates with
1467
remaining_parents = interesting.difference(file_ids)
1468
# When we hit the TREE_ROOT, we'll get an interesting parent of None,
1469
# but we don't actually want to recurse into that
1470
interesting.add(None) # this will auto-filter it in the loop
1471
remaining_parents.discard(None)
1472
while remaining_parents:
1473
next_parents = set()
1474
for entry in self._getitems(remaining_parents):
1475
next_parents.add(entry.parent_id)
1476
children_of_parent_id.setdefault(entry.parent_id, set()
1477
).add(entry.file_id)
1478
# Remove any search tips we've already processed
1479
remaining_parents = next_parents.difference(interesting)
1480
interesting.update(remaining_parents)
1481
# We should probably also .difference(directories_to_expand)
1482
interesting.update(file_ids)
1483
interesting.discard(None)
1484
while directories_to_expand:
1485
# Expand directories by looking in the
1486
# parent_id_basename_to_file_id map
1487
keys = [StaticTuple(f,).intern() for f in directories_to_expand]
1488
directories_to_expand = set()
1489
items = self.parent_id_basename_to_file_id.iteritems(keys)
1490
next_file_ids = set([item[1] for item in items])
1491
next_file_ids = next_file_ids.difference(interesting)
1492
interesting.update(next_file_ids)
1493
for entry in self._getitems(next_file_ids):
1494
if entry.kind == 'directory':
1495
directories_to_expand.add(entry.file_id)
1496
children_of_parent_id.setdefault(entry.parent_id, set()
1497
).add(entry.file_id)
1498
return interesting, children_of_parent_id
1500
def filter(self, specific_fileids):
1501
"""Get an inventory view filtered against a set of file-ids.
1503
Children of directories and parents are included.
1505
The result may or may not reference the underlying inventory
1506
so it should be treated as immutable.
1509
parent_to_children) = self._expand_fileids_to_parents_and_children(
1511
# There is some overlap here, but we assume that all interesting items
1512
# are in the _fileid_to_entry_cache because we had to read them to
1513
# determine if they were a dir we wanted to recurse, or just a file
1514
# This should give us all the entries we'll want to add, so start
1516
other = Inventory(self.root_id)
1517
other.root.revision = self.root.revision
1518
other.revision_id = self.revision_id
1519
if not interesting or not parent_to_children:
1520
# empty filter, or filtering entries that don't exist
1521
# (if even 1 existed, then we would have populated
1522
# parent_to_children with at least the tree root.)
1524
cache = self._fileid_to_entry_cache
1525
remaining_children = collections.deque(parent_to_children[self.root_id])
1526
while remaining_children:
1527
file_id = remaining_children.popleft()
1529
if ie.kind == 'directory':
1530
ie = ie.copy() # We create a copy to depopulate the .children attribute
1531
# TODO: depending on the uses of 'other' we should probably always
# '.copy()' to prevent someone from mutating other and
# invalidating our internal cache
1535
if file_id in parent_to_children:
1536
remaining_children.extend(parent_to_children[file_id])
1540
def _bytes_to_utf8name_key(bytes):
1541
"""Get the file_id, revision_id key out of bytes."""
1542
# We don't normally care about name, except for times when we want
1543
# to filter out empty names because of non rich-root...
1544
sections = bytes.split('\n')
1545
kind, file_id = sections[0].split(': ')
1546
return (sections[2], intern(file_id), intern(sections[3]))
1548
def _bytes_to_entry(self, bytes):
1549
"""Deserialise a serialised entry."""
1550
sections = bytes.split('\n')
1551
if sections[0].startswith("file: "):
1552
result = InventoryFile(sections[0][6:],
1553
sections[2].decode('utf8'),
1555
result.text_sha1 = sections[4]
1556
result.text_size = int(sections[5])
1557
result.executable = sections[6] == "Y"
1558
elif sections[0].startswith("dir: "):
1559
result = CHKInventoryDirectory(sections[0][5:],
1560
sections[2].decode('utf8'),
1562
elif sections[0].startswith("symlink: "):
1563
result = InventoryLink(sections[0][9:],
1564
sections[2].decode('utf8'),
1566
result.symlink_target = sections[4].decode('utf8')
1567
elif sections[0].startswith("tree: "):
1568
result = TreeReference(sections[0][6:],
1569
sections[2].decode('utf8'),
1571
result.reference_revision = sections[4]
1573
raise ValueError("Not a serialised entry %r" % bytes)
1574
result.file_id = intern(result.file_id)
1575
result.revision = intern(sections[3])
1576
if result.parent_id == '':
1577
result.parent_id = None
1578
self._fileid_to_entry_cache[result.file_id] = result
1581
def create_by_apply_delta(self, inventory_delta, new_revision_id,
1582
propagate_caches=False):
1583
"""Create a new CHKInventory by applying inventory_delta to this one.
1585
See the inventory developers documentation for the theory behind
1588
:param inventory_delta: The inventory delta to apply. See
1589
Inventory.apply_delta for details.
1590
:param new_revision_id: The revision id of the resulting CHKInventory.
1591
:param propagate_caches: If True, the caches for this inventory are
1592
copied to and updated for the result.
1593
:return: The new CHKInventory.
1595
split = osutils.split
1596
result = CHKInventory(self._search_key_name)
1597
if propagate_caches:
1598
# Just propagate the path-to-fileid cache for now
1599
result._path_to_fileid_cache = dict(self._path_to_fileid_cache.iteritems())
1600
search_key_func = chk_map.search_key_registry.get(self._search_key_name)
1601
self.id_to_entry._ensure_root()
1602
maximum_size = self.id_to_entry._root_node.maximum_size
1603
result.revision_id = new_revision_id
1604
result.id_to_entry = chk_map.CHKMap(
1605
self.id_to_entry._store,
1606
self.id_to_entry.key(),
1607
search_key_func=search_key_func)
1608
result.id_to_entry._ensure_root()
1609
result.id_to_entry._root_node.set_maximum_size(maximum_size)
1610
# Change to apply to the parent_id_basename delta. The dict maps
1611
# (parent_id, basename) -> (old_key, new_value). We use a dict because
1612
# when a path has its id replaced (e.g. the root is changed, or someone
# does bzr mv a b, bzr mv c a), we should output a single change to this
# map rather than two.
1615
parent_id_basename_delta = {}
1616
if self.parent_id_basename_to_file_id is not None:
1617
result.parent_id_basename_to_file_id = chk_map.CHKMap(
1618
self.parent_id_basename_to_file_id._store,
1619
self.parent_id_basename_to_file_id.key(),
1620
search_key_func=search_key_func)
1621
result.parent_id_basename_to_file_id._ensure_root()
1622
self.parent_id_basename_to_file_id._ensure_root()
1623
result_p_id_root = result.parent_id_basename_to_file_id._root_node
1624
p_id_root = self.parent_id_basename_to_file_id._root_node
1625
result_p_id_root.set_maximum_size(p_id_root.maximum_size)
1626
result_p_id_root._key_width = p_id_root._key_width
1628
result.parent_id_basename_to_file_id = None
1629
result.root_id = self.root_id
1630
id_to_entry_delta = []
1631
# inventory_delta is only traversed once, so we just update the
1633
# Check for repeated file ids
1634
inventory_delta = _check_delta_unique_ids(inventory_delta)
1635
# Repeated old paths
1636
inventory_delta = _check_delta_unique_old_paths(inventory_delta)
1637
# Check for repeated new paths
1638
inventory_delta = _check_delta_unique_new_paths(inventory_delta)
1639
# Check for entries that don't match the fileid
1640
inventory_delta = _check_delta_ids_match_entry(inventory_delta)
1641
# Check for nonsense fileids
1642
inventory_delta = _check_delta_ids_are_valid(inventory_delta)
1643
# Check for new_path <-> entry consistency
1644
inventory_delta = _check_delta_new_path_entry_both_or_None(
1646
# All changed entries need to have their parents be directories and be
1647
# at the right path. This set contains (path, id) tuples.
1649
# When we delete an item, all the children of it must be either deleted
1650
# or altered in their own right. As we batch process the change via
1651
# CHKMap.apply_delta, we build a set of things to use to validate the
1655
for old_path, new_path, file_id, entry in inventory_delta:
1658
result.root_id = file_id
1659
if new_path is None:
1664
if propagate_caches:
1666
del result._path_to_fileid_cache[old_path]
1669
deletes.add(file_id)
1671
new_key = StaticTuple(file_id,)
1672
new_value = result._entry_to_bytes(entry)
1673
# Update caches. It's worth doing this whether
1674
# we're propagating the old caches or not.
1675
result._path_to_fileid_cache[new_path] = file_id
1676
parents.add((split(new_path)[0], entry.parent_id))
1677
if old_path is None:
1680
old_key = StaticTuple(file_id,)
1681
if self.id2path(file_id) != old_path:
1682
raise errors.InconsistentDelta(old_path, file_id,
1683
"Entry was at wrong other path %r." %
1684
self.id2path(file_id))
1685
altered.add(file_id)
1686
id_to_entry_delta.append(StaticTuple(old_key, new_key, new_value))
1687
if result.parent_id_basename_to_file_id is not None:
1688
# parent_id, basename changes
1689
if old_path is None:
1692
old_entry = self[file_id]
1693
old_key = self._parent_id_basename_key(old_entry)
1694
if new_path is None:
1698
new_key = self._parent_id_basename_key(entry)
1700
# If the two keys are the same, the value will be unchanged
# as it's always the file id for this entry.
1702
if old_key != new_key:
1703
# Transform a change into explicit delete/add preserving
1704
# a possible match on the key from a different file id.
1705
if old_key is not None:
1706
parent_id_basename_delta.setdefault(
1707
old_key, [None, None])[0] = old_key
1708
if new_key is not None:
1709
parent_id_basename_delta.setdefault(
1710
new_key, [None, None])[1] = new_value
1711
# validate that deletes are complete.
1712
for file_id in deletes:
1713
entry = self[file_id]
1714
if entry.kind != 'directory':
1716
# This loop could potentially be better by using the id_basename
1717
# map to just get the child file ids.
1718
for child in entry.children.values():
1719
if child.file_id not in altered:
1720
raise errors.InconsistentDelta(self.id2path(child.file_id),
1721
child.file_id, "Child not deleted or reparented when "
1723
result.id_to_entry.apply_delta(id_to_entry_delta)
1724
if parent_id_basename_delta:
1725
# Transform the parent_id_basename delta data into a linear delta
1726
# with only one record for a given key. Optimally this would allow
1727
# re-keying, but it's simpler to just output that as a delete+add
1728
# to spend less time calculating the delta.
1730
for key, (old_key, value) in parent_id_basename_delta.iteritems():
1731
if value is not None:
1732
delta_list.append((old_key, key, value))
1734
delta_list.append((old_key, None, None))
1735
result.parent_id_basename_to_file_id.apply_delta(delta_list)
1736
parents.discard(('', None))
1737
for parent_path, parent in parents:
1739
if result[parent].kind != 'directory':
1740
raise errors.InconsistentDelta(result.id2path(parent), parent,
1741
'Not a directory, but given children')
1742
except errors.NoSuchId:
1743
raise errors.InconsistentDelta("<unknown>", parent,
1744
"Parent is not present in resulting inventory.")
1745
if result.path2id(parent_path) != parent:
1746
raise errors.InconsistentDelta(parent_path, parent,
1747
"Parent has wrong path %r." % result.path2id(parent_path))
1751
def deserialise(klass, chk_store, bytes, expected_revision_id):
1752
"""Deserialise a CHKInventory.
1754
:param chk_store: A CHK capable VersionedFiles instance.
1755
:param bytes: The serialised bytes.
1756
:param expected_revision_id: The revision ID we think this inventory is
1758
:return: A CHKInventory
1760
lines = bytes.split('\n')
1762
raise AssertionError('bytes to deserialize must end with an eol')
1764
if lines[0] != 'chkinventory:':
1765
raise ValueError("not a serialised CHKInventory: %r" % bytes)
1767
allowed_keys = frozenset(['root_id', 'revision_id', 'search_key_name',
1768
'parent_id_basename_to_file_id',
1770
for line in lines[1:]:
1771
key, value = line.split(': ', 1)
1772
if key not in allowed_keys:
1773
raise errors.BzrError('Unknown key in inventory: %r\n%r'
1776
raise errors.BzrError('Duplicate key in inventory: %r\n%r'
1779
revision_id = intern(info['revision_id'])
1780
root_id = intern(info['root_id'])
1781
search_key_name = intern(info.get('search_key_name', 'plain'))
1782
parent_id_basename_to_file_id = intern(info.get(
1783
'parent_id_basename_to_file_id', None))
1784
if not parent_id_basename_to_file_id.startswith('sha1:'):
1785
raise ValueError('parent_id_basename_to_file_id should be a sha1'
1786
' key not %r' % (parent_id_basename_to_file_id,))
1787
id_to_entry = info['id_to_entry']
1788
if not id_to_entry.startswith('sha1:'):
1789
raise ValueError('id_to_entry should be a sha1'
1790
' key not %r' % (id_to_entry,))
1792
result = CHKInventory(search_key_name)
1793
result.revision_id = revision_id
1794
result.root_id = root_id
1795
search_key_func = chk_map.search_key_registry.get(
1796
result._search_key_name)
1797
if parent_id_basename_to_file_id is not None:
1798
result.parent_id_basename_to_file_id = chk_map.CHKMap(
1799
chk_store, StaticTuple(parent_id_basename_to_file_id,),
1800
search_key_func=search_key_func)
1802
result.parent_id_basename_to_file_id = None
1804
result.id_to_entry = chk_map.CHKMap(chk_store,
1805
StaticTuple(id_to_entry,),
1806
search_key_func=search_key_func)
1807
if (result.revision_id,) != expected_revision_id:
1808
raise ValueError("Mismatched revision id and expected: %r, %r" %
1809
(result.revision_id, expected_revision_id))
1813
def from_inventory(klass, chk_store, inventory, maximum_size=0, search_key_name='plain'):
1814
"""Create a CHKInventory from an existing inventory.
1816
The content of inventory is copied into the chk_store, and a
1817
CHKInventory referencing that is returned.
1819
:param chk_store: A CHK capable VersionedFiles instance.
1820
:param inventory: The inventory to copy.
1821
:param maximum_size: The CHKMap node size limit.
1822
:param search_key_name: The identifier for the search key function
1824
result = klass(search_key_name)
1825
result.revision_id = inventory.revision_id
1826
result.root_id = inventory.root.file_id
1828
entry_to_bytes = result._entry_to_bytes
1829
parent_id_basename_key = result._parent_id_basename_key
1830
id_to_entry_dict = {}
1831
parent_id_basename_dict = {}
1832
for path, entry in inventory.iter_entries():
1833
key = StaticTuple(entry.file_id,).intern()
1834
id_to_entry_dict[key] = entry_to_bytes(entry)
1835
p_id_key = parent_id_basename_key(entry)
1836
parent_id_basename_dict[p_id_key] = entry.file_id
1838
result._populate_from_dicts(chk_store, id_to_entry_dict,
1839
parent_id_basename_dict, maximum_size=maximum_size)
1842
def _populate_from_dicts(self, chk_store, id_to_entry_dict,
1843
parent_id_basename_dict, maximum_size):
1844
search_key_func = chk_map.search_key_registry.get(self._search_key_name)
1845
root_key = chk_map.CHKMap.from_dict(chk_store, id_to_entry_dict,
1846
maximum_size=maximum_size, key_width=1,
1847
search_key_func=search_key_func)
1848
self.id_to_entry = chk_map.CHKMap(chk_store, root_key,
1850
root_key = chk_map.CHKMap.from_dict(chk_store,
1851
parent_id_basename_dict,
1852
maximum_size=maximum_size, key_width=2,
1853
search_key_func=search_key_func)
1854
self.parent_id_basename_to_file_id = chk_map.CHKMap(chk_store,
1855
root_key, search_key_func)
1857
def _parent_id_basename_key(self, entry):
1858
"""Create a key for a entry in a parent_id_basename_to_file_id index."""
1859
if entry.parent_id is not None:
1860
parent_id = entry.parent_id
1863
return StaticTuple(parent_id, entry.name.encode('utf8')).intern()
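# Hedged example (hypothetical entry): a file 'hello.c' directly under the
# root maps from key ('TREE_ROOT', 'hello.c') to its file id in the
# parent_id_basename_to_file_id index; the root entry itself uses ('', '').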
1865
def __getitem__(self, file_id):
1866
"""map a single file_id -> InventoryEntry."""
1868
raise errors.NoSuchId(self, file_id)
1869
result = self._fileid_to_entry_cache.get(file_id, None)
1870
if result is not None:
1873
return self._bytes_to_entry(
1874
self.id_to_entry.iteritems([StaticTuple(file_id,)]).next()[1])
1875
except StopIteration:
1876
# really we're passing an inventory, not a tree...
1877
raise errors.NoSuchId(self, file_id)

    def _getitems(self, file_ids):
        """Similar to __getitem__, but lets you query for multiple file_ids.

        The returned order is undefined. Currently, if an item doesn't
        exist, it isn't included in the output.
        """
        result = []
        remaining = []
        for file_id in file_ids:
            entry = self._fileid_to_entry_cache.get(file_id, None)
            if entry is None:
                remaining.append(file_id)
            else:
                result.append(entry)
        file_keys = [StaticTuple(f,).intern() for f in remaining]
        for file_key, value in self.id_to_entry.iteritems(file_keys):
            entry = self._bytes_to_entry(value)
            result.append(entry)
            self._fileid_to_entry_cache[entry.file_id] = entry
        return result
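
    # Lookup sketch (illustrative, made-up ids): both __getitem__ and
    # _getitems() try _fileid_to_entry_cache first and only go to the
    # id_to_entry CHKMap for the misses, e.g.:
    #
    #   entry = chk_inv['file-id-1']                       # may hit the cache
    #   entries = chk_inv._getitems(['file-id-1', 'file-id-2'])
    #   # ids that are not present are silently dropped from _getitems()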

    def has_id(self, file_id):
        # Perhaps have an explicit 'contains' method on CHKMap ?
        if self._fileid_to_entry_cache.get(file_id, None) is not None:
            return True
        return len(list(
            self.id_to_entry.iteritems([StaticTuple(file_id,)]))) == 1

    def is_root(self, file_id):
        return file_id == self.root_id

    def _iter_file_id_parents(self, file_id):
        """Yield the parents of file_id up to the root."""
        while file_id is not None:
            try:
                ie = self[file_id]
            except KeyError:
                raise errors.NoSuchId(tree=self, file_id=file_id)
            yield ie
            file_id = ie.parent_id

    def iter_all_ids(self):
        """Iterate over all file-ids."""
        for key, _ in self.id_to_entry.iteritems():
            yield key[-1]

    def iter_just_entries(self):
        """Iterate over all entries.

        Unlike iter_entries(), just the entries are returned (not (path, ie))
        and the order of entries is undefined.

        XXX: We may not want to merge this into bzr.dev.
        """
        for key, entry in self.id_to_entry.iteritems():
            file_id = key[0]
            ie = self._fileid_to_entry_cache.get(file_id, None)
            if ie is None:
                ie = self._bytes_to_entry(entry)
                self._fileid_to_entry_cache[file_id] = ie
            yield ie

    def _preload_cache(self):
        """Make sure all file-ids are in _fileid_to_entry_cache"""
        if self._fully_cached:
            return # No need to do it again
        # The optimal sort order is to use iteritems() directly
        cache = self._fileid_to_entry_cache
        for key, entry in self.id_to_entry.iteritems():
            file_id = key[0]
            if file_id not in cache:
                ie = self._bytes_to_entry(entry)
                cache[file_id] = ie
            else:
                ie = cache[file_id]
        last_parent_id = last_parent_ie = None
        pid_items = self.parent_id_basename_to_file_id.iteritems()
        for key, child_file_id in pid_items:
            if key == ('', ''): # This is the root
                if child_file_id != self.root_id:
                    raise ValueError('Data inconsistency detected.'
                        ' We expected data with key ("","") to match'
                        ' the root id, but %s != %s'
                        % (child_file_id, self.root_id))
                continue
            parent_id, basename = key
            ie = cache[child_file_id]
            if parent_id == last_parent_id:
                parent_ie = last_parent_ie
            else:
                parent_ie = cache[parent_id]
            if parent_ie.kind != 'directory':
                raise ValueError('Data inconsistency detected.'
                    ' An entry in the parent_id_basename_to_file_id map'
                    ' has parent_id {%s} but the kind of that object'
                    ' is %r not "directory"' % (parent_id, parent_ie.kind))
            if parent_ie._children is None:
                parent_ie._children = {}
            basename = basename.decode('utf-8')
            if basename in parent_ie._children:
                existing_ie = parent_ie._children[basename]
                if existing_ie != ie:
                    raise ValueError('Data inconsistency detected.'
                        ' Two entries with basename %r were found'
                        ' in the parent entry {%s}'
                        % (basename, parent_id))
            if basename != ie.name:
                raise ValueError('Data inconsistency detected.'
                    ' In the parent_id_basename_to_file_id map, file_id'
                    ' {%s} is listed as having basename %r, but in the'
                    ' id_to_entry map it is %r'
                    % (child_file_id, basename, ie.name))
            parent_ie._children[basename] = ie
        self._fully_cached = True
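
    # Note (added for clarity, not in the original): callers that are about to
    # walk most of the tree can call _preload_cache() once so that later
    # __getitem__ calls and CHKInventoryDirectory.children accesses are served
    # from _fileid_to_entry_cache instead of repeatedly hitting the CHKMaps.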

    def iter_changes(self, basis):
        """Generate a Tree.iter_changes change list between this and basis.

        :param basis: Another CHKInventory.
        :return: An iterator over the changes between self and basis, as per
            tree.iter_changes().
        """
        # We want: (file_id, (path_in_source, path_in_target),
        # changed_content, versioned, parent, name, kind,
        # executable)
        for key, basis_value, self_value in \
            self.id_to_entry.iter_changes(basis.id_to_entry):
            file_id = key[0]
            if basis_value is not None:
                basis_entry = basis._bytes_to_entry(basis_value)
                path_in_source = basis.id2path(file_id)
                basis_parent = basis_entry.parent_id
                basis_name = basis_entry.name
                basis_executable = basis_entry.executable
            else:
                path_in_source = None
                basis_parent = None
                basis_name = None
                basis_executable = None
            if self_value is not None:
                self_entry = self._bytes_to_entry(self_value)
                path_in_target = self.id2path(file_id)
                self_parent = self_entry.parent_id
                self_name = self_entry.name
                self_executable = self_entry.executable
            else:
                path_in_target = None
                self_parent = None
                self_name = None
                self_executable = None
            if basis_value is None:
                # add
                kind = (None, self_entry.kind)
                versioned = (False, True)
            elif self_value is None:
                # delete
                kind = (basis_entry.kind, None)
                versioned = (True, False)
            else:
                kind = (basis_entry.kind, self_entry.kind)
                versioned = (True, True)
            changed_content = False
            if kind[0] != kind[1]:
                changed_content = True
            elif kind[0] == 'file':
                if (self_entry.text_size != basis_entry.text_size or
                    self_entry.text_sha1 != basis_entry.text_sha1):
                    changed_content = True
            elif kind[0] == 'symlink':
                if self_entry.symlink_target != basis_entry.symlink_target:
                    changed_content = True
            elif kind[0] == 'tree-reference':
                if (self_entry.reference_revision !=
                    basis_entry.reference_revision):
                    changed_content = True
            parent = (basis_parent, self_parent)
            name = (basis_name, self_name)
            executable = (basis_executable, self_executable)
            if (not changed_content
                and parent[0] == parent[1]
                and name[0] == name[1]
                and executable[0] == executable[1]):
                # Could happen when only the revision changed for a directory
                # for instance.
                continue
            yield (file_id, (path_in_source, path_in_target), changed_content,
                versioned, parent, name, kind, executable)
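
    # Consumption sketch (illustrative): each yielded change is the 8-tuple
    # described in the comment above, so comparing two CHKInventory objects
    # `new_inv` and `old_inv` (assumed to exist) might look like:
    #
    #   for (file_id, (old_path, new_path), changed_content, versioned,
    #        parent, name, kind, executable) in new_inv.iter_changes(old_inv):
    #       if changed_content:
    #           print '%s: content changed (%s -> %s)' % (file_id, old_path,
    #                                                      new_path)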

    def __len__(self):
        """Return the number of entries in the inventory."""
        return len(self.id_to_entry)

    def _make_delta(self, old):
        """Make an inventory delta from two inventories."""
        if type(old) != CHKInventory:
            return CommonInventory._make_delta(self, old)
        delta = []
        for key, old_value, self_value in \
            self.id_to_entry.iter_changes(old.id_to_entry):
            file_id = key[0]
            if old_value is not None:
                old_path = old.id2path(file_id)
            else:
                old_path = None
            if self_value is not None:
                entry = self._bytes_to_entry(self_value)
                self._fileid_to_entry_cache[file_id] = entry
                new_path = self.id2path(file_id)
            else:
                entry = None
                new_path = None
            delta.append((old_path, new_path, file_id, entry))
        return delta
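
    # Note (added for clarity): the resulting delta is a list of
    # (old_path, new_path, file_id, new_entry) tuples, where new_entry is None
    # for removals and old_path or new_path is None on the side where the file
    # does not exist. This is the same shape the _check_delta_* helpers at the
    # end of this module validate.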

    def path2id(self, relpath):
        """See CommonInventory.path2id()."""
        # TODO: perhaps support negative hits?
        if isinstance(relpath, basestring):
            names = osutils.splitpath(relpath)
        else:
            names = relpath
            if relpath == []:
                relpath = ['']
            relpath = osutils.pathjoin(*relpath)
        result = self._path_to_fileid_cache.get(relpath, None)
        if result is not None:
            return result
        current_id = self.root_id
        if current_id is None:
            return None
        parent_id_index = self.parent_id_basename_to_file_id
        cur_path = None
        for basename in names:
            if cur_path is None:
                cur_path = basename
            else:
                cur_path = cur_path + '/' + basename
            basename_utf8 = basename.encode('utf8')
            file_id = self._path_to_fileid_cache.get(cur_path, None)
            if file_id is None:
                key_filter = [StaticTuple(current_id, basename_utf8)]
                items = parent_id_index.iteritems(key_filter)
                for (parent_id, name_utf8), file_id in items:
                    if parent_id != current_id or name_utf8 != basename_utf8:
                        raise errors.BzrError("corrupt inventory lookup! "
                            "%r %r %r %r" % (parent_id, current_id, name_utf8,
                            basename_utf8))
                if file_id is None:
                    return None
                else:
                    self._path_to_fileid_cache[cur_path] = file_id
            current_id = file_id
        return current_id
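
    # Resolution sketch (illustrative, made-up ids): path2id('src/hello.c')
    # walks one basename at a time through the parent_id_basename index:
    #
    #   ('root-id', 'src')          -> 'src-dir-id'
    #   ('src-dir-id', 'hello.c')   -> 'hello-file-id'
    #
    # so only the index pages along that path are read, and intermediate
    # results are remembered in _path_to_fileid_cache for later lookups.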

    def to_lines(self):
        """Serialise the inventory to lines."""
        lines = ["chkinventory:\n"]
        if self._search_key_name != 'plain':
            # custom ordering grouping things that don't change together
            lines.append('search_key_name: %s\n' % (self._search_key_name,))
            lines.append("root_id: %s\n" % self.root_id)
            lines.append('parent_id_basename_to_file_id: %s\n' %
                (self.parent_id_basename_to_file_id.key()[0],))
            lines.append("revision_id: %s\n" % self.revision_id)
            lines.append("id_to_entry: %s\n" % (self.id_to_entry.key()[0],))
        else:
            lines.append("revision_id: %s\n" % self.revision_id)
            lines.append("root_id: %s\n" % self.root_id)
            if self.parent_id_basename_to_file_id is not None:
                lines.append('parent_id_basename_to_file_id: %s\n' %
                    (self.parent_id_basename_to_file_id.key()[0],))
            lines.append("id_to_entry: %s\n" % (self.id_to_entry.key()[0],))
        return lines
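
    # Example of the serialised form (illustrative key values): with the
    # default 'plain' search key the lines produced above look roughly like
    #
    #   chkinventory:
    #   revision_id: rev-123
    #   root_id: tree-root-id
    #   parent_id_basename_to_file_id: sha1:...
    #   id_to_entry: sha1:...
    #
    # while a non-default search key adds a search_key_name line and reorders
    # the fields, as implemented in the branch above.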

    @property
    def root(self):
        """Get the root entry."""
        return self[self.root_id]


class CHKInventoryDirectory(InventoryDirectory):
    """A directory in an inventory."""

    __slots__ = ['_children', '_chk_inventory']

    def __init__(self, file_id, name, parent_id, chk_inventory):
        # Don't call InventoryDirectory.__init__ - it isn't right for this
        # class.
        InventoryEntry.__init__(self, file_id, name, parent_id)
        self._children = None
        self._chk_inventory = chk_inventory

    @property
    def children(self):
        """Access the list of children of this directory.

        With a parent_id_basename_to_file_id index, this loads all the
        children; without one it would have to load the entire id_to_entry
        index, which is much worse. A more sophisticated proxy object might
        be nice, to allow partial loading of children as well when specific
        names are accessed. (So path traversal can be written in the obvious
        way but not examine siblings.)
        """
        if self._children is not None:
            return self._children
        # No longer supported
        if self._chk_inventory.parent_id_basename_to_file_id is None:
            raise AssertionError("Inventories without"
                " parent_id_basename_to_file_id are no longer supported")
        result = {}
        # XXX: Todo - use proxy objects for the children rather than loading
        # all when the attribute is referenced.
        parent_id_index = self._chk_inventory.parent_id_basename_to_file_id
        child_keys = set()
        for (parent_id, name_utf8), file_id in parent_id_index.iteritems(
            key_filter=[StaticTuple(self.file_id,)]):
            child_keys.add(StaticTuple(file_id,))
        cached = set()
        for file_id_key in child_keys:
            entry = self._chk_inventory._fileid_to_entry_cache.get(
                file_id_key[0], None)
            if entry is not None:
                result[entry.name] = entry
                cached.add(file_id_key)
        child_keys.difference_update(cached)
        # populate; todo: do by name
        id_to_entry = self._chk_inventory.id_to_entry
        for file_id_key, bytes in id_to_entry.iteritems(child_keys):
            entry = self._chk_inventory._bytes_to_entry(bytes)
            result[entry.name] = entry
            self._chk_inventory._fileid_to_entry_cache[file_id_key[0]] = entry
        self._children = result
        return result
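
    # Usage sketch (illustrative): the mapping is memoised on self._children,
    # so only the first access pays the cost of loading every child:
    #
    #   dir_entry = chk_inv[dir_file_id]      # assumed to be a directory entry
    #   child = dir_entry.children['hello.c'] # loads all children once
    #   same = dir_entry.children['hello.c']  # served from the cached dict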


entry_factory = {
    'directory': InventoryDirectory,
    'file': InventoryFile,
    'symlink': InventoryLink,
    'tree-reference': TreeReference
}


def make_entry(kind, name, parent_id, file_id=None):
    """Create an inventory entry.

    :param kind: the type of inventory entry to create.
    :param name: the basename of the entry.
    :param parent_id: the parent_id of the entry.
    :param file_id: the file_id to use. if None, one will be created.
    """
    if file_id is None:
        file_id = generate_ids.gen_file_id(name)
    name = ensure_normalized_name(name)
    try:
        factory = entry_factory[kind]
    except KeyError:
        raise errors.BadFileKindError(name, kind)
    return factory(file_id, name, parent_id)
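
# Usage sketch in the doctest style used elsewhere in this module
# (illustrative values; ROOT_ID is the module-level root id constant):
#
#   >>> ie = make_entry('file', 'hello.c', ROOT_ID)
#   >>> ie.name
#   'hello.c'
#   >>> make_entry('pipe', 'named-pipe', ROOT_ID)
#   Traceback (most recent call last):
#   BadFileKindError: ...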


def ensure_normalized_name(name):
    """Normalize name.

    :raises InvalidNormalization: When name is not normalized, and cannot be
        accessed on this platform by the normalized path.
    :return: The NFC normalised version of name.
    """
    #------- This has been copied to bzrlib.dirstate.DirState.add, please
    # keep them synchronised.
    # we don't import normalized_filename directly because we want to be
    # able to change the implementation at runtime for tests.
    norm_name, can_access = osutils.normalized_filename(name)
    if norm_name != name:
        if can_access:
            return norm_name
        else:
            # TODO: jam 20060701 This would probably be more useful
            # if the error was raised with the full path
            raise errors.InvalidNormalization(name)
    return name
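
# Behaviour sketch (illustrative): a name that is already in NFC form is
# returned unchanged, while a decomposed (NFD) name is either converted to NFC
# or rejected with InvalidNormalization, depending on whether the platform can
# still access it under the normalized spelling:
#
#   >>> ensure_normalized_name(u'caf\xe9')   # already NFC
#   u'caf\xe9'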


_NAME_RE = lazy_regex.lazy_compile(r'^[^/\\]+$')

def is_valid_name(name):
    return bool(_NAME_RE.match(name))


def _check_delta_unique_ids(delta):
    """Decorate a delta and check that the file ids in it are unique.

    :return: A generator over delta.
    """
    ids = set()
    for item in delta:
        length = len(ids) + 1
        ids.add(item[2])
        if len(ids) != length:
            raise errors.InconsistentDelta(item[0] or item[1], item[2],
                "repeated file_id")
        yield item
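
# Chaining sketch (illustrative): this helper and the similar _check_delta_*
# generators below all re-yield the delta items unchanged while validating
# them, so they can be stacked around a delta before it is applied:
#
#   checked = _check_delta_unique_ids(
#       _check_delta_unique_new_paths(
#           _check_delta_ids_match_entry(delta)))
#   for item in checked:
#       pass   # consume lazily; errors surface as InconsistentDelta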


def _check_delta_unique_new_paths(delta):
    """Decorate a delta and check that the new paths in it are unique.

    :return: A generator over delta.
    """
    paths = set()
    for item in delta:
        length = len(paths) + 1
        path = item[1]
        if path is not None:
            paths.add(path)
            if len(paths) != length:
                raise errors.InconsistentDelta(path, item[2], "repeated path")
        yield item


def _check_delta_unique_old_paths(delta):
    """Decorate a delta and check that the old paths in it are unique.

    :return: A generator over delta.
    """
    paths = set()
    for item in delta:
        length = len(paths) + 1
        path = item[0]
        if path is not None:
            paths.add(path)
            if len(paths) != length:
                raise errors.InconsistentDelta(path, item[2], "repeated path")
        yield item


def _check_delta_ids_are_valid(delta):
    """Decorate a delta and check that the ids in it are valid.

    :return: A generator over delta.
    """
    for item in delta:
        entry = item[3]
        if item[2] is None:
            raise errors.InconsistentDelta(item[0] or item[1], item[2],
                "entry with file_id None %r" % entry)
        if type(item[2]) != str:
            raise errors.InconsistentDelta(item[0] or item[1], item[2],
                "entry with non bytes file_id %r" % entry)
        yield item


def _check_delta_ids_match_entry(delta):
    """Decorate a delta and check that the ids in it match the entry.file_id.

    :return: A generator over delta.
    """
    for item in delta:
        entry = item[3]
        if entry is not None:
            if entry.file_id != item[2]:
                raise errors.InconsistentDelta(item[0] or item[1], item[2],
                    "mismatched id with %r" % entry)
        yield item


def _check_delta_new_path_entry_both_or_None(delta):
    """Decorate a delta and check that the new_path and entry are paired.

    :return: A generator over delta.
    """
    for item in delta:
        new_path = item[1]
        entry = item[3]
        if new_path is None and entry is not None:
            raise errors.InconsistentDelta(item[0], item[1],
                "Entry with no new_path")
        if new_path is not None and entry is None:
            raise errors.InconsistentDelta(new_path, item[1],
                "new_path with no entry")
        yield item


def mutable_inventory_from_tree(tree):
    """Create a new inventory that has the same contents as a specified tree.

    :param tree: Revision tree to create inventory from
    """
    entries = tree.iter_entries_by_dir()
    inv = Inventory(None, tree.get_revision_id())
    for path, inv_entry in entries:
        inv.add(inv_entry.copy())