    'src/wibble/wibble.c'
    >>> i.id2path('2326')
    'src/wibble/wibble.c'
    TODO: Maybe also keep the full path of the entry, and the children?
    But those depend on its position within a particular inventory, and
    it would be nice not to need to hold the backpointer here.
    """

    # TODO: split InventoryEntry into subclasses for files,
    # directories, etc etc.

    __slots__ = ['text_sha1', 'text_size', 'file_id', 'name', 'kind',
                 'text_id', 'parent_id', 'children', ]
    # Constants returned by describe_change()
    # TODO: These should probably move to some kind of FileChangeDescription
    # class; that's like what's inside a TreeDelta but we want to be able to
    # generate them just for one file at a time.
    MODIFIED_AND_RENAMED = 'modified and renamed'

    __slots__ = ['file_id', 'revision', 'parent_id', 'name']
    # Attributes that all InventoryEntry instances are expected to have, but
    # that don't vary for all kinds of entry.  (e.g. symlink_target is only
    # relevant to InventoryLink, so there's no reason to make every
    # InventoryFile instance allocate space to hold a value for it.)

    # Attributes that only vary for files: executable, text_sha1, text_size,
    # text_id
    # Attributes that only vary for symlinks: symlink_target
    symlink_target = None
    # Attributes that only vary for tree-references: reference_revision
    reference_revision = None
    def detect_changes(self, old_entry):
        """Return a (text_modified, meta_modified) from this to old_entry.

        _read_tree_state must have been called on self and old_entry prior to
        calling detect_changes.
        """

    def _diff(self, text_diff, from_label, tree, to_label, to_entry, to_tree,
              output_to, reverse=False):
        """Perform a diff between two entries of the same kind."""
    def parent_candidates(self, previous_inventories):
        """Find possible per-file graph parents.

        This is currently defined by:
         - Select the last changed revision in the parent inventory.
         - To deal with a short lived bug in bzr 0.8's development, two
           entries that have the same last changed revision but different
           'x' bit settings are changed in-place.
        """
        # revision:ie mapping for each ie found in previous_inventories.
        candidates = {}
        # identify candidate head revision ids.
        for inv in previous_inventories:
            if self.file_id in inv:
                ie = inv[self.file_id]
                if ie.revision in candidates:
                    # same revision value in two different inventories:
                    # correct possible inconsistencies:
                    #  * there was a bug in revision updates with 'x' bit
                    #    support.
                    try:
                        if candidates[ie.revision].executable != ie.executable:
                            candidates[ie.revision].executable = False
                            ie.executable = False
                    except AttributeError:
                        pass
                else:
                    # add this revision as a candidate.
                    candidates[ie.revision] = ie
        return candidates
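
    # Example (illustrative sketch, not part of the original module): collecting
    # per-file graph parent candidates across a few basis inventories. The
    # inventories and the file id below are hypothetical.
    #
    #   ie = inv_a['hello-id']
    #   heads = ie.parent_candidates([inv_a, inv_b])
    #   # heads maps each candidate last-changed revision id to the
    #   # InventoryEntry recorded for it.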
    def has_text(self):
        """Return true if the object this entry represents has textual data.

        Note that textual data includes binary content.

        Also note that all entries get weave files created for them.
        This attribute is primarily used when upgrading from old trees that
        did not have the weave index for all inventory entries.
        """
    def __init__(self, file_id, name, parent_id):
        """Create an InventoryEntry

        The filename must be a single component, relative to the
        parent directory; it cannot be a whole path or relative name.

        >>> e = InventoryFile('123', 'hello.c', ROOT_ID)
        >>> e = InventoryFile('123', 'src/hello.c', ROOT_ID)
        Traceback (most recent call last):
        InvalidEntryName: Invalid entry name: src/hello.c
        """
        if '/' in name or '\\' in name:
            raise errors.InvalidEntryName(name=name)
        self.file_id = file_id
        self.name = name
        self.parent_id = parent_id
    def kind_character(self):
        """Return a short kind indicator useful for appending to names."""
        raise BzrError('unknown kind %r' % self.kind)

    known_kinds = ('file', 'directory', 'symlink')

    def sorted_children(self):
        return sorted(self.children.items())

    @staticmethod
    def versionable_kind(kind):
        return (kind in ('file', 'directory', 'symlink', 'tree-reference'))
    def check(self, checker, rev_id, inv):
        """Check this inventory entry is intact.

        This is a template method, override _check for kind specific
        tests.

        :param checker: Check object providing context for the checks;
            can be used to find out what parts of the repository have already
            been checked.
        :param rev_id: Revision id from which this InventoryEntry was loaded.
            Not necessarily the last-changed revision for this file.
        :param inv: Inventory from which the entry was loaded.
        """
        if self.parent_id is not None:
            if not inv.has_id(self.parent_id):
                raise BzrCheckError('missing parent {%s} in inventory for revision {%s}'
                        % (self.parent_id, rev_id))
        checker._add_entry_to_text_key_references(inv, self)
        self._check(checker, rev_id)

    def _check(self, checker, rev_id):
        """Check this inventory entry for kind specific errors."""
        checker._report_items.append(
            'unknown entry kind %r in revision {%s}' % (self.kind, rev_id))

    def copy(self):
        """Clone this inventory entry."""
        raise NotImplementedError
    @staticmethod
    def describe_change(old_entry, new_entry):
        """Describe the change between old_entry and this.

        This smells of being an InterInventoryEntry situation, but as it's
        the first one, we're making it a static method for now.

        An entry with a different parent, or different name is considered
        to be renamed. Reparenting is an internal detail.
        Note that renaming the parent does not trigger a rename for the
        child entry itself.
        """
        # TODO: Perhaps return an object rather than just a string
        if old_entry is new_entry:
            # also the case of both being None
            return 'unchanged'
        elif old_entry is None:
            return 'added'
        elif new_entry is None:
            return 'removed'
        if old_entry.kind != new_entry.kind:
            return 'modified'
        text_modified, meta_modified = new_entry.detect_changes(old_entry)
        if text_modified or meta_modified:
            modified = True
        else:
            modified = False
        # TODO 20060511 (mbp, rbc) factor out 'detect_rename' here.
        if old_entry.parent_id != new_entry.parent_id:
            renamed = True
        elif old_entry.name != new_entry.name:
            renamed = True
        else:
            renamed = False
        if renamed and not modified:
            return InventoryEntry.RENAMED
        if modified and not renamed:
            return 'modified'
        if modified and renamed:
            return InventoryEntry.MODIFIED_AND_RENAMED
        return 'unchanged'
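
    # Example (illustrative sketch): comparing the same file's entry in two
    # hypothetical inventories, old_inv and new_inv.
    #
    #   old_ie = old_inv['hello-id']
    #   new_ie = new_inv['hello-id']
    #   InventoryEntry.describe_change(old_ie, new_ie)
    #   # -> e.g. 'unchanged', 'modified', 'renamed' or 'modified and renamed'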
    def __repr__(self):
        return ("%s(%r, %r, parent_id=%r, revision=%r)"
                % (self.__class__.__name__,
                   self.file_id,
                   self.name,
                   self.parent_id,
                   self.revision))
    def __eq__(self, other):
        if other is self:
            # For the case when objects are cached
            return True
        if not isinstance(other, InventoryEntry):
            return NotImplemented

        return ((self.file_id == other.file_id)
                and (self.name == other.name)
                and (other.symlink_target == self.symlink_target)
                and (self.text_sha1 == other.text_sha1)
                and (self.text_size == other.text_size)
                and (self.text_id == other.text_id)
                and (self.parent_id == other.parent_id)
                and (self.kind == other.kind)
                and (self.revision == other.revision)
                and (self.executable == other.executable)
                and (self.reference_revision == other.reference_revision)
                )

    def __ne__(self, other):
        return not (self == other)

    def __hash__(self):
        raise ValueError('not hashable')
    def _unchanged(self, previous_ie):
        """Has this entry changed relative to previous_ie.

        This method should be overridden in child classes.
        """
        compatible = True
        # different inv parent
        if previous_ie.parent_id != self.parent_id:
            compatible = False
        # renamed
        elif previous_ie.name != self.name:
            compatible = False
        elif previous_ie.kind != self.kind:
            compatible = False
        return compatible
    def _read_tree_state(self, path, work_tree):
        """Populate fields in the inventory entry from the given tree.

        Note that this should be modified to be a noop on virtual trees
        as all entries created there are prepopulated.
        """
        # TODO: Rather than running this manually, we should check the
        # working sha1 and other expensive properties when they're
        # first requested, or preload them if they're already known
        pass  # nothing to do by default

    def _forget_tree_state(self):
        pass
class InventoryDirectory(InventoryEntry):
    """A directory in an inventory."""

    __slots__ = ['children']

    def _check(self, checker, rev_id):
        """See InventoryEntry._check"""
        # In non rich root repositories we do not expect a file graph for the
        # root.
        if self.name == '' and not checker.rich_roots:
            return
        # Directories are stored as an empty file, but the file should exist
        # to provide a per-fileid log. The hash of every directory content is
        # "da..." below (the sha1sum of '').
        checker.add_pending_item(rev_id,
            ('texts', self.file_id, self.revision), 'text',
            'da39a3ee5e6b4b0d3255bfef95601890afd80709')

    def copy(self):
        other = InventoryDirectory(self.file_id, self.name, self.parent_id)
        other.revision = self.revision
        # note that children are *not* copied; they're pulled across when
        # others are added
        return other
    def to_element(self):
        """Convert to XML element"""
        from bzrlib.xml import Element

        e = Element('entry')
        e.set('name', self.name)
        e.set('file_id', self.file_id)
        e.set('kind', self.kind)

        if self.text_size is not None:
            e.set('text_size', '%d' % self.text_size)

        for f in ['text_id', 'text_sha1']:
            v = getattr(self, f)
            if v is not None:
                e.set(f, v)

        # to be conservative, we don't externalize the root pointers
        # for now, leaving them as null in the xml form.  in a future
        # version it will be implied by nested elements.
        if self.parent_id != ROOT_ID:
            assert isinstance(self.parent_id, basestring)
            e.set('parent_id', self.parent_id)

        return e

    def from_element(cls, elt):
        assert elt.tag == 'entry'

        ## original format inventories don't have a parent_id for
        ## nodes in the root directory, but it's cleaner to use one
        ## anyway.
        parent_id = elt.get('parent_id')
        if parent_id is None:
            parent_id = ROOT_ID

        self = cls(elt.get('file_id'), elt.get('name'), elt.get('kind'), parent_id)
        self.text_id = elt.get('text_id')
        self.text_sha1 = elt.get('text_sha1')

        ## mutter("read inventoryentry: %r" % (elt.attrib))

        v = elt.get('text_size')
        self.text_size = v and int(v)

        return self

    from_element = classmethod(from_element)

    def __eq__(self, other):
        if not isinstance(other, InventoryEntry):
            return NotImplemented

        return (self.file_id == other.file_id) \
               and (self.name == other.name) \
               and (self.text_sha1 == other.text_sha1) \
               and (self.text_size == other.text_size) \
               and (self.text_id == other.text_id) \
               and (self.parent_id == other.parent_id) \
               and (self.kind == other.kind)

    def __ne__(self, other):
        return not (self == other)

    def __hash__(self):
        raise ValueError('not hashable')
    def __init__(self, file_id, name, parent_id):
        super(InventoryDirectory, self).__init__(file_id, name, parent_id)
        self.children = {}

    def kind_character(self):
        """See InventoryEntry.kind_character."""
        return '/'


class RootEntry(InventoryEntry):

    def __init__(self, file_id):
        self.file_id = file_id
        self.children = {}
        self.kind = 'root_directory'
        self.parent_id = None

    def __eq__(self, other):
        if not isinstance(other, RootEntry):
            return NotImplemented

        return (self.file_id == other.file_id) \
               and (self.children == other.children)
class InventoryFile(InventoryEntry):
    """A file in an inventory."""

    __slots__ = ['text_sha1', 'text_size', 'text_id', 'executable']

    def __init__(self, file_id, name, parent_id):
        super(InventoryFile, self).__init__(file_id, name, parent_id)
        self.text_sha1 = None
        self.text_size = None
        self.text_id = None
        self.executable = False

    def _check(self, checker, tree_revision_id):
        """See InventoryEntry._check"""
        # TODO: check size too.
        checker.add_pending_item(tree_revision_id,
            ('texts', self.file_id, self.revision), 'text',
            self.text_sha1)
        if self.text_size is None:
            checker._report_items.append(
                'fileid {%s} in {%s} has None for text_size' % (self.file_id,
                tree_revision_id))

    def copy(self):
        other = InventoryFile(self.file_id, self.name, self.parent_id)
        other.executable = self.executable
        other.text_id = self.text_id
        other.text_sha1 = self.text_sha1
        other.text_size = self.text_size
        other.revision = self.revision
        return other
    def detect_changes(self, old_entry):
        """See InventoryEntry.detect_changes."""
        text_modified = (self.text_sha1 != old_entry.text_sha1)
        meta_modified = (self.executable != old_entry.executable)
        return text_modified, meta_modified

    def _diff(self, text_diff, from_label, tree, to_label, to_entry, to_tree,
              output_to, reverse=False):
        """See InventoryEntry._diff."""
        from bzrlib.diff import DiffText
        from_file_id = self.file_id
        if to_entry is None:
            to_file_id = None
        else:
            to_file_id = to_entry.file_id
        if reverse:
            to_file_id, from_file_id = from_file_id, to_file_id
            tree, to_tree = to_tree, tree
            from_label, to_label = to_label, from_label
        differ = DiffText(tree, to_tree, output_to, 'utf-8', '', '',
                          text_diff)
        return differ.diff_text(from_file_id, to_file_id, from_label, to_label)

    def has_text(self):
        """See InventoryEntry.has_text."""
        return True

    def kind_character(self):
        """See InventoryEntry.kind_character."""
        return ''
    def _read_tree_state(self, path, work_tree):
        """See InventoryEntry._read_tree_state."""
        self.text_sha1 = work_tree.get_file_sha1(self.file_id, path=path)
        # FIXME: 20050930 probe for the text size when getting sha1
        # in _read_tree_state
        self.executable = work_tree.is_executable(self.file_id, path=path)

    def __repr__(self):
        return ("%s(%r, %r, parent_id=%r, sha1=%r, len=%s, revision=%s)"
                % (self.__class__.__name__,
                   self.file_id,
                   self.name,
                   self.parent_id,
                   self.text_sha1,
                   self.text_size,
                   self.revision))

    def _forget_tree_state(self):
        self.text_sha1 = None

    def _unchanged(self, previous_ie):
        """See InventoryEntry._unchanged."""
        compatible = super(InventoryFile, self)._unchanged(previous_ie)
        if self.text_sha1 != previous_ie.text_sha1:
            compatible = False
        else:
            # FIXME: 20050930 probe for the text size when getting sha1
            # in _read_tree_state
            self.text_size = previous_ie.text_size
        if self.executable != previous_ie.executable:
            compatible = False
        return compatible
class InventoryLink(InventoryEntry):
    """A symbolic link in an inventory."""

    __slots__ = ['symlink_target']

    def __init__(self, file_id, name, parent_id):
        super(InventoryLink, self).__init__(file_id, name, parent_id)
        self.symlink_target = None

    def _check(self, checker, tree_revision_id):
        """See InventoryEntry._check"""
        if self.symlink_target is None:
            checker._report_items.append(
                'symlink {%s} has no target in revision {%s}'
                    % (self.file_id, tree_revision_id))
        # Symlinks are stored as ''
        checker.add_pending_item(tree_revision_id,
            ('texts', self.file_id, self.revision), 'text',
            'da39a3ee5e6b4b0d3255bfef95601890afd80709')

    def copy(self):
        other = InventoryLink(self.file_id, self.name, self.parent_id)
        other.symlink_target = self.symlink_target
        other.revision = self.revision
        return other
    def detect_changes(self, old_entry):
        """See InventoryEntry.detect_changes."""
        # FIXME: which _modified field should we use ? RBC 20051003
        text_modified = (self.symlink_target != old_entry.symlink_target)
        if text_modified:
            mutter("    symlink target changed")
        meta_modified = False
        return text_modified, meta_modified

    def _diff(self, text_diff, from_label, tree, to_label, to_entry, to_tree,
              output_to, reverse=False):
        """See InventoryEntry._diff."""
        from bzrlib.diff import DiffSymlink
        old_target = self.symlink_target
        if to_entry is not None:
            new_target = to_entry.symlink_target
        else:
            new_target = None
        if not reverse:
            old_tree = tree
            new_tree = to_tree
        else:
            old_tree = to_tree
            new_tree = tree
            new_target, old_target = old_target, new_target
        differ = DiffSymlink(old_tree, new_tree, output_to)
        return differ.diff_symlink(old_target, new_target)

    def kind_character(self):
        """See InventoryEntry.kind_character."""
        return ''

    def _read_tree_state(self, path, work_tree):
        """See InventoryEntry._read_tree_state."""
        self.symlink_target = work_tree.get_symlink_target(self.file_id)

    def _forget_tree_state(self):
        self.symlink_target = None

    def _unchanged(self, previous_ie):
        """See InventoryEntry._unchanged."""
        compatible = super(InventoryLink, self)._unchanged(previous_ie)
        if self.symlink_target != previous_ie.symlink_target:
            compatible = False
        return compatible
class TreeReference(InventoryEntry):

    __slots__ = ['reference_revision']

    kind = 'tree-reference'

    def __init__(self, file_id, name, parent_id, revision=None,
                 reference_revision=None):
        InventoryEntry.__init__(self, file_id, name, parent_id)
        self.revision = revision
        self.reference_revision = reference_revision

    def copy(self):
        return TreeReference(self.file_id, self.name, self.parent_id,
                             self.revision, self.reference_revision)

    def _read_tree_state(self, path, work_tree):
        """Populate fields in the inventory entry from the given tree."""
        self.reference_revision = work_tree.get_reference_revision(
            self.file_id, path)

    def _forget_tree_state(self):
        self.reference_revision = None

    def _unchanged(self, previous_ie):
        """See InventoryEntry._unchanged."""
        compatible = super(TreeReference, self)._unchanged(previous_ie)
        if self.reference_revision != previous_ie.reference_revision:
            compatible = False
        return compatible
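
    # Example (illustrative sketch): each versionable kind has its own entry
    # class; the ids and names below are hypothetical.
    #
    #   InventoryDirectory('src-id', 'src', ROOT_ID)
    #   InventoryFile('hello-id', 'hello.c', 'src-id')
    #   InventoryLink('link-id', 'hello-link', 'src-id')
    #   TreeReference('sub-id', 'subtree', ROOT_ID, reference_revision='rev-1')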
class CommonInventory(object):
    """Basic inventory logic, defined in terms of primitives like has_id.

    An inventory is the metadata about the contents of a tree.

    This is broadly a map from file_id to entries such as directories, files,
    symlinks and tree references. Each entry maintains its own metadata like
    SHA1 and length for files, or children for a directory.

    Entries can be looked up either by path or by file_id.

    InventoryEntry objects must not be modified after they are
    inserted, other than through the Inventory API.
    """
288
def __init__(self, root_id=ROOT_ID):
289
"""Create or read an inventory.
291
If a working directory is specified, the inventory is read
292
from there. If the file is specified, read from that. If not,
293
the inventory is created empty.
295
The inventory is created with a default root directory, with
298
# We are letting Branch(init=True) create a unique inventory
299
# root id. Rather than generating a random one here.
301
# root_id = bzrlib.branch.gen_file_id('TREE_ROOT')
302
self.root = RootEntry(root_id)
303
self._byid = {self.root.file_id: self.root}
307
return iter(self._byid)
311
"""Returns number of entries."""
312
return len(self._byid)
    def iter_entries(self, from_dir=None):
        """Return (path, entry) pairs, in order by name."""
        if from_dir is None:
            if self.root is None:
                return
            from_dir = self.root
        elif isinstance(from_dir, basestring):
            from_dir = self._byid[from_dir]

        kids = from_dir.children.items()
        kids.sort()
        for name, ie in kids:
            yield name, ie
            if ie.kind == 'directory':
                for cn, cie in self.iter_entries(from_dir=ie.file_id):
                    yield os.path.join(name, cn), cie
    def __contains__(self, file_id):
        """True if this entry contains a file with given id.

        >>> inv = Inventory()
        >>> inv.add(InventoryFile('123', 'foo.c', ROOT_ID))
        InventoryFile('123', 'foo.c', parent_id='TREE_ROOT', sha1=None, len=None, revision=None)
        >>> '123' in inv
        True

        Note that this method along with __iter__ are not encouraged for use as
        they are less clear than specific query methods - they may be removed
        in the future.
        """
        return self.has_id(file_id)

    def has_filename(self, filename):
        return bool(self.path2id(filename))

    def id2path(self, file_id):
        """Return as a string the path to file_id.

        >>> i = Inventory()
        >>> e = i.add(InventoryDirectory('src-id', 'src', ROOT_ID))
        >>> e = i.add(InventoryFile('foo-id', 'foo.c', parent_id='src-id'))
        >>> print i.id2path('foo-id')
        src/foo.c

        :raises NoSuchId: If file_id is not present in the inventory.
        """
        # get all names, skipping root
        return '/'.join(reversed(
            [parent.name for parent in
             self._iter_file_id_parents(file_id)][:-1]))
    def iter_entries(self, from_dir=None, recursive=True):
        """Return (path, entry) pairs, in order by name.

        :param from_dir: if None, start from the root,
          otherwise start from this directory (either file-id or entry)
        :param recursive: recurse into directories or not
        """
        if from_dir is None:
            if self.root is None:
                return
            from_dir = self.root
            yield '', self.root
        elif isinstance(from_dir, basestring):
            from_dir = self[from_dir]

        # unrolling the recursive called changed the time from
        # 440ms/663ms (inline/total) to 116ms/116ms
        children = from_dir.children.items()
        children.sort()
        if not recursive:
            for name, ie in children:
                yield name, ie
            return
        children = collections.deque(children)
        stack = [(u'', children)]
        while stack:
            from_dir_relpath, children = stack[-1]

            while children:
                name, ie = children.popleft()

                # we know that from_dir_relpath never ends in a slash
                # and 'f' doesn't begin with one, we can do a string op, rather
                # than the checks of pathjoin(), though this means that all paths
                # start with a slash
                path = from_dir_relpath + '/' + name

                yield path[1:], ie

                if ie.kind != 'directory':
                    continue

                # But do this child first
                new_children = ie.children.items()
                new_children.sort()
                new_children = collections.deque(new_children)
                stack.append((path, new_children))
                # Break out of inner loop, so that we start outer loop with child
                break
            else:
                # if we finished all children, pop it off the stack
                stack.pop()
    def iter_entries_by_dir(self, from_dir=None, specific_file_ids=None,
        yield_parents=False):
        """Iterate over the entries in a directory first order.

        This returns all entries for a directory before returning
        the entries for children of a directory. This is not
        lexicographically sorted order, and is a hybrid between
        depth-first and breadth-first.

        :param yield_parents: If True, yield the parents from the root leading
            down to specific_file_ids that have been requested. This has no
            impact if specific_file_ids is None.
        :return: This yields (path, entry) pairs
        """
        if specific_file_ids and not isinstance(specific_file_ids, set):
            specific_file_ids = set(specific_file_ids)
        # TODO? Perhaps this should return the from_dir so that the root is
        # yielded? or maybe an option?
        if from_dir is None:
            if self.root is None:
                return
            # Optimize a common case
            if (not yield_parents and specific_file_ids is not None and
                len(specific_file_ids) == 1):
                file_id = list(specific_file_ids)[0]
                if file_id in self:
                    yield self.id2path(file_id), self[file_id]
                return
            from_dir = self.root
            if (specific_file_ids is None or yield_parents or
                self.root.file_id in specific_file_ids):
                yield u'', self.root
        elif isinstance(from_dir, basestring):
            from_dir = self[from_dir]

        if specific_file_ids is not None:
            # TODO: jam 20070302 This could really be done as a loop rather
            #       than a bunch of recursive calls.
            parents = set()
            byid = self
            def add_ancestors(file_id):
                if file_id not in byid:
                    return
                parent_id = byid[file_id].parent_id
                if parent_id is None:
                    return
                if parent_id not in parents:
                    parents.add(parent_id)
                    add_ancestors(parent_id)
            for file_id in specific_file_ids:
                add_ancestors(file_id)
        else:
            parents = None

        stack = [(u'', from_dir)]
        while stack:
            cur_relpath, cur_dir = stack.pop()

            child_dirs = []
            for child_name, child_ie in sorted(cur_dir.children.iteritems()):

                child_relpath = cur_relpath + child_name

                if (specific_file_ids is None or
                    child_ie.file_id in specific_file_ids or
                    (yield_parents and child_ie.file_id in parents)):
                    yield child_relpath, child_ie

                if child_ie.kind == 'directory':
                    if parents is None or child_ie.file_id in parents:
                        child_dirs.append((child_relpath+'/', child_ie))
            stack.extend(reversed(child_dirs))
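
    # Example (illustrative sketch): directory-first ordering; the paths below
    # are hypothetical.
    #
    #   [path for path, entry in inv.iter_entries_by_dir()]
    #   # -> ['', 'doc', 'src', 'doc/index.txt', 'src/hello.c']
    #   # i.e. all entries of a directory are yielded before descending
    #   # into any child directory.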
    def _make_delta(self, old):
        """Make an inventory delta from two inventories."""
        old_ids = set(old)
        new_ids = set(self)
        adds = new_ids - old_ids
        deletes = old_ids - new_ids
        common = old_ids.intersection(new_ids)
        delta = []
        for file_id in deletes:
            delta.append((old.id2path(file_id), None, file_id, None))
        for file_id in adds:
            delta.append((None, self.id2path(file_id), file_id, self[file_id]))
        for file_id in common:
            if old[file_id] != self[file_id]:
                delta.append((old.id2path(file_id), self.id2path(file_id),
                    file_id, self[file_id]))
        return delta

    def _get_mutable_inventory(self):
        """Returns a mutable copy of the object.

        Some inventories are immutable, yet working trees, for example, need
        to mutate existing inventories instead of creating a new one.
        """
        raise NotImplementedError(self._get_mutable_inventory)

    def make_entry(self, kind, name, parent_id, file_id=None):
        """Simple thunk to bzrlib.inventory.make_entry."""
        return make_entry(kind, name, parent_id, file_id)
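
    # Example (illustrative sketch): building and adding an entry via
    # make_entry; the name below is hypothetical.
    #
    #   ie = inv.make_entry('file', 'hello.c', ROOT_ID)
    #   inv.add(ie)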
    def entries(self):
        """Return list of (path, ie) for all entries except the root.
        """
        accum = []
        def descend(dir_ie, dir_path):
            kids = dir_ie.children.items()
            kids.sort()
            for name, ie in kids:
                child_path = osutils.pathjoin(dir_path, name)
                accum.append((child_path, ie))
                if ie.kind == 'directory':
                    descend(ie, child_path)

        if self.root is not None:
            descend(self.root, u'')
        return accum
    def directories(self):
        """Return (path, entry) pairs for all directories, including the root.
        """
        accum = []
        def descend(parent_ie, parent_path):
            accum.append((parent_path, parent_ie))

            kids = [(ie.name, ie) for ie in parent_ie.children.itervalues()
                    if ie.kind == 'directory']
            kids.sort()

            for name, child_ie in kids:
                child_path = osutils.pathjoin(parent_path, name)
                descend(child_ie, child_path)
        descend(self.root, u'')
        return accum
    def path2id(self, relpath):
        """Walk down through directories to return entry of last component.

        :param relpath: may be either a list of path components, or a single
            string, in which case it is automatically split.

        This returns the entry of the last component in the path,
        which may be either a file or a directory.

        Returns None IFF the path is not found.
        """
        if isinstance(relpath, basestring):
            names = osutils.splitpath(relpath)
        else:
            names = relpath

        try:
            parent = self.root
        except errors.NoSuchId:
            # root doesn't exist yet so nothing else can
            return None
        if parent is None:
            return None
        for f in names:
            try:
                children = getattr(parent, 'children', None)
                if children is None:
                    return None
                parent = children[f]
            except KeyError:
                return None

        return parent.file_id
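
    # Example (illustrative sketch): path and id lookups are inverses for
    # entries that exist; the path and id below are hypothetical.
    #
    #   file_id = inv.path2id('src/hello.c')   # -> 'hello-id', or None
    #   inv.id2path(file_id)                   # -> 'src/hello.c'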
    def filter(self, specific_fileids):
        """Get an inventory view filtered against a set of file-ids.

        Children of directories and parents are included.

        The result may or may not reference the underlying inventory
        so it should be treated as immutable.
        """
        interesting_parents = set()
        for fileid in specific_fileids:
            try:
                interesting_parents.update(self.get_idpath(fileid))
            except errors.NoSuchId:
                # This fileid is not in the inventory - that's ok
                pass
        entries = self.iter_entries()
        if self.root is None:
            return Inventory(root_id=None)
        other = Inventory(entries.next()[1].file_id)
        other.root.revision = self.root.revision
        other.revision_id = self.revision_id
        directories_to_expand = set()
        for path, entry in entries:
            file_id = entry.file_id
            if (file_id in specific_fileids
                or entry.parent_id in directories_to_expand):
                if entry.kind == 'directory':
                    directories_to_expand.add(file_id)
            elif file_id not in interesting_parents:
                continue
            other.add(entry.copy())
        return other

    def get_idpath(self, file_id):
        """Return a list of file_ids for the path to an entry.

        The list contains one element for each directory followed by
        the id of the file itself.  So the length of the returned list
        is equal to the depth of the file in the tree, counting the
        root directory as depth 1.
        """
        p = []
        for parent in self._iter_file_id_parents(file_id):
            p.insert(0, parent.file_id)
        return p
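
    # Example (illustrative sketch): for a file at 'src/hello.c' the id path
    # has three elements, root first; the ids below are hypothetical.
    #
    #   inv.get_idpath('hello-id')
    #   # -> [ROOT_ID, 'src-id', 'hello-id']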
class Inventory(CommonInventory):
    """Mutable dict based in-memory inventory.

    We never store the full path to a file, because renaming a directory
    implicitly moves all of its contents. This class internally maintains a
    lookup tree that allows the children under a directory to be
    returned quickly.

    >>> inv = Inventory()
    >>> inv.add(InventoryFile('123-123', 'hello.c', ROOT_ID))
    InventoryFile('123-123', 'hello.c', parent_id='TREE_ROOT', sha1=None, len=None, revision=None)
    >>> inv['123-123'].name
    'hello.c'

    Id's may be looked up from paths:

    >>> inv.path2id('hello.c')
    '123-123'

    There are iterators over the contents:

    >>> [entry[0] for entry in inv.iter_entries()]
    ['', u'hello.c']
    """

    def __init__(self, root_id=ROOT_ID, revision_id=None):
        """Create or read an inventory.

        If a working directory is specified, the inventory is read
        from there. If the file is specified, read from that. If not,
        the inventory is created empty.

        The inventory is created with a default root directory, with
        an id of None.
        """
        if root_id is not None:
            self._set_root(InventoryDirectory(root_id, u'', None))
        else:
            self.root = None
            self._byid = {}
        self.revision_id = revision_id

    def __repr__(self):
        # More than one page of output is not useful anymore to debug
        max_len = 2048
        closing = '...}'
        contents = repr(self._byid)
        if len(contents) > max_len:
            contents = contents[:(max_len-len(closing))] + closing
        return "<Inventory object at %x, contents=%r>" % (id(self), contents)
    def apply_delta(self, delta):
        """Apply a delta to this inventory.

        See the inventory developers documentation for the theory behind
        inventory deltas.

        If delta application fails the inventory is left in an indeterminate
        state and must not be used.

        :param delta: A list of changes to apply. After all the changes are
            applied the final inventory must be internally consistent, but it
            is ok to supply changes which, if only half-applied would have an
            invalid result - such as supplying two changes which rename two
            files, 'A' and 'B' with each other : [('A', 'B', 'A-id', a_entry),
            ('B', 'A', 'B-id', b_entry)].

            Each change is a tuple, of the form (old_path, new_path, file_id,
            new_entry).

            When new_path is None, the change indicates the removal of an entry
            from the inventory and new_entry will be ignored (using None is
            appropriate). If new_path is not None, then new_entry must be an
            InventoryEntry instance, which will be incorporated into the
            inventory (and replace any existing entry with the same file id).

            When old_path is None, the change indicates the addition of
            a new entry to the inventory.

            When neither new_path nor old_path are None, the change is a
            modification to an entry, such as a rename, reparent, kind change
            etc.

            The children attribute of new_entry is ignored. This is because
            this method preserves children automatically across alterations to
            the parent of the children, and cases where the parent id of a
            child is changing require the child to be passed in as a separate
            change regardless. E.g. in the recursive deletion of a directory -
            the directory's children must be included in the delta, or the
            final inventory will be invalid.

            Note that a file_id must only appear once within a given delta.
            An AssertionError is raised otherwise.
        """
        # Check that the delta is legal. It would be nice if this could be
        # done within the loops below but it's safer to validate the delta
        # before starting to mutate the inventory, as there isn't a rollback
        # facility.
        list(_check_delta_unique_ids(_check_delta_unique_new_paths(
            _check_delta_unique_old_paths(_check_delta_ids_match_entry(
            _check_delta_ids_are_valid(
            _check_delta_new_path_entry_both_or_None(delta)))))))

        children = {}
        # Remove all affected items which were in the original inventory,
        # starting with the longest paths, thus ensuring parents are examined
        # after their children, which means that everything we examine has no
        # modified children remaining by the time we examine it.
        for old_path, file_id in sorted(((op, f) for op, np, f, e in delta
                                        if op is not None), reverse=True):
            # Preserve unaltered children of file_id for later reinsertion.
            file_id_children = getattr(self[file_id], 'children', {})
            if len(file_id_children):
                children[file_id] = file_id_children
            if self.id2path(file_id) != old_path:
                raise errors.InconsistentDelta(old_path, file_id,
                    "Entry was at wrong other path %r." % self.id2path(file_id))
            # Remove file_id and the unaltered children. If file_id is not
            # being deleted it will be reinserted back later.
            self.remove_recursive_id(file_id)
        # Insert all affected which should be in the new inventory, reattaching
        # their children if they had any. This is done from shortest path to
        # longest, ensuring that items which were modified and whose parents in
        # the resulting inventory were also modified, are inserted after their
        # parents.
        for new_path, f, new_entry in sorted((np, f, e) for op, np, f, e in
                                             delta if np is not None):
            if new_entry.kind == 'directory':
                # Pop the child which to allow detection of children whose
                # parents were deleted and which were not reattached to a new
                # parent.
                replacement = InventoryDirectory(new_entry.file_id,
                    new_entry.name, new_entry.parent_id)
                replacement.revision = new_entry.revision
                replacement.children = children.pop(replacement.file_id, {})
                new_entry = replacement
            try:
                self.add(new_entry)
            except errors.DuplicateFileId:
                raise errors.InconsistentDelta(new_path, new_entry.file_id,
                    "New id is already present in target.")
            except AttributeError:
                raise errors.InconsistentDelta(new_path, new_entry.file_id,
                    "Parent is not a directory.")
            if self.id2path(new_entry.file_id) != new_path:
                raise errors.InconsistentDelta(new_path, new_entry.file_id,
                    "New path is not consistent with parent path.")
        if len(children):
            # Get the parent id that was deleted
            parent_id, children = children.popitem()
            raise errors.InconsistentDelta("<deleted>", parent_id,
                "The file id was deleted but its children were not deleted.")
    def create_by_apply_delta(self, inventory_delta, new_revision_id,
                              propagate_caches=False):
        """See CHKInventory.create_by_apply_delta()"""
        new_inv = self.copy()
        new_inv.apply_delta(inventory_delta)
        new_inv.revision_id = new_revision_id
        return new_inv

    def _set_root(self, ie):
        self.root = ie
        self._byid = {self.root.file_id: self.root}

    def copy(self):
        # TODO: jam 20051218 Should copy also copy the revision_id?
        entries = self.iter_entries()
        if self.root is None:
            return Inventory(root_id=None)
        other = Inventory(entries.next()[1].file_id)
        other.root.revision = self.root.revision
        # copy recursively so we know directories will be added before
        # their children.  There are more efficient ways than this...
        for path, entry in entries:
            other.add(entry.copy())
        return other

    def _get_mutable_inventory(self):
        """See CommonInventory._get_mutable_inventory."""
        return copy.deepcopy(self)
    def __iter__(self):
        """Iterate over all file-ids."""
        return iter(self._byid)

    def iter_just_entries(self):
        """Iterate over all entries.

        Unlike iter_entries(), just the entries are returned (not (path, ie))
        and the order of entries is undefined.

        XXX: We may not want to merge this into bzr.dev.
        """
        if self.root is None:
            return
        for _, ie in self._byid.iteritems():
            yield ie

    def __len__(self):
        """Returns number of entries."""
        return len(self._byid)
    def __getitem__(self, file_id):
        """Return the entry for given file_id.

        >>> inv = Inventory()
        >>> inv.add(InventoryFile('123123', 'hello.c', ROOT_ID))
        InventoryFile('123123', 'hello.c', parent_id='TREE_ROOT', sha1=None, len=None, revision=None)
        >>> inv['123123'].name
        'hello.c'
        """
        try:
            return self._byid[file_id]
        except KeyError:
            # really we're passing an inventory, not a tree...
            raise errors.NoSuchId(self, file_id)

    def get_file_kind(self, file_id):
        return self._byid[file_id].kind
        del old_parent.children[file_ie.name]
        new_parent.children[new_name] = file_ie

        file_ie.name = new_name
        file_ie.parent_id = new_parent_id

    def is_root(self, file_id):
        return self.root is not None and file_id == self.root.file_id
class CHKInventory(CommonInventory):
    """An inventory persisted in a CHK store.

    By design, a CHKInventory is immutable so many of the methods
    supported by Inventory - add, rename, apply_delta, etc - are *not*
    supported. To create a new CHKInventory, use create_by_apply_delta()
    or from_inventory(), say.

    Internally, a CHKInventory has one or two CHKMaps:

    * id_to_entry - a map from (file_id,) => InventoryEntry as bytes
    * parent_id_basename_to_file_id - a map from (parent_id, basename_utf8)
        => file_id as bytes

    The second map is optional and not present in early CHKRepositories.

    No caching is performed: every method call or item access will perform
    requests to the storage layer. As such, keep references to objects you
    want to reuse.
    """
1390
def __init__(self, search_key_name):
1391
CommonInventory.__init__(self)
1392
self._fileid_to_entry_cache = {}
1393
self._path_to_fileid_cache = {}
1394
self._search_key_name = search_key_name
1397
def __eq__(self, other):
1398
"""Compare two sets by comparing their contents."""
1399
if not isinstance(other, CHKInventory):
1400
return NotImplemented
1402
this_key = self.id_to_entry.key()
1403
other_key = other.id_to_entry.key()
1404
this_pid_key = self.parent_id_basename_to_file_id.key()
1405
other_pid_key = other.parent_id_basename_to_file_id.key()
1406
if None in (this_key, this_pid_key, other_key, other_pid_key):
1408
return this_key == other_key and this_pid_key == other_pid_key
    def _entry_to_bytes(self, entry):
        """Serialise entry as a single bytestring.

        :param entry: An inventory entry.
        :return: A bytestring for the entry.

        The BNF:
        ENTRY ::= FILE | DIR | SYMLINK | TREE
        FILE ::= "file: " COMMON SEP SHA SEP SIZE SEP EXECUTABLE
        DIR ::= "dir: " COMMON
        SYMLINK ::= "symlink: " COMMON SEP TARGET_UTF8
        TREE ::= "tree: " COMMON REFERENCE_REVISION
        COMMON ::= FILE_ID SEP PARENT_ID SEP NAME_UTF8 SEP REVISION
        SEP ::= "\n"
        """
        if entry.parent_id is not None:
            parent_str = entry.parent_id
        else:
            parent_str = ''
        name_str = entry.name.encode("utf8")
        if entry.kind == 'file':
            if entry.executable:
                exec_str = "Y"
            else:
                exec_str = "N"
            return "file: %s\n%s\n%s\n%s\n%s\n%d\n%s" % (
                entry.file_id, parent_str, name_str, entry.revision,
                entry.text_sha1, entry.text_size, exec_str)
        elif entry.kind == 'directory':
            return "dir: %s\n%s\n%s\n%s" % (
                entry.file_id, parent_str, name_str, entry.revision)
        elif entry.kind == 'symlink':
            return "symlink: %s\n%s\n%s\n%s\n%s" % (
                entry.file_id, parent_str, name_str, entry.revision,
                entry.symlink_target.encode("utf8"))
        elif entry.kind == 'tree-reference':
            return "tree: %s\n%s\n%s\n%s\n%s" % (
                entry.file_id, parent_str, name_str, entry.revision,
                entry.reference_revision)
        else:
            raise ValueError("unknown kind %r" % entry.kind)
1452
def _expand_fileids_to_parents_and_children(self, file_ids):
1453
"""Give a more wholistic view starting with the given file_ids.
1455
For any file_id which maps to a directory, we will include all children
1456
of that directory. We will also include all directories which are
1457
parents of the given file_ids, but we will not include their children.
1464
fringle # fringle-id
1468
if given [foo-id] we will include
1469
TREE_ROOT as interesting parents
1471
foo-id, baz-id, frob-id, fringle-id
1475
# TODO: Pre-pass over the list of fileids to see if anything is already
1476
# deserialized in self._fileid_to_entry_cache
1478
directories_to_expand = set()
1479
children_of_parent_id = {}
1480
# It is okay if some of the fileids are missing
1481
for entry in self._getitems(file_ids):
1482
if entry.kind == 'directory':
1483
directories_to_expand.add(entry.file_id)
1484
interesting.add(entry.parent_id)
1485
children_of_parent_id.setdefault(entry.parent_id, []
1486
).append(entry.file_id)
1488
# Now, interesting has all of the direct parents, but not the
1489
# parents of those parents. It also may have some duplicates with
1491
remaining_parents = interesting.difference(file_ids)
1492
# When we hit the TREE_ROOT, we'll get an interesting parent of None,
1493
# but we don't actually want to recurse into that
1494
interesting.add(None) # this will auto-filter it in the loop
1495
remaining_parents.discard(None)
1496
while remaining_parents:
1497
next_parents = set()
1498
for entry in self._getitems(remaining_parents):
1499
next_parents.add(entry.parent_id)
1500
children_of_parent_id.setdefault(entry.parent_id, []
1501
).append(entry.file_id)
1502
# Remove any search tips we've already processed
1503
remaining_parents = next_parents.difference(interesting)
1504
interesting.update(remaining_parents)
1505
# We should probably also .difference(directories_to_expand)
1506
interesting.update(file_ids)
1507
interesting.discard(None)
1508
while directories_to_expand:
1509
# Expand directories by looking in the
1510
# parent_id_basename_to_file_id map
1511
keys = [StaticTuple(f,).intern() for f in directories_to_expand]
1512
directories_to_expand = set()
1513
items = self.parent_id_basename_to_file_id.iteritems(keys)
1514
next_file_ids = set([item[1] for item in items])
1515
next_file_ids = next_file_ids.difference(interesting)
1516
interesting.update(next_file_ids)
1517
for entry in self._getitems(next_file_ids):
1518
if entry.kind == 'directory':
1519
directories_to_expand.add(entry.file_id)
1520
children_of_parent_id.setdefault(entry.parent_id, []
1521
).append(entry.file_id)
1522
return interesting, children_of_parent_id
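
    # Example (illustrative sketch, following the docstring above): given
    # ['foo-id'] for a directory 'foo' containing baz, frob and fringle, the
    # interesting set would contain TREE_ROOT plus foo-id, baz-id, frob-id and
    # fringle-id, and children_of_parent_id maps each parent id to its
    # children's file ids.
    #
    #   interesting, children = inv._expand_fileids_to_parents_and_children(
    #       ['foo-id'])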
1524
def filter(self, specific_fileids):
1525
"""Get an inventory view filtered against a set of file-ids.
1527
Children of directories and parents are included.
1529
The result may or may not reference the underlying inventory
1530
so it should be treated as immutable.
1533
parent_to_children) = self._expand_fileids_to_parents_and_children(
1535
# There is some overlap here, but we assume that all interesting items
1536
# are in the _fileid_to_entry_cache because we had to read them to
1537
# determine if they were a dir we wanted to recurse, or just a file
1538
# This should give us all the entries we'll want to add, so start
1540
other = Inventory(self.root_id)
1541
other.root.revision = self.root.revision
1542
other.revision_id = self.revision_id
1543
if not interesting or not parent_to_children:
1544
# empty filter, or filtering entrys that don't exist
1545
# (if even 1 existed, then we would have populated
1546
# parent_to_children with at least the tree root.)
1548
cache = self._fileid_to_entry_cache
1549
remaining_children = collections.deque(parent_to_children[self.root_id])
1550
while remaining_children:
1551
file_id = remaining_children.popleft()
1553
if ie.kind == 'directory':
1554
ie = ie.copy() # We create a copy to depopulate the .children attribute
1555
# TODO: depending on the uses of 'other' we should probably alwyas
1556
# '.copy()' to prevent someone from mutating other and
1557
# invaliding our internal cache
1559
if file_id in parent_to_children:
1560
remaining_children.extend(parent_to_children[file_id])
1564
def _bytes_to_utf8name_key(bytes):
1565
"""Get the file_id, revision_id key out of bytes."""
1566
# We don't normally care about name, except for times when we want
1567
# to filter out empty names because of non rich-root...
1568
sections = bytes.split('\n')
1569
kind, file_id = sections[0].split(': ')
1570
return (sections[2], intern(file_id), intern(sections[3]))
1572
def _bytes_to_entry(self, bytes):
1573
"""Deserialise a serialised entry."""
1574
sections = bytes.split('\n')
1575
if sections[0].startswith("file: "):
1576
result = InventoryFile(sections[0][6:],
1577
sections[2].decode('utf8'),
1579
result.text_sha1 = sections[4]
1580
result.text_size = int(sections[5])
1581
result.executable = sections[6] == "Y"
1582
elif sections[0].startswith("dir: "):
1583
result = CHKInventoryDirectory(sections[0][5:],
1584
sections[2].decode('utf8'),
1586
elif sections[0].startswith("symlink: "):
1587
result = InventoryLink(sections[0][9:],
1588
sections[2].decode('utf8'),
1590
result.symlink_target = sections[4].decode('utf8')
1591
elif sections[0].startswith("tree: "):
1592
result = TreeReference(sections[0][6:],
1593
sections[2].decode('utf8'),
1595
result.reference_revision = sections[4]
1597
raise ValueError("Not a serialised entry %r" % bytes)
1598
result.file_id = intern(result.file_id)
1599
result.revision = intern(sections[3])
1600
if result.parent_id == '':
1601
result.parent_id = None
1602
self._fileid_to_entry_cache[result.file_id] = result
1605
def _get_mutable_inventory(self):
1606
"""See CommonInventory._get_mutable_inventory."""
1607
entries = self.iter_entries()
1608
inv = Inventory(None, self.revision_id)
1609
for path, inv_entry in entries:
1610
inv.add(inv_entry.copy())
1613
def create_by_apply_delta(self, inventory_delta, new_revision_id,
1614
propagate_caches=False):
1615
"""Create a new CHKInventory by applying inventory_delta to this one.
1617
See the inventory developers documentation for the theory behind
1620
:param inventory_delta: The inventory delta to apply. See
1621
Inventory.apply_delta for details.
1622
:param new_revision_id: The revision id of the resulting CHKInventory.
1623
:param propagate_caches: If True, the caches for this inventory are
1624
copied to and updated for the result.
1625
:return: The new CHKInventory.
1627
split = osutils.split
1628
result = CHKInventory(self._search_key_name)
1629
if propagate_caches:
1630
# Just propagate the path-to-fileid cache for now
1631
result._path_to_fileid_cache = dict(self._path_to_fileid_cache.iteritems())
1632
search_key_func = chk_map.search_key_registry.get(self._search_key_name)
1633
self.id_to_entry._ensure_root()
1634
maximum_size = self.id_to_entry._root_node.maximum_size
1635
result.revision_id = new_revision_id
1636
result.id_to_entry = chk_map.CHKMap(
1637
self.id_to_entry._store,
1638
self.id_to_entry.key(),
1639
search_key_func=search_key_func)
1640
result.id_to_entry._ensure_root()
1641
result.id_to_entry._root_node.set_maximum_size(maximum_size)
1642
# Change to apply to the parent_id_basename delta. The dict maps
1643
# (parent_id, basename) -> (old_key, new_value). We use a dict because
1644
# when a path has its id replaced (e.g. the root is changed, or someone
1645
# does bzr mv a b, bzr mv c a, we should output a single change to this
1646
# map rather than two.
1647
parent_id_basename_delta = {}
1648
if self.parent_id_basename_to_file_id is not None:
1649
result.parent_id_basename_to_file_id = chk_map.CHKMap(
1650
self.parent_id_basename_to_file_id._store,
1651
self.parent_id_basename_to_file_id.key(),
1652
search_key_func=search_key_func)
1653
result.parent_id_basename_to_file_id._ensure_root()
1654
self.parent_id_basename_to_file_id._ensure_root()
1655
result_p_id_root = result.parent_id_basename_to_file_id._root_node
1656
p_id_root = self.parent_id_basename_to_file_id._root_node
1657
result_p_id_root.set_maximum_size(p_id_root.maximum_size)
1658
result_p_id_root._key_width = p_id_root._key_width
1660
result.parent_id_basename_to_file_id = None
1661
result.root_id = self.root_id
1662
id_to_entry_delta = []
1663
# inventory_delta is only traversed once, so we just update the
1665
# Check for repeated file ids
1666
inventory_delta = _check_delta_unique_ids(inventory_delta)
1667
# Repeated old paths
1668
inventory_delta = _check_delta_unique_old_paths(inventory_delta)
1669
# Check for repeated new paths
1670
inventory_delta = _check_delta_unique_new_paths(inventory_delta)
1671
# Check for entries that don't match the fileid
1672
inventory_delta = _check_delta_ids_match_entry(inventory_delta)
1673
# Check for nonsense fileids
1674
inventory_delta = _check_delta_ids_are_valid(inventory_delta)
1675
# Check for new_path <-> entry consistency
1676
inventory_delta = _check_delta_new_path_entry_both_or_None(
1678
# All changed entries need to have their parents be directories and be
1679
# at the right path. This set contains (path, id) tuples.
1681
# When we delete an item, all the children of it must be either deleted
1682
# or altered in their own right. As we batch process the change via
1683
# CHKMap.apply_delta, we build a set of things to use to validate the
1687
for old_path, new_path, file_id, entry in inventory_delta:
1690
result.root_id = file_id
1691
if new_path is None:
1696
if propagate_caches:
1698
del result._path_to_fileid_cache[old_path]
1701
deletes.add(file_id)
1703
new_key = StaticTuple(file_id,)
1704
new_value = result._entry_to_bytes(entry)
1705
# Update caches. It's worth doing this whether
1706
# we're propagating the old caches or not.
1707
result._path_to_fileid_cache[new_path] = file_id
1708
parents.add((split(new_path)[0], entry.parent_id))
1709
if old_path is None:
1712
old_key = StaticTuple(file_id,)
1713
if self.id2path(file_id) != old_path:
1714
raise errors.InconsistentDelta(old_path, file_id,
1715
"Entry was at wrong other path %r." %
1716
self.id2path(file_id))
1717
altered.add(file_id)
1718
id_to_entry_delta.append(StaticTuple(old_key, new_key, new_value))
1719
if result.parent_id_basename_to_file_id is not None:
1720
# parent_id, basename changes
1721
if old_path is None:
1724
old_entry = self[file_id]
1725
old_key = self._parent_id_basename_key(old_entry)
1726
if new_path is None:
1730
new_key = self._parent_id_basename_key(entry)
1732
# If the two keys are the same, the value will be unchanged
1733
# as its always the file id for this entry.
1734
if old_key != new_key:
1735
# Transform a change into explicit delete/add preserving
1736
# a possible match on the key from a different file id.
1737
if old_key is not None:
1738
parent_id_basename_delta.setdefault(
1739
old_key, [None, None])[0] = old_key
1740
if new_key is not None:
1741
parent_id_basename_delta.setdefault(
1742
new_key, [None, None])[1] = new_value
1743
# validate that deletes are complete.
1744
for file_id in deletes:
1745
entry = self[file_id]
1746
if entry.kind != 'directory':
1748
# This loop could potentially be better by using the id_basename
1749
# map to just get the child file ids.
1750
for child in entry.children.values():
1751
if child.file_id not in altered:
1752
raise errors.InconsistentDelta(self.id2path(child.file_id),
1753
child.file_id, "Child not deleted or reparented when "
1755
result.id_to_entry.apply_delta(id_to_entry_delta)
1756
if parent_id_basename_delta:
1757
# Transform the parent_id_basename delta data into a linear delta
1758
# with only one record for a given key. Optimally this would allow
1759
# re-keying, but its simpler to just output that as a delete+add
1760
# to spend less time calculating the delta.
1762
for key, (old_key, value) in parent_id_basename_delta.iteritems():
1763
if value is not None:
1764
delta_list.append((old_key, key, value))
1766
delta_list.append((old_key, None, None))
1767
result.parent_id_basename_to_file_id.apply_delta(delta_list)
1768
parents.discard(('', None))
1769
for parent_path, parent in parents:
1771
if result[parent].kind != 'directory':
1772
raise errors.InconsistentDelta(result.id2path(parent), parent,
1773
'Not a directory, but given children')
1774
except errors.NoSuchId:
1775
raise errors.InconsistentDelta("<unknown>", parent,
1776
"Parent is not present in resulting inventory.")
1777
if result.path2id(parent_path) != parent:
1778
raise errors.InconsistentDelta(parent_path, parent,
1779
"Parent has wrong path %r." % result.path2id(parent_path))
1783
def deserialise(klass, chk_store, bytes, expected_revision_id):
1784
"""Deserialise a CHKInventory.
1786
:param chk_store: A CHK capable VersionedFiles instance.
1787
:param bytes: The serialised bytes.
1788
:param expected_revision_id: The revision ID we think this inventory is
1790
:return: A CHKInventory
1792
lines = bytes.split('\n')
1794
raise AssertionError('bytes to deserialize must end with an eol')
1796
if lines[0] != 'chkinventory:':
1797
raise ValueError("not a serialised CHKInventory: %r" % bytes)
1799
allowed_keys = frozenset(['root_id', 'revision_id', 'search_key_name',
1800
'parent_id_basename_to_file_id',
1802
for line in lines[1:]:
1803
key, value = line.split(': ', 1)
1804
if key not in allowed_keys:
1805
raise errors.BzrError('Unknown key in inventory: %r\n%r'
1808
raise errors.BzrError('Duplicate key in inventory: %r\n%r'
1811
revision_id = intern(info['revision_id'])
1812
root_id = intern(info['root_id'])
1813
search_key_name = intern(info.get('search_key_name', 'plain'))
1814
parent_id_basename_to_file_id = intern(info.get(
1815
'parent_id_basename_to_file_id', None))
1816
if not parent_id_basename_to_file_id.startswith('sha1:'):
1817
raise ValueError('parent_id_basename_to_file_id should be a sha1'
1818
' key not %r' % (parent_id_basename_to_file_id,))
1819
id_to_entry = info['id_to_entry']
1820
if not id_to_entry.startswith('sha1:'):
1821
raise ValueError('id_to_entry should be a sha1'
1822
' key not %r' % (id_to_entry,))
1824
result = CHKInventory(search_key_name)
1825
result.revision_id = revision_id
1826
result.root_id = root_id
1827
search_key_func = chk_map.search_key_registry.get(
1828
result._search_key_name)
1829
if parent_id_basename_to_file_id is not None:
1830
result.parent_id_basename_to_file_id = chk_map.CHKMap(
1831
chk_store, StaticTuple(parent_id_basename_to_file_id,),
1832
search_key_func=search_key_func)
1834
result.parent_id_basename_to_file_id = None
1836
result.id_to_entry = chk_map.CHKMap(chk_store,
1837
StaticTuple(id_to_entry,),
1838
search_key_func=search_key_func)
1839
if (result.revision_id,) != expected_revision_id:
1840
raise ValueError("Mismatched revision id and expected: %r, %r" %
1841
(result.revision_id, expected_revision_id))
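
    # Example (illustrative sketch): the serialised header parsed above; the
    # sha1 keys are placeholders.
    #
    #   chkinventory:
    #   revision_id: rev-1
    #   root_id: TREE_ROOT
    #   parent_id_basename_to_file_id: sha1:...
    #   id_to_entry: sha1:...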
1845
def from_inventory(klass, chk_store, inventory, maximum_size=0, search_key_name='plain'):
1846
"""Create a CHKInventory from an existing inventory.
1848
The content of inventory is copied into the chk_store, and a
1849
CHKInventory referencing that is returned.
1851
:param chk_store: A CHK capable VersionedFiles instance.
1852
:param inventory: The inventory to copy.
1853
:param maximum_size: The CHKMap node size limit.
1854
:param search_key_name: The identifier for the search key function
1856
result = klass(search_key_name)
1857
result.revision_id = inventory.revision_id
1858
result.root_id = inventory.root.file_id
1860
entry_to_bytes = result._entry_to_bytes
1861
parent_id_basename_key = result._parent_id_basename_key
1862
id_to_entry_dict = {}
1863
parent_id_basename_dict = {}
1864
for path, entry in inventory.iter_entries():
1865
key = StaticTuple(entry.file_id,).intern()
1866
id_to_entry_dict[key] = entry_to_bytes(entry)
1867
p_id_key = parent_id_basename_key(entry)
1868
parent_id_basename_dict[p_id_key] = entry.file_id
1870
result._populate_from_dicts(chk_store, id_to_entry_dict,
1871
parent_id_basename_dict, maximum_size=maximum_size)
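
    # Example (illustrative sketch): converting a plain Inventory; 'chk_store'
    # stands for a CHK-capable VersionedFiles instance and the parameters are
    # hypothetical.
    #
    #   chk_inv = CHKInventory.from_inventory(chk_store, inv,
    #       maximum_size=4096, search_key_name='plain')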
1874
def _populate_from_dicts(self, chk_store, id_to_entry_dict,
1875
parent_id_basename_dict, maximum_size):
1876
search_key_func = chk_map.search_key_registry.get(self._search_key_name)
1877
root_key = chk_map.CHKMap.from_dict(chk_store, id_to_entry_dict,
1878
maximum_size=maximum_size, key_width=1,
1879
search_key_func=search_key_func)
1880
self.id_to_entry = chk_map.CHKMap(chk_store, root_key,
1882
root_key = chk_map.CHKMap.from_dict(chk_store,
1883
parent_id_basename_dict,
1884
maximum_size=maximum_size, key_width=2,
1885
search_key_func=search_key_func)
1886
self.parent_id_basename_to_file_id = chk_map.CHKMap(chk_store,
1887
root_key, search_key_func)
1889
def _parent_id_basename_key(self, entry):
1890
"""Create a key for a entry in a parent_id_basename_to_file_id index."""
1891
if entry.parent_id is not None:
1892
parent_id = entry.parent_id
1895
return StaticTuple(parent_id, entry.name.encode('utf8')).intern()

    def __getitem__(self, file_id):
        """map a single file_id -> InventoryEntry."""
        if file_id is None:
            raise errors.NoSuchId(self, file_id)
        result = self._fileid_to_entry_cache.get(file_id, None)
        if result is not None:
            return result
        try:
            return self._bytes_to_entry(
                self.id_to_entry.iteritems([StaticTuple(file_id,)]).next()[1])
        except StopIteration:
            # really we're passing an inventory, not a tree...
            raise errors.NoSuchId(self, file_id)

    def _getitems(self, file_ids):
        """Similar to __getitem__, but lets you query for multiple.

        The returned order is undefined. And currently if an item doesn't
        exist, it isn't included in the output.
        """
        result = []
        remaining = []
        for file_id in file_ids:
            entry = self._fileid_to_entry_cache.get(file_id, None)
            if entry is None:
                remaining.append(file_id)
            else:
                result.append(entry)
        file_keys = [StaticTuple(f,).intern() for f in remaining]
        for file_key, value in self.id_to_entry.iteritems(file_keys):
            entry = self._bytes_to_entry(value)
            result.append(entry)
            self._fileid_to_entry_cache[entry.file_id] = entry
        return result

    def has_id(self, file_id):
        # Perhaps have an explicit 'contains' method on CHKMap ?
        if self._fileid_to_entry_cache.get(file_id, None) is not None:
            return True
        return len(list(
            self.id_to_entry.iteritems([StaticTuple(file_id,)]))) == 1

    def is_root(self, file_id):
        return file_id == self.root_id

    def _iter_file_id_parents(self, file_id):
        """Yield the parents of file_id up to the root."""
        while file_id is not None:
            try:
                ie = self[file_id]
            except KeyError:
                raise errors.NoSuchId(tree=self, file_id=file_id)
            yield ie
            file_id = ie.parent_id

    def __iter__(self):
        """Iterate over all file-ids."""
        for key, _ in self.id_to_entry.iteritems():
            yield key[-1]

    def iter_just_entries(self):
        """Iterate over all entries.

        Unlike iter_entries(), just the entries are returned (not (path, ie))
        and the order of entries is undefined.

        XXX: We may not want to merge this into bzr.dev.
        """
        for key, entry in self.id_to_entry.iteritems():
            file_id = key[0]
            ie = self._fileid_to_entry_cache.get(file_id, None)
            if ie is None:
                ie = self._bytes_to_entry(entry)
                self._fileid_to_entry_cache[file_id] = ie
            yield ie

    def iter_changes(self, basis):
        """Generate a Tree.iter_changes change list between this and basis.

        :param basis: Another CHKInventory.
        :return: An iterator over the changes between self and basis, as per
            tree.iter_changes().
        """
        # We want: (file_id, (path_in_source, path_in_target),
        # changed_content, versioned, parent, name, kind,
        # executable)
        for key, basis_value, self_value in \
            self.id_to_entry.iter_changes(basis.id_to_entry):
            file_id = key[0]
            if basis_value is not None:
                basis_entry = basis._bytes_to_entry(basis_value)
                path_in_source = basis.id2path(file_id)
                basis_parent = basis_entry.parent_id
                basis_name = basis_entry.name
                basis_executable = basis_entry.executable
            else:
                path_in_source = None
                basis_parent = None
                basis_name = None
                basis_executable = None
            if self_value is not None:
                self_entry = self._bytes_to_entry(self_value)
                path_in_target = self.id2path(file_id)
                self_parent = self_entry.parent_id
                self_name = self_entry.name
                self_executable = self_entry.executable
            else:
                path_in_target = None
                self_parent = None
                self_name = None
                self_executable = None
            if basis_value is None:
                # add
                kind = (None, self_entry.kind)
                versioned = (False, True)
            elif self_value is None:
                # delete
                kind = (basis_entry.kind, None)
                versioned = (True, False)
            else:
                kind = (basis_entry.kind, self_entry.kind)
                versioned = (True, True)
            changed_content = False
            if kind[0] != kind[1]:
                changed_content = True
            elif kind[0] == 'file':
                if (self_entry.text_size != basis_entry.text_size or
                    self_entry.text_sha1 != basis_entry.text_sha1):
                    changed_content = True
            elif kind[0] == 'symlink':
                if self_entry.symlink_target != basis_entry.symlink_target:
                    changed_content = True
            elif kind[0] == 'tree-reference':
                if (self_entry.reference_revision !=
                    basis_entry.reference_revision):
                    changed_content = True
            parent = (basis_parent, self_parent)
            name = (basis_name, self_name)
            executable = (basis_executable, self_executable)
            if (not changed_content
                and parent[0] == parent[1]
                and name[0] == name[1]
                and executable[0] == executable[1]):
                # Could happen when only the revision changed for a directory
                # for instance.
                continue
            yield (file_id, (path_in_source, path_in_target), changed_content,
                versioned, parent, name, kind, executable)
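
    # Illustrative sketch (not part of the original module): a file added
    # since the basis inventory would be reported roughly as (with
    # hypothetical ids):
    #
    #   ('hello-id', (None, 'hello.c'), True, (False, True),
    #    (None, 'root-id'), (None, 'hello.c'), (None, 'file'), (None, False))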

    def __len__(self):
        """Return the number of entries in the inventory."""
        return len(self.id_to_entry)

    def _make_delta(self, old):
        """Make an inventory delta from two inventories."""
        if type(old) != CHKInventory:
            return CommonInventory._make_delta(self, old)
        delta = []
        for key, old_value, self_value in \
            self.id_to_entry.iter_changes(old.id_to_entry):
            file_id = key[0]
            if old_value is not None:
                old_path = old.id2path(file_id)
            else:
                old_path = None
            if self_value is not None:
                entry = self._bytes_to_entry(self_value)
                self._fileid_to_entry_cache[file_id] = entry
                new_path = self.id2path(file_id)
            else:
                entry = None
                new_path = None
            delta.append((old_path, new_path, file_id, entry))
        return delta

    def path2id(self, relpath):
        """See CommonInventory.path2id()."""
        # TODO: perhaps support negative hits?
        result = self._path_to_fileid_cache.get(relpath, None)
        if result is not None:
            return result
        if isinstance(relpath, basestring):
            names = osutils.splitpath(relpath)
        else:
            names = relpath
        current_id = self.root_id
        if current_id is None:
            return None
        parent_id_index = self.parent_id_basename_to_file_id
        cur_path = None
        for basename in names:
            if cur_path is None:
                cur_path = basename
            else:
                cur_path = cur_path + '/' + basename
            basename_utf8 = basename.encode('utf8')
            file_id = self._path_to_fileid_cache.get(cur_path, None)
            if file_id is None:
                key_filter = [StaticTuple(current_id, basename_utf8)]
                items = parent_id_index.iteritems(key_filter)
                for (parent_id, name_utf8), file_id in items:
                    if parent_id != current_id or name_utf8 != basename_utf8:
                        raise errors.BzrError("corrupt inventory lookup! "
                            "%r %r %r %r" % (parent_id, current_id, name_utf8,
                            basename_utf8))
                if file_id is None:
                    return None
                else:
                    self._path_to_fileid_cache[cur_path] = file_id
            current_id = file_id
        return current_id
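
    # Illustrative usage (not part of the original module), assuming an
    # inventory that versions 'src/hello.c' under a hypothetical file id:
    #
    #   >>> inv.path2id('src/hello.c')
    #   'hello.c-id'
    #   >>> inv.path2id('no/such/file') is None
    #   True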

    def to_lines(self):
        """Serialise the inventory to lines."""
        lines = ["chkinventory:\n"]
        if self._search_key_name != 'plain':
            # custom ordering grouping things that don't change together
            lines.append('search_key_name: %s\n' % (self._search_key_name,))
            lines.append("root_id: %s\n" % self.root_id)
            lines.append('parent_id_basename_to_file_id: %s\n' %
                (self.parent_id_basename_to_file_id.key()[0],))
            lines.append("revision_id: %s\n" % self.revision_id)
            lines.append("id_to_entry: %s\n" % (self.id_to_entry.key()[0],))
        else:
            lines.append("revision_id: %s\n" % self.revision_id)
            lines.append("root_id: %s\n" % self.root_id)
            if self.parent_id_basename_to_file_id is not None:
                lines.append('parent_id_basename_to_file_id: %s\n' %
                    (self.parent_id_basename_to_file_id.key()[0],))
            lines.append("id_to_entry: %s\n" % (self.id_to_entry.key()[0],))
        return lines

    @property
    def root(self):
        """Get the root entry."""
        return self[self.root_id]


class CHKInventoryDirectory(InventoryDirectory):
    """A directory in an inventory."""

    __slots__ = ['_children', '_chk_inventory']

    def __init__(self, file_id, name, parent_id, chk_inventory):
        # Don't call InventoryDirectory.__init__ - it isn't right for this
        # class.
        InventoryEntry.__init__(self, file_id, name, parent_id)
        self._children = None
        self._chk_inventory = chk_inventory

    @property
    def children(self):
        """Access the list of children of this directory.

        With a parent_id_basename_to_file_id index, this loads all the
        children without loading the entire index; without one it would have
        to, which is bad. A more sophisticated proxy object might be nice, to
        allow partial loading of children as well when specific names are
        accessed. (So path traversal can be written in the obvious way but
        not examine siblings.)
        """
        if self._children is not None:
            return self._children
        # No longer supported
        if self._chk_inventory.parent_id_basename_to_file_id is None:
            raise AssertionError("Inventories without"
                " parent_id_basename_to_file_id are no longer supported")
        result = {}
        # XXX: Todo - use proxy objects for the children rather than loading
        # all when the attribute is referenced.
        parent_id_index = self._chk_inventory.parent_id_basename_to_file_id
        child_keys = set()
        for (parent_id, name_utf8), file_id in parent_id_index.iteritems(
            key_filter=[StaticTuple(self.file_id,)]):
            child_keys.add(StaticTuple(file_id,))
        cached = set()
        for file_id_key in child_keys:
            entry = self._chk_inventory._fileid_to_entry_cache.get(
                file_id_key[0], None)
            if entry is not None:
                result[entry.name] = entry
                cached.add(file_id_key)
        child_keys.difference_update(cached)
        # populate; todo: do by name
        id_to_entry = self._chk_inventory.id_to_entry
        for file_id_key, bytes in id_to_entry.iteritems(child_keys):
            entry = self._chk_inventory._bytes_to_entry(bytes)
            result[entry.name] = entry
            self._chk_inventory._fileid_to_entry_cache[file_id_key[0]] = entry
        self._children = result
        return result


entry_factory = {
    'directory': InventoryDirectory,
    'file': InventoryFile,
    'symlink': InventoryLink,
    'tree-reference': TreeReference
}


def make_entry(kind, name, parent_id, file_id=None):
    """Create an inventory entry.

    :param kind: the type of inventory entry to create.
    :param name: the basename of the entry.
    :param parent_id: the parent_id of the entry.
    :param file_id: the file_id to use. if None, one will be created.
    """
    if file_id is None:
        file_id = generate_ids.gen_file_id(name)
    name = ensure_normalized_name(name)
    try:
        factory = entry_factory[kind]
    except KeyError:
        raise errors.BadFileKindError(name, kind)
    return factory(file_id, name, parent_id)
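
# Illustrative usage (not part of the original module): make_entry() picks
# the InventoryEntry subclass matching the kind and generates a file id when
# none is supplied; an unknown kind raises errors.BadFileKindError.
#
#   >>> entry = make_entry('file', 'hello.c', ROOT_ID)
#   >>> entry.kind
#   'file'
#   >>> entry.parent_id == ROOT_ID
#   True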


def ensure_normalized_name(name):
    """Normalize name.

    :raises InvalidNormalization: When name is not normalized, and cannot be
        accessed on this platform by the normalized path.
    :return: The NFC normalised version of name.
    """
    #------- This has been copied to bzrlib.dirstate.DirState.add, please
    # keep them synchronised.
    # we don't import normalized_filename directly because we want to be
    # able to change the implementation at runtime for tests.
    norm_name, can_access = osutils.normalized_filename(name)
    if norm_name != name:
        if can_access:
            return norm_name
        else:
            # TODO: jam 20060701 This would probably be more useful
            # if the error was raised with the full path
            raise errors.InvalidNormalization(name)
    return name
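
# Illustrative sketch (not part of the original module): a name already in
# NFC form is returned unchanged; a decomposed (NFD) name is either returned
# in its composed form or rejected with InvalidNormalization, depending on
# whether the platform can access the normalised path.
#
#   >>> ensure_normalized_name(u'hello.c')
#   u'hello.c'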


_NAME_RE = None


def is_valid_name(name):
    global _NAME_RE
    if _NAME_RE is None:
        _NAME_RE = re.compile(r'^[^/\\]+$')

    return bool(_NAME_RE.match(name))
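
# Illustrative usage (not part of the original module): a valid name is a
# single path component, so anything containing a slash or backslash is
# rejected.
#
#   >>> is_valid_name('hello.c')
#   True
#   >>> is_valid_name('src/hello.c')
#   False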


def _check_delta_unique_ids(delta):
    """Decorate a delta and check that the file ids in it are unique.

    :return: A generator over delta.
    """
    ids = set()
    for item in delta:
        length = len(ids) + 1
        ids.add(item[2])
        if len(ids) != length:
            raise errors.InconsistentDelta(item[0] or item[1], item[2],
                "repeated file_id")
        yield item
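
# Illustrative sketch (not part of the original module): the _check_delta_*
# helpers are generators that pass delta items through unchanged and raise
# InconsistentDelta lazily, as the delta is consumed, so several checks can
# be stacked around one delta before it is applied:
#
#   checked = _check_delta_unique_ids(_check_delta_unique_new_paths(delta))
#   list(checked)   # raises InconsistentDelta if a file id or new path repeats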


def _check_delta_unique_new_paths(delta):
    """Decorate a delta and check that the new paths in it are unique.

    :return: A generator over delta.
    """
    paths = set()
    for item in delta:
        length = len(paths) + 1
        path = item[1]
        if path is not None:
            paths.add(path)
            if len(paths) != length:
                raise errors.InconsistentDelta(path, item[2], "repeated path")
        yield item


def _check_delta_unique_old_paths(delta):
    """Decorate a delta and check that the old paths in it are unique.

    :return: A generator over delta.
    """
    paths = set()
    for item in delta:
        length = len(paths) + 1
        path = item[0]
        if path is not None:
            paths.add(path)
            if len(paths) != length:
                raise errors.InconsistentDelta(path, item[2], "repeated path")
        yield item


def _check_delta_ids_are_valid(delta):
    """Decorate a delta and check that the ids in it are valid.

    :return: A generator over delta.
    """
    for item in delta:
        entry = item[3]
        if item[2] is None:
            raise errors.InconsistentDelta(item[0] or item[1], item[2],
                "entry with file_id None %r" % entry)
        if type(item[2]) != str:
            raise errors.InconsistentDelta(item[0] or item[1], item[2],
                "entry with non bytes file_id %r" % entry)
        yield item


def _check_delta_ids_match_entry(delta):
    """Decorate a delta and check that the ids in it match the entry.file_id.

    :return: A generator over delta.
    """
    for item in delta:
        entry = item[3]
        if entry is not None:
            if entry.file_id != item[2]:
                raise errors.InconsistentDelta(item[0] or item[1], item[2],
                    "mismatched id with %r" % entry)
        yield item


def _check_delta_new_path_entry_both_or_None(delta):
    """Decorate a delta and check that the new_path and entry are paired.

    :return: A generator over delta.
    """
    for item in delta:
        new_path = item[1]
        entry = item[3]
        if new_path is None and entry is not None:
            raise errors.InconsistentDelta(item[0], item[1],
                "Entry with no new_path")
        if new_path is not None and entry is None:
            raise errors.InconsistentDelta(new_path, item[1],
                "new_path with no entry")
        yield item