src/wibble/wibble.c
>>> i.id2path('2326')
'src/wibble/wibble.c'

TODO: Maybe also keep the full path of the entry, and the children?
But those depend on its position within a particular inventory, and
it would be nice not to need to hold the backpointer here.
94
# TODO: split InventoryEntry into subclasses for files,
95
# directories, etc etc.
97
__slots__ = ['text_sha1', 'text_size', 'file_id', 'name', 'kind',
98
'text_id', 'parent_id', 'children', ]
100
def __init__(self, file_id, name, kind, parent_id, text_id=None):
125
# Constants returned by describe_change()
127
# TODO: These should probably move to some kind of FileChangeDescription
128
# class; that's like what's inside a TreeDelta but we want to be able to
129
# generate them just for one file at a time.
131
MODIFIED_AND_RENAMED = 'modified and renamed'
135
def detect_changes(self, old_entry):
136
"""Return a (text_modified, meta_modified) from this to old_entry.
138
_read_tree_state must have been called on self and old_entry prior to
139
calling detect_changes.
143
def _diff(self, text_diff, from_label, tree, to_label, to_entry, to_tree,
144
output_to, reverse=False):
145
"""Perform a diff between two entries of the same kind."""
147
def parent_candidates(self, previous_inventories):
148
"""Find possible per-file graph parents.
150
This is currently defined by:
151
- Select the last changed revision in the parent inventory.
152
- To deal with a short-lived bug in bzr 0.8's development, two entries
that have the same last-changed revision but different 'x' bit settings are
156
# revision:ie mapping for each ie found in previous_inventories.
158
# identify candidate head revision ids.
159
for inv in previous_inventories:
160
if self.file_id in inv:
161
ie = inv[self.file_id]
162
if ie.revision in candidates:
163
# same revision value in two different inventories:
164
# correct possible inconsistencies:
165
# * there was a bug in revision updates with 'x' bit
168
if candidates[ie.revision].executable != ie.executable:
169
candidates[ie.revision].executable = False
170
ie.executable = False
171
except AttributeError:
174
# add this revision as a candidate.
175
candidates[ie.revision] = ie
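# Illustrative sketch (not part of the original source): assuming two
# hypothetical basis inventories, 'basis_inv_a' and 'basis_inv_b', that
# both carry this file at revision 'rev-1', parent_candidates() collapses
# them into a single candidate and normalises a disagreeing executable bit:
#
#   >>> candidates = ie.parent_candidates([basis_inv_a, basis_inv_b])
#   >>> sorted(candidates.keys())
#   ['rev-1']
#   >>> candidates['rev-1'].executable
#   False
#
# The returned dict maps each candidate head revision id to the entry found
# for that revision.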
178
@deprecated_method(deprecated_in((1, 6, 0)))
179
def get_tar_item(self, root, dp, now, tree):
180
"""Get a tarfile item and a file stream for its content."""
181
item = tarfile.TarInfo(osutils.pathjoin(root, dp).encode('utf8'))
182
# TODO: would be cool to actually set it to the timestamp of the
183
# revision it was last changed
185
fileobj = self._put_in_tar(item, tree)
189
"""Return true if the object this entry represents has textual data.
191
Note that textual data includes binary content.
193
Also note that all entries get weave files created for them.
194
This attribute is primarily used when upgrading from old trees that
195
did not have the weave index for all inventory entries.
199
def __init__(self, file_id, name, parent_id, text_id=None):
"""Create an InventoryEntry

The filename must be a single component, relative to the
parent directory; it cannot be a whole path or relative name.

>>> e = InventoryFile('123', 'hello.c', ROOT_ID)
>>> e = InventoryFile('123', 'src/hello.c', ROOT_ID)
Traceback (most recent call last):
InvalidEntryName: Invalid entry name: src/hello.c

if '/' in name or '\\' in name:
raise errors.InvalidEntryName(name=name)
self.executable = False
self.text_sha1 = None
self.text_size = None
self.file_id = file_id
self.text_id = text_id
self.parent_id = parent_id
self.symlink_target = None
self.reference_revision = None
227
def kind_character(self):
228
"""Return a short kind indicator useful for appending to names."""
229
raise BzrError('unknown kind %r' % self.kind)
231
known_kinds = ('file', 'directory', 'symlink')
233
def _put_in_tar(self, item, tree):
234
"""populate item for stashing in a tar, and return the content stream.
236
If no content is available, return None.
238
raise BzrError("don't know how to export {%s} of kind %r" %
239
(self.file_id, self.kind))
241
@deprecated_method(deprecated_in((1, 6, 0)))
242
def put_on_disk(self, dest, dp, tree):
243
"""Create a representation of self on disk in the prefix dest.
245
This is a template method - implement _put_on_disk in subclasses.
247
fullpath = osutils.pathjoin(dest, dp)
248
self._put_on_disk(fullpath, tree)
249
# mutter(" export {%s} kind %s to %s", self.file_id,
250
# self.kind, fullpath)
252
def _put_on_disk(self, fullpath, tree):
253
"""Put this entry onto disk at fullpath, from tree tree."""
254
raise BzrError("don't know how to export {%s} of kind %r" % (self.file_id, self.kind))
135
256
def sorted_children(self):
136
l = self.children.items()
257
return sorted(self.children.items())
260
def versionable_kind(kind):
261
return (kind in ('file', 'directory', 'symlink', 'tree-reference'))
263
def check(self, checker, rev_id, inv):
264
"""Check this inventory entry is intact.
266
This is a template method, override _check for kind specific
269
:param checker: Check object providing context for the checks;
270
can be used to find out what parts of the repository have already
272
:param rev_id: Revision id from which this InventoryEntry was loaded.
273
Not necessarily the last-changed revision for this file.
274
:param inv: Inventory from which the entry was loaded.
276
if self.parent_id is not None:
277
if not inv.has_id(self.parent_id):
278
raise BzrCheckError('missing parent {%s} in inventory for revision {%s}'
279
% (self.parent_id, rev_id))
280
checker._add_entry_to_text_key_references(inv, self)
281
self._check(checker, rev_id)
283
def _check(self, checker, rev_id):
284
"""Check this inventory entry for kind specific errors."""
285
checker._report_items.append(
286
'unknown entry kind %r in revision {%s}' % (self.kind, rev_id))
142
other = InventoryEntry(self.file_id, self.name, self.kind,
143
self.parent_id, text_id=self.text_id)
144
other.text_sha1 = self.text_sha1
145
other.text_size = self.text_size
146
# note that children are *not* copied; they're pulled across when
289
"""Clone this inventory entry."""
290
raise NotImplementedError
293
def describe_change(old_entry, new_entry):
294
"""Describe the change between old_entry and this.
296
This smells of being an InterInventoryEntry situation, but as it's
the first one, we're making it a static method for now.
299
An entry with a different parent, or different name is considered
300
to be renamed. Reparenting is an internal detail.
301
Note that renaming the parent does not trigger a rename for the
304
# TODO: Perhaps return an object rather than just a string
305
if old_entry is new_entry:
306
# also the case of both being None
308
elif old_entry is None:
310
elif new_entry is None:
312
if old_entry.kind != new_entry.kind:
314
text_modified, meta_modified = new_entry.detect_changes(old_entry)
315
if text_modified or meta_modified:
319
# TODO 20060511 (mbp, rbc) factor out 'detect_rename' here.
320
if old_entry.parent_id != new_entry.parent_id:
322
elif old_entry.name != new_entry.name:
326
if renamed and not modified:
327
return InventoryEntry.RENAMED
328
if modified and not renamed:
330
if modified and renamed:
331
return InventoryEntry.MODIFIED_AND_RENAMED
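# Illustrative sketch (not from the original source): describe_change()
# compares two entries for the same file id and returns one of the
# class-level constants. Assuming 'old_ie' and 'new_ie' are hypothetical
# entries where the text changed and the file was also renamed:
#
#   >>> InventoryEntry.describe_change(old_ie, new_ie)
#   'modified and renamed'
#
# Passing None for old_entry describes an addition, and None for
# new_entry describes a removal.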
def __repr__(self):
return ("%s(%r, %r, parent_id=%r, revision=%r)"
153
336
% (self.__class__.__name__,
160
def to_element(self):
161
"""Convert to XML element"""
162
from bzrlib.xml import Element
166
e.set('name', self.name)
167
e.set('file_id', self.file_id)
168
e.set('kind', self.kind)
170
if self.text_size is not None:
171
e.set('text_size', '%d' % self.text_size)
173
for f in ['text_id', 'text_sha1']:
178
# to be conservative, we don't externalize the root pointers
179
# for now, leaving them as null in the xml form. in a future
180
# version it will be implied by nested elements.
181
if self.parent_id != ROOT_ID:
182
assert isinstance(self.parent_id, basestring)
183
e.set('parent_id', self.parent_id)
190
def from_element(cls, elt):
191
assert elt.tag == 'entry'
193
## original format inventories don't have a parent_id for
194
## nodes in the root directory, but it's cleaner to use one
196
parent_id = elt.get('parent_id')
197
if parent_id is None:
200
self = cls(elt.get('file_id'), elt.get('name'), elt.get('kind'), parent_id)
201
self.text_id = elt.get('text_id')
202
self.text_sha1 = elt.get('text_sha1')
204
## mutter("read inventoryentry: %r" % (elt.attrib))
206
v = elt.get('text_size')
207
self.text_size = v and int(v)
212
from_element = classmethod(from_element)
214
342
def __eq__(self, other):
344
# For the case when objects are cached
if not isinstance(other, InventoryEntry):
216
347
return NotImplemented
349
return ((self.file_id == other.file_id)
350
and (self.name == other.name)
351
and (other.symlink_target == self.symlink_target)
352
and (self.text_sha1 == other.text_sha1)
353
and (self.text_size == other.text_size)
354
and (self.text_id == other.text_id)
355
and (self.parent_id == other.parent_id)
356
and (self.kind == other.kind)
357
and (self.revision == other.revision)
358
and (self.executable == other.executable)
359
and (self.reference_revision == other.reference_revision)
def __ne__(self, other):
return not (self == other)
def __hash__(self):
raise ValueError('not hashable')
368
def _unchanged(self, previous_ie):
369
"""Has this entry changed relative to previous_ie.
371
This method should be overridden in child classes.
374
# different inv parent
375
if previous_ie.parent_id != self.parent_id:
378
elif previous_ie.name != self.name:
380
elif previous_ie.kind != self.kind:
384
def _read_tree_state(self, path, work_tree):
385
"""Populate fields in the inventory entry from the given tree.
387
Note that this should be modified to be a noop on virtual trees
388
as all entries created there are prepopulated.
390
# TODO: Rather than running this manually, we should check the
391
# working sha1 and other expensive properties when they're
392
# first requested, or preload them if they're already known
393
pass # nothing to do by default
395
def _forget_tree_state(self):
235
399
class RootEntry(InventoryEntry):
401
__slots__ = ['text_sha1', 'text_size', 'file_id', 'name', 'kind',
402
'text_id', 'parent_id', 'children', 'executable',
403
'revision', 'symlink_target', 'reference_revision']
405
def _check(self, checker, rev_id):
406
"""See InventoryEntry._check"""
236
408
def __init__(self, file_id):
237
409
self.file_id = file_id
238
410
self.children = {}
self.kind = 'directory'
240
412
self.parent_id = None
415
symbol_versioning.warn('RootEntry is deprecated as of bzr 0.10.'
416
' Please use InventoryDirectory instead.',
417
DeprecationWarning, stacklevel=2)
243
419
def __eq__(self, other):
244
420
if not isinstance(other, RootEntry):
245
421
return NotImplemented
247
423
return (self.file_id == other.file_id) \
248
424
and (self.children == other.children)
252
class Inventory(object):
253
"""Inventory of versioned files in a tree.
255
This describes which file_id is present at each point in the tree,
256
and possibly the SHA-1 or other information about the file.
427
class InventoryDirectory(InventoryEntry):
428
"""A directory in an inventory."""
430
__slots__ = ['text_sha1', 'text_size', 'file_id', 'name', 'kind',
431
'text_id', 'parent_id', 'children', 'executable',
432
'revision', 'symlink_target', 'reference_revision']
434
def _check(self, checker, rev_id):
435
"""See InventoryEntry._check"""
436
if (self.text_sha1 is not None or self.text_size is not None or
437
self.text_id is not None):
438
checker._report_items.append('directory {%s} has text in revision {%s}'
439
% (self.file_id, rev_id))
440
# In non rich root repositories we do not expect a file graph for the
442
if self.name == '' and not checker.rich_roots:
444
# Directories are stored as an empty file, but the file should exist
445
# to provide a per-fileid log. The hash of every directory content is
446
# "da..." below (the sha1sum of '').
447
checker.add_pending_item(rev_id,
448
('texts', self.file_id, self.revision), 'text',
449
'da39a3ee5e6b4b0d3255bfef95601890afd80709')
452
other = InventoryDirectory(self.file_id, self.name, self.parent_id)
453
other.revision = self.revision
454
# note that children are *not* copied; they're pulled across when
458
def __init__(self, file_id, name, parent_id):
459
super(InventoryDirectory, self).__init__(file_id, name, parent_id)
461
self.kind = 'directory'
463
def kind_character(self):
464
"""See InventoryEntry.kind_character."""
467
def _put_in_tar(self, item, tree):
468
"""See InventoryEntry._put_in_tar."""
469
item.type = tarfile.DIRTYPE
476
def _put_on_disk(self, fullpath, tree):
477
"""See InventoryEntry._put_on_disk."""
481
class InventoryFile(InventoryEntry):
482
"""A file in an inventory."""
484
__slots__ = ['text_sha1', 'text_size', 'file_id', 'name', 'kind',
485
'text_id', 'parent_id', 'children', 'executable',
486
'revision', 'symlink_target', 'reference_revision']
488
def _check(self, checker, tree_revision_id):
489
"""See InventoryEntry._check"""
490
# TODO: check size too.
491
checker.add_pending_item(tree_revision_id,
492
('texts', self.file_id, self.revision), 'text',
494
if self.text_size is None:
495
checker._report_items.append(
496
'fileid {%s} in {%s} has None for text_size' % (self.file_id,
500
other = InventoryFile(self.file_id, self.name, self.parent_id)
501
other.executable = self.executable
502
other.text_id = self.text_id
503
other.text_sha1 = self.text_sha1
504
other.text_size = self.text_size
505
other.revision = self.revision
508
def detect_changes(self, old_entry):
509
"""See InventoryEntry.detect_changes."""
510
text_modified = (self.text_sha1 != old_entry.text_sha1)
511
meta_modified = (self.executable != old_entry.executable)
512
return text_modified, meta_modified
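# Illustrative sketch (not part of the original source): for a file entry,
# detect_changes() reduces to two booleans derived from the sha1 and the
# executable bit. Assuming 'old_ie' is a hypothetical entry read from the
# basis tree and 'new_ie' was populated via _read_tree_state():
#
#   >>> text_modified, meta_modified = new_ie.detect_changes(old_ie)
#   >>> text_modified      # True when the text_sha1 values differ
#   True
#   >>> meta_modified      # True when only the executable bit differs
#   False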
514
def _diff(self, text_diff, from_label, tree, to_label, to_entry, to_tree,
515
output_to, reverse=False):
516
"""See InventoryEntry._diff."""
517
from bzrlib.diff import DiffText
518
from_file_id = self.file_id
520
to_file_id = to_entry.file_id
524
to_file_id, from_file_id = from_file_id, to_file_id
525
tree, to_tree = to_tree, tree
526
from_label, to_label = to_label, from_label
527
differ = DiffText(tree, to_tree, output_to, 'utf-8', '', '',
529
return differ.diff_text(from_file_id, to_file_id, from_label, to_label)
532
"""See InventoryEntry.has_text."""
535
def __init__(self, file_id, name, parent_id):
536
super(InventoryFile, self).__init__(file_id, name, parent_id)
539
def kind_character(self):
540
"""See InventoryEntry.kind_character."""
543
def _put_in_tar(self, item, tree):
544
"""See InventoryEntry._put_in_tar."""
545
item.type = tarfile.REGTYPE
546
fileobj = tree.get_file(self.file_id)
547
item.size = self.text_size
548
if tree.is_executable(self.file_id):
554
def _put_on_disk(self, fullpath, tree):
555
"""See InventoryEntry._put_on_disk."""
556
osutils.pumpfile(tree.get_file(self.file_id), file(fullpath, 'wb'))
557
if tree.is_executable(self.file_id):
558
os.chmod(fullpath, 0755)
560
def _read_tree_state(self, path, work_tree):
561
"""See InventoryEntry._read_tree_state."""
562
self.text_sha1 = work_tree.get_file_sha1(self.file_id, path=path)
563
# FIXME: 20050930 probe for the text size when getting sha1
564
# in _read_tree_state
565
self.executable = work_tree.is_executable(self.file_id, path=path)
568
return ("%s(%r, %r, parent_id=%r, sha1=%r, len=%s, revision=%s)"
569
% (self.__class__.__name__,
577
def _forget_tree_state(self):
578
self.text_sha1 = None
580
def _unchanged(self, previous_ie):
581
"""See InventoryEntry._unchanged."""
582
compatible = super(InventoryFile, self)._unchanged(previous_ie)
583
if self.text_sha1 != previous_ie.text_sha1:
586
# FIXME: 20050930 probe for the text size when getting sha1
587
# in _read_tree_state
588
self.text_size = previous_ie.text_size
589
if self.executable != previous_ie.executable:
594
class InventoryLink(InventoryEntry):
595
"""A file in an inventory."""
597
__slots__ = ['text_sha1', 'text_size', 'file_id', 'name', 'kind',
598
'text_id', 'parent_id', 'children', 'executable',
599
'revision', 'symlink_target', 'reference_revision']
601
def _check(self, checker, tree_revision_id):
602
"""See InventoryEntry._check"""
603
if self.text_sha1 is not None or self.text_size is not None or self.text_id is not None:
604
checker._report_items.append(
605
'symlink {%s} has text in revision {%s}'
606
% (self.file_id, tree_revision_id))
607
if self.symlink_target is None:
608
checker._report_items.append(
609
'symlink {%s} has no target in revision {%s}'
610
% (self.file_id, tree_revision_id))
611
# Symlinks are stored as ''
612
checker.add_pending_item(tree_revision_id,
613
('texts', self.file_id, self.revision), 'text',
614
'da39a3ee5e6b4b0d3255bfef95601890afd80709')
617
other = InventoryLink(self.file_id, self.name, self.parent_id)
618
other.symlink_target = self.symlink_target
619
other.revision = self.revision
622
def detect_changes(self, old_entry):
623
"""See InventoryEntry.detect_changes."""
624
# FIXME: which _modified field should we use ? RBC 20051003
625
text_modified = (self.symlink_target != old_entry.symlink_target)
627
mutter(" symlink target changed")
628
meta_modified = False
629
return text_modified, meta_modified
631
def _diff(self, text_diff, from_label, tree, to_label, to_entry, to_tree,
632
output_to, reverse=False):
633
"""See InventoryEntry._diff."""
634
from bzrlib.diff import DiffSymlink
635
old_target = self.symlink_target
636
if to_entry is not None:
637
new_target = to_entry.symlink_target
646
new_target, old_target = old_target, new_target
647
differ = DiffSymlink(old_tree, new_tree, output_to)
648
return differ.diff_symlink(old_target, new_target)
650
def __init__(self, file_id, name, parent_id):
651
super(InventoryLink, self).__init__(file_id, name, parent_id)
652
self.kind = 'symlink'
654
def kind_character(self):
655
"""See InventoryEntry.kind_character."""
658
def _put_in_tar(self, item, tree):
659
"""See InventoryEntry._put_in_tar."""
660
item.type = tarfile.SYMTYPE
664
item.linkname = self.symlink_target
667
def _put_on_disk(self, fullpath, tree):
668
"""See InventoryEntry._put_on_disk."""
670
os.symlink(self.symlink_target, fullpath)
672
raise BzrError("Failed to create symlink %r -> %r, error: %s" % (fullpath, self.symlink_target, e))
674
def _read_tree_state(self, path, work_tree):
675
"""See InventoryEntry._read_tree_state."""
676
self.symlink_target = work_tree.get_symlink_target(self.file_id)
678
def _forget_tree_state(self):
679
self.symlink_target = None
681
def _unchanged(self, previous_ie):
682
"""See InventoryEntry._unchanged."""
683
compatible = super(InventoryLink, self)._unchanged(previous_ie)
684
if self.symlink_target != previous_ie.symlink_target:
689
class TreeReference(InventoryEntry):
691
kind = 'tree-reference'
693
def __init__(self, file_id, name, parent_id, revision=None,
694
reference_revision=None):
695
InventoryEntry.__init__(self, file_id, name, parent_id)
696
self.revision = revision
697
self.reference_revision = reference_revision
700
return TreeReference(self.file_id, self.name, self.parent_id,
701
self.revision, self.reference_revision)
703
def _read_tree_state(self, path, work_tree):
704
"""Populate fields in the inventory entry from the given tree.
706
self.reference_revision = work_tree.get_reference_revision(
709
def _forget_tree_state(self):
710
self.reference_revision = None
712
def _unchanged(self, previous_ie):
713
"""See InventoryEntry._unchanged."""
714
compatible = super(TreeReference, self)._unchanged(previous_ie)
715
if self.reference_revision != previous_ie.reference_revision:
720
class CommonInventory(object):
721
"""Basic inventory logic, defined in terms of primitives like has_id.
723
An inventory is the metadata about the contents of a tree.
725
This is broadly a map from file_id to entries such as directories, files,
726
symlinks and tree references. Each entry maintains its own metadata like
727
SHA1 and length for files, or children for a directory.
257
729
Entries can be looked up either by path or by file_id.
259
The inventory represents a typical unix file tree, with
260
directories containing files and subdirectories. We never store
261
the full path to a file, because renaming a directory implicitly
262
moves all of its contents. This class internally maintains a
263
lookup tree that allows the children under a directory to be
266
731
InventoryEntry objects must not be modified after they are
267
732
inserted, other than through the Inventory API.
269
>>> inv = Inventory()
270
>>> inv.add(InventoryEntry('123-123', 'hello.c', 'file', ROOT_ID))
271
>>> inv['123-123'].name
274
May be treated as an iterator or set to look up file ids:
276
>>> bool(inv.path2id('hello.c'))
281
May also look up by name:
283
>>> [x[0] for x in inv.iter_entries()]
285
>>> inv = Inventory('TREE_ROOT-12345678-12345678')
286
>>> inv.add(InventoryEntry('123-123', 'hello.c', 'file', ROOT_ID))
288
def __init__(self, root_id=ROOT_ID):
289
"""Create or read an inventory.
291
If a working directory is specified, the inventory is read
292
from there. If the file is specified, read from that. If not,
293
the inventory is created empty.
295
The inventory is created with a default root directory, with
298
# We are letting Branch(init=True) create a unique inventory
299
# root id. Rather than generating a random one here.
301
# root_id = bzrlib.branch.gen_file_id('TREE_ROOT')
302
self.root = RootEntry(root_id)
303
self._byid = {self.root.file_id: self.root}
307
return iter(self._byid)
311
"""Returns number of entries."""
312
return len(self._byid)
315
def iter_entries(self, from_dir=None):
316
"""Return (path, entry) pairs, in order by name."""
320
elif isinstance(from_dir, basestring):
321
from_dir = self._byid[from_dir]
323
kids = from_dir.children.items()
325
for name, ie in kids:
327
if ie.kind == 'directory':
328
for cn, cie in self.iter_entries(from_dir=ie.file_id):
329
yield os.path.join(name, cn), cie
735
def __contains__(self, file_id):
736
"""True if this entry contains a file with given id.
738
>>> inv = Inventory()
739
>>> inv.add(InventoryFile('123', 'foo.c', ROOT_ID))
740
InventoryFile('123', 'foo.c', parent_id='TREE_ROOT', sha1=None, len=None, revision=None)
746
Note that this method along with __iter__ is not encouraged for use as
they are less clear than specific query methods - they may be removed
750
return self.has_id(file_id)
752
def has_filename(self, filename):
753
return bool(self.path2id(filename))
755
def id2path(self, file_id):
756
"""Return as a string the path to file_id.
759
>>> e = i.add(InventoryDirectory('src-id', 'src', ROOT_ID))
760
>>> e = i.add(InventoryFile('foo-id', 'foo.c', parent_id='src-id'))
761
>>> print i.id2path('foo-id')
764
:raises NoSuchId: If file_id is not present in the inventory.
766
# get all names, skipping root
767
return '/'.join(reversed(
768
[parent.name for parent in
769
self._iter_file_id_parents(file_id)][:-1]))
771
def iter_entries(self, from_dir=None, recursive=True):
772
"""Return (path, entry) pairs, in order by name.
774
:param from_dir: if None, start from the root,
775
otherwise start from this directory (either file-id or entry)
776
:param recursive: recurse into directories or not
779
if self.root is None:
783
elif isinstance(from_dir, basestring):
784
from_dir = self[from_dir]
786
# unrolling the recursive calls changed the time from
787
# 440ms/663ms (inline/total) to 116ms/116ms
788
children = from_dir.children.items()
791
for name, ie in children:
794
children = collections.deque(children)
795
stack = [(u'', children)]
797
from_dir_relpath, children = stack[-1]
800
name, ie = children.popleft()
802
# we know that from_dir_relpath never ends in a slash
803
# and 'f' doesn't begin with one, we can do a string op, rather
804
# than the checks of pathjoin(), though this means that all paths
806
path = from_dir_relpath + '/' + name
810
if ie.kind != 'directory':
813
# But do this child first
814
new_children = ie.children.items()
816
new_children = collections.deque(new_children)
817
stack.append((path, new_children))
818
# Break out of inner loop, so that we start outer loop with child
821
# if we finished all children, pop it off the stack
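# Illustrative usage (not from the original source), assuming a small
# hypothetical inventory whose root contains a 'src' directory holding
# 'hello.c':
#
#   >>> [path for path, ie in inv.iter_entries()]
#   ['src', 'src/hello.c']
#
# With recursive=False only the direct children of from_dir are yielded.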
824
def iter_entries_by_dir(self, from_dir=None, specific_file_ids=None,
825
yield_parents=False):
826
"""Iterate over the entries in a directory first order.
828
This returns all entries for a directory before returning
829
the entries for children of a directory. This is not
830
lexicographically sorted order, and is a hybrid between
831
depth-first and breadth-first.
833
:param yield_parents: If True, yield the parents from the root leading
834
down to specific_file_ids that have been requested. This has no
835
impact if specific_file_ids is None.
836
:return: This yields (path, entry) pairs
838
if specific_file_ids and not isinstance(specific_file_ids, set):
839
specific_file_ids = set(specific_file_ids)
840
# TODO? Perhaps this should return the from_dir so that the root is
841
# yielded? or maybe an option?
843
if self.root is None:
845
# Optimize a common case
846
if (not yield_parents and specific_file_ids is not None and
847
len(specific_file_ids) == 1):
848
file_id = list(specific_file_ids)[0]
850
yield self.id2path(file_id), self[file_id]
853
if (specific_file_ids is None or yield_parents or
854
self.root.file_id in specific_file_ids):
856
elif isinstance(from_dir, basestring):
857
from_dir = self[from_dir]
859
if specific_file_ids is not None:
860
# TODO: jam 20070302 This could really be done as a loop rather
861
# than a bunch of recursive calls.
864
def add_ancestors(file_id):
865
if file_id not in byid:
867
parent_id = byid[file_id].parent_id
868
if parent_id is None:
870
if parent_id not in parents:
871
parents.add(parent_id)
872
add_ancestors(parent_id)
873
for file_id in specific_file_ids:
874
add_ancestors(file_id)
878
stack = [(u'', from_dir)]
880
cur_relpath, cur_dir = stack.pop()
883
for child_name, child_ie in sorted(cur_dir.children.iteritems()):
885
child_relpath = cur_relpath + child_name
887
if (specific_file_ids is None or
888
child_ie.file_id in specific_file_ids or
889
(yield_parents and child_ie.file_id in parents)):
890
yield child_relpath, child_ie
892
if child_ie.kind == 'directory':
893
if parents is None or child_ie.file_id in parents:
894
child_dirs.append((child_relpath+'/', child_ie))
895
stack.extend(reversed(child_dirs))
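# Illustrative usage (not from the original source): directory-first order
# means every directory is yielded before any of its children. For a
# hypothetical tree with 'doc/', 'src/' and 'src/hello.c':
#
#   >>> [p for p, ie in inv.iter_entries_by_dir()]
#   ['', 'doc', 'src', 'src/hello.c']
#
# With specific_file_ids={'hello-id'} only that entry is yielded, and
# yield_parents=True additionally yields 'src' and the root on the way down.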
897
def _make_delta(self, old):
898
"""Make an inventory delta from two inventories."""
901
adds = new_ids - old_ids
902
deletes = old_ids - new_ids
903
common = old_ids.intersection(new_ids)
905
for file_id in deletes:
906
delta.append((old.id2path(file_id), None, file_id, None))
908
delta.append((None, self.id2path(file_id), file_id, self[file_id]))
909
for file_id in common:
910
if old[file_id] != self[file_id]:
911
delta.append((old.id2path(file_id), self.id2path(file_id),
912
file_id, self[file_id]))
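# Illustrative sketch (not part of the original source): the delta built
# above is a list of (old_path, new_path, file_id, entry) tuples, one per
# added, removed or changed file id. For a hypothetical rename of
# 'hello.c' to 'goodbye.c' it would contain a single row:
#
#   >>> new_inv._make_delta(old_inv)
#   [('hello.c', 'goodbye.c', 'hello-id', <InventoryFile ...>)]
#
# Such a delta is suitable for feeding back into Inventory.apply_delta().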
915
def _get_mutable_inventory(self):
916
"""Returns a mutable copy of the object.
918
Some inventories are immutable, yet working trees, for example, need
to mutate existing inventories instead of creating a new one.
921
raise NotImplementedError(self._get_mutable_inventory)
923
def make_entry(self, kind, name, parent_id, file_id=None):
924
"""Simple thunk to bzrlib.inventory.make_entry."""
925
return make_entry(kind, name, parent_id, file_id)
def entries(self):
"""Return list of (path, ie) for all entries except the root.

kids = dir_ie.children.items()
for name, ie in kids:
child_path = osutils.pathjoin(dir_path, name)
accum.append((child_path, ie))
if ie.kind == 'directory':
descend(ie, child_path)

descend(self.root, u'')

def directories(self):
"""Return (path, entry) pairs for all directories, including the root.

def descend(parent_ie, parent_path):
accum.append((parent_path, parent_ie))

kids = [(ie.name, ie) for ie in parent_ie.children.itervalues() if ie.kind == 'directory']

for name, child_ie in kids:
child_path = osutils.pathjoin(parent_path, name)
descend(child_ie, child_path)

descend(self.root, u'')
961
def path2id(self, name):
962
"""Walk down through directories to return entry of last component.
964
name may be either a list of path components, or a single
string, in which case it is automatically split.
967
This returns the file id of the last component in the path,
which may be either a file or a directory.
970
Returns None IFF the path is not found.
972
if isinstance(name, basestring):
973
name = osutils.splitpath(name)
975
# mutter("lookup path %r" % name)
979
except errors.NoSuchId:
980
# root doesn't exist yet so nothing else can
986
children = getattr(parent, 'children', None)
995
return parent.file_id
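# Illustrative usage (not from the original source), assuming a
# hypothetical inventory where 'hello-id' lives at 'src/hello.c':
#
#   >>> inv.path2id('src/hello.c')
#   'hello-id'
#   >>> inv.path2id(['src', 'hello.c'])
#   'hello-id'
#   >>> inv.path2id('no/such/path') is None
#   True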
997
def filter(self, specific_fileids):
998
"""Get an inventory view filtered against a set of file-ids.
1000
Children of directories and parents are included.
1002
The result may or may not reference the underlying inventory
1003
so it should be treated as immutable.
1005
interesting_parents = set()
1006
for fileid in specific_fileids:
1008
interesting_parents.update(self.get_idpath(fileid))
1009
except errors.NoSuchId:
1010
# This fileid is not in the inventory - that's ok
1012
entries = self.iter_entries()
1013
if self.root is None:
1014
return Inventory(root_id=None)
1015
other = Inventory(entries.next()[1].file_id)
1016
other.root.revision = self.root.revision
1017
other.revision_id = self.revision_id
1018
directories_to_expand = set()
1019
for path, entry in entries:
1020
file_id = entry.file_id
1021
if (file_id in specific_fileids
1022
or entry.parent_id in directories_to_expand):
1023
if entry.kind == 'directory':
1024
directories_to_expand.add(file_id)
1025
elif file_id not in interesting_parents:
1027
other.add(entry.copy())
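# Illustrative usage (not from the original source): filtering on a single
# hypothetical file id keeps that entry plus its parent directories, and
# would also keep the children if the id named a directory:
#
#   >>> filtered = inv.filter(['hello-id'])
#   >>> sorted(p for p, ie in filtered.iter_entries())
#   ['src', 'src/hello.c']
#
# Unknown file ids are silently ignored, as noted above.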
1030
def get_idpath(self, file_id):
1031
"""Return a list of file_ids for the path to an entry.
1033
The list contains one element for each directory followed by
1034
the id of the file itself. So the length of the returned list
1035
is equal to the depth of the file in the tree, counting the
1036
root directory as depth 1.
1039
for parent in self._iter_file_id_parents(file_id):
1040
p.insert(0, parent.file_id)
1044
class Inventory(CommonInventory):
1045
"""Mutable dict based in-memory inventory.
1047
We never store the full path to a file, because renaming a directory
1048
implicitly moves all of its contents. This class internally maintains a
1049
lookup tree that allows the children under a directory to be
1052
>>> inv = Inventory()
1053
>>> inv.add(InventoryFile('123-123', 'hello.c', ROOT_ID))
1054
InventoryFile('123-123', 'hello.c', parent_id='TREE_ROOT', sha1=None, len=None, revision=None)
1055
>>> inv['123-123'].name
1058
Ids may be looked up from paths:
1060
>>> inv.path2id('hello.c')
1062
>>> '123-123' in inv
1065
There are iterators over the contents:
1067
>>> [entry[0] for entry in inv.iter_entries()]
1071
def __init__(self, root_id=ROOT_ID, revision_id=None):
1072
"""Create or read an inventory.
1074
If a working directory is specified, the inventory is read
1075
from there. If the file is specified, read from that. If not,
1076
the inventory is created empty.
1078
The inventory is created with a default root directory, with
1081
if root_id is not None:
1082
self._set_root(InventoryDirectory(root_id, u'', None))
1086
self.revision_id = revision_id
1089
# More than one page of output is not useful anymore to debug
1092
contents = repr(self._byid)
1093
if len(contents) > max_len:
1094
contents = contents[:(max_len-len(closing))] + closing
1095
return "<Inventory object at %x, contents=%r>" % (id(self), contents)
1097
def apply_delta(self, delta):
1098
"""Apply a delta to this inventory.
1100
See the inventory developers documentation for the theory behind
1103
If delta application fails the inventory is left in an indeterminate
1104
state and must not be used.
1106
:param delta: A list of changes to apply. After all the changes are
1107
applied the final inventory must be internally consistent, but it
1108
is ok to supply changes which, if only half-applied would have an
1109
invalid result - such as supplying two changes which rename two
1110
files, 'A' and 'B' with each other : [('A', 'B', 'A-id', a_entry),
1111
('B', 'A', 'B-id', b_entry)].
1113
Each change is a tuple, of the form (old_path, new_path, file_id,
1116
When new_path is None, the change indicates the removal of an entry
1117
from the inventory and new_entry will be ignored (using None is
1118
appropriate). If new_path is not None, then new_entry must be an
1119
InventoryEntry instance, which will be incorporated into the
1120
inventory (and replace any existing entry with the same file id).
1122
When old_path is None, the change indicates the addition of
1123
a new entry to the inventory.
1125
When neither new_path nor old_path are None, the change is a
1126
modification to an entry, such as a rename, reparent, kind change
1129
The children attribute of new_entry is ignored. This is because
1130
this method preserves children automatically across alterations to
1131
the parent of the children, and cases where the parent id of a
1132
child is changing require the child to be passed in as a separate
1133
change regardless. E.g. in the recursive deletion of a directory -
1134
the directory's children must be included in the delta, or the
1135
final inventory will be invalid.
1137
Note that a file_id must only appear once within a given delta.
1138
An AssertionError is raised otherwise.
1140
# Check that the delta is legal. It would be nice if this could be
1141
# done within the loops below but it's safer to validate the delta
1142
# before starting to mutate the inventory, as there isn't a rollback
1144
list(_check_delta_unique_ids(_check_delta_unique_new_paths(
1145
_check_delta_unique_old_paths(_check_delta_ids_match_entry(
1146
_check_delta_ids_are_valid(
1147
_check_delta_new_path_entry_both_or_None(
1151
# Remove all affected items which were in the original inventory,
1152
# starting with the longest paths, thus ensuring parents are examined
1153
# after their children, which means that everything we examine has no
1154
# modified children remaining by the time we examine it.
1155
for old_path, file_id in sorted(((op, f) for op, np, f, e in delta
1156
if op is not None), reverse=True):
1157
# Preserve unaltered children of file_id for later reinsertion.
1158
file_id_children = getattr(self[file_id], 'children', {})
1159
if len(file_id_children):
1160
children[file_id] = file_id_children
1161
if self.id2path(file_id) != old_path:
1162
raise errors.InconsistentDelta(old_path, file_id,
1163
"Entry was at wrong other path %r." % self.id2path(file_id))
1164
# Remove file_id and the unaltered children. If file_id is not
1165
# being deleted it will be reinserted back later.
1166
self.remove_recursive_id(file_id)
1167
# Insert all affected which should be in the new inventory, reattaching
1168
# their children if they had any. This is done from shortest path to
1169
# longest, ensuring that items which were modified and whose parents in
1170
# the resulting inventory were also modified, are inserted after their
1172
for new_path, f, new_entry in sorted((np, f, e) for op, np, f, e in
1173
delta if np is not None):
1174
if new_entry.kind == 'directory':
1175
# Pop the child to allow detection of children whose
1176
# parents were deleted and which were not reattached to a new
1178
replacement = InventoryDirectory(new_entry.file_id,
1179
new_entry.name, new_entry.parent_id)
1180
replacement.revision = new_entry.revision
1181
replacement.children = children.pop(replacement.file_id, {})
1182
new_entry = replacement
1185
except errors.DuplicateFileId:
1186
raise errors.InconsistentDelta(new_path, new_entry.file_id,
1187
"New id is already present in target.")
1188
except AttributeError:
1189
raise errors.InconsistentDelta(new_path, new_entry.file_id,
1190
"Parent is not a directory.")
1191
if self.id2path(new_entry.file_id) != new_path:
1192
raise errors.InconsistentDelta(new_path, new_entry.file_id,
1193
"New path is not consistent with parent path.")
1195
# Get the parent id that was deleted
1196
parent_id, children = children.popitem()
1197
raise errors.InconsistentDelta("<deleted>", parent_id,
1198
"The file id was deleted but its children were not deleted.")
1200
def create_by_apply_delta(self, inventory_delta, new_revision_id,
1201
propagate_caches=False):
1202
"""See CHKInventory.create_by_apply_delta()"""
1203
new_inv = self.copy()
1204
new_inv.apply_delta(inventory_delta)
1205
new_inv.revision_id = new_revision_id
1208
def _set_root(self, ie):
1210
self._byid = {self.root.file_id: self.root}
1213
# TODO: jam 20051218 Should copy also copy the revision_id?
1214
entries = self.iter_entries()
1215
if self.root is None:
1216
return Inventory(root_id=None)
1217
other = Inventory(entries.next()[1].file_id)
1218
other.root.revision = self.root.revision
1219
# copy recursively so we know directories will be added before
1220
# their children. There are more efficient ways than this...
1221
for path, entry in entries:
1222
other.add(entry.copy())
1225
def _get_mutable_inventory(self):
1226
"""See CommonInventory._get_mutable_inventory."""
1227
return copy.deepcopy(self)
1230
"""Iterate over all file-ids."""
1231
return iter(self._byid)
1233
def iter_just_entries(self):
1234
"""Iterate over all entries.
369
def __contains__(self, file_id):
370
"""True if this entry contains a file with given id.
372
>>> inv = Inventory()
373
>>> inv.add(InventoryEntry('123', 'foo.c', 'file', ROOT_ID))
1236
Unlike iter_entries(), just the entries are returned (not (path, ie))
1237
and the order of entries is undefined.
1239
XXX: We may not want to merge this into bzr.dev.
379
return file_id in self._byid
1241
if self.root is None:
1243
for _, ie in self._byid.iteritems():
1247
"""Returns number of entries."""
1248
return len(self._byid)
def __getitem__(self, file_id):
"""Return the entry for given file_id.
>>> inv = Inventory()
>>> inv.add(InventoryFile('123123', 'hello.c', ROOT_ID))
1255
InventoryFile('123123', 'hello.c', parent_id='TREE_ROOT', sha1=None, len=None, revision=None)
>>> inv['123123'].name
return self._byid[file_id]
except KeyError:
1262
# really we're passing an inventory, not a tree...
1263
raise errors.NoSuchId(self, file_id)
399
1265
def get_file_kind(self, file_id):
400
1266
return self._byid[file_id].kind
638
1464
del old_parent.children[file_ie.name]
639
1465
new_parent.children[new_name] = file_ie
641
1467
file_ie.name = new_name
642
1468
file_ie.parent_id = new_parent_id
1470
def is_root(self, file_id):
1471
return self.root is not None and file_id == self.root.file_id
1474
class CHKInventory(CommonInventory):
1475
"""An inventory persisted in a CHK store.
1477
By design, a CHKInventory is immutable so many of the methods
1478
supported by Inventory - add, rename, apply_delta, etc - are *not*
1479
supported. To create a new CHKInventory, use create_by_apply_delta()
1480
or from_inventory(), say.
1482
Internally, a CHKInventory has one or two CHKMaps:
1484
* id_to_entry - a map from (file_id,) => InventoryEntry as bytes
1485
* parent_id_basename_to_file_id - a map from (parent_id, basename_utf8)
1488
The second map is optional and not present in early CHK repositories.
1490
No caching is performed: every method call or item access will perform
1491
requests to the storage layer. As such, keep references to objects you
1495
def __init__(self, search_key_name):
1496
CommonInventory.__init__(self)
1497
self._fileid_to_entry_cache = {}
1498
self._path_to_fileid_cache = {}
1499
self._search_key_name = search_key_name
1502
def __eq__(self, other):
1503
"""Compare two sets by comparing their contents."""
1504
if not isinstance(other, CHKInventory):
1505
return NotImplemented
1507
this_key = self.id_to_entry.key()
1508
other_key = other.id_to_entry.key()
1509
this_pid_key = self.parent_id_basename_to_file_id.key()
1510
other_pid_key = other.parent_id_basename_to_file_id.key()
1511
if None in (this_key, this_pid_key, other_key, other_pid_key):
1513
return this_key == other_key and this_pid_key == other_pid_key
1515
def _entry_to_bytes(self, entry):
1516
"""Serialise entry as a single bytestring.
1518
:param entry: An inventory entry.
1519
:return: A bytestring for the entry.
1522
ENTRY ::= FILE | DIR | SYMLINK | TREE
1523
FILE ::= "file: " COMMON SEP SHA SEP SIZE SEP EXECUTABLE
1524
DIR ::= "dir: " COMMON
1525
SYMLINK ::= "symlink: " COMMON SEP TARGET_UTF8
1526
TREE ::= "tree: " COMMON REFERENCE_REVISION
1527
COMMON ::= FILE_ID SEP PARENT_ID SEP NAME_UTF8 SEP REVISION
1530
if entry.parent_id is not None:
1531
parent_str = entry.parent_id
1534
name_str = entry.name.encode("utf8")
1535
if entry.kind == 'file':
1536
if entry.executable:
1540
return "file: %s\n%s\n%s\n%s\n%s\n%d\n%s" % (
1541
entry.file_id, parent_str, name_str, entry.revision,
1542
entry.text_sha1, entry.text_size, exec_str)
1543
elif entry.kind == 'directory':
1544
return "dir: %s\n%s\n%s\n%s" % (
1545
entry.file_id, parent_str, name_str, entry.revision)
1546
elif entry.kind == 'symlink':
1547
return "symlink: %s\n%s\n%s\n%s\n%s" % (
1548
entry.file_id, parent_str, name_str, entry.revision,
1549
entry.symlink_target.encode("utf8"))
1550
elif entry.kind == 'tree-reference':
1551
return "tree: %s\n%s\n%s\n%s\n%s" % (
1552
entry.file_id, parent_str, name_str, entry.revision,
1553
entry.reference_revision)
1555
raise ValueError("unknown kind %r" % entry.kind)
1557
def _expand_fileids_to_parents_and_children(self, file_ids):
1558
"""Give a more wholistic view starting with the given file_ids.
1560
For any file_id which maps to a directory, we will include all children
1561
of that directory. We will also include all directories which are
1562
parents of the given file_ids, but we will not include their children.
1569
fringle # fringle-id
1573
if given [foo-id] we will include
1574
TREE_ROOT as interesting parents
1576
foo-id, baz-id, frob-id, fringle-id
1580
# TODO: Pre-pass over the list of fileids to see if anything is already
1581
# deserialized in self._fileid_to_entry_cache
1583
directories_to_expand = set()
1584
children_of_parent_id = {}
1585
# It is okay if some of the fileids are missing
1586
for entry in self._getitems(file_ids):
1587
if entry.kind == 'directory':
1588
directories_to_expand.add(entry.file_id)
1589
interesting.add(entry.parent_id)
1590
children_of_parent_id.setdefault(entry.parent_id, []
1591
).append(entry.file_id)
1593
# Now, interesting has all of the direct parents, but not the
1594
# parents of those parents. It also may have some duplicates with
1596
remaining_parents = interesting.difference(file_ids)
1597
# When we hit the TREE_ROOT, we'll get an interesting parent of None,
1598
# but we don't actually want to recurse into that
1599
interesting.add(None) # this will auto-filter it in the loop
1600
remaining_parents.discard(None)
1601
while remaining_parents:
1602
if None in remaining_parents:
1603
import pdb; pdb.set_trace()
1604
next_parents = set()
1605
for entry in self._getitems(remaining_parents):
1606
next_parents.add(entry.parent_id)
1607
children_of_parent_id.setdefault(entry.parent_id, []
1608
).append(entry.file_id)
1609
# Remove any search tips we've already processed
1610
remaining_parents = next_parents.difference(interesting)
1611
interesting.update(remaining_parents)
1612
# We should probably also .difference(directories_to_expand)
1613
interesting.update(file_ids)
1614
interesting.discard(None)
1615
while directories_to_expand:
1616
# Expand directories by looking in the
1617
# parent_id_basename_to_file_id map
1618
keys = [(f,) for f in directories_to_expand]
1619
directories_to_expand = set()
1620
items = self.parent_id_basename_to_file_id.iteritems(keys)
1621
next_file_ids = set([item[1] for item in items])
1622
next_file_ids = next_file_ids.difference(interesting)
1623
interesting.update(next_file_ids)
1624
for entry in self._getitems(next_file_ids):
1625
if entry.kind == 'directory':
1626
directories_to_expand.add(entry.file_id)
1627
children_of_parent_id.setdefault(entry.parent_id, []
1628
).append(entry.file_id)
1629
return interesting, children_of_parent_id
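# Illustrative sketch (not from the original source): given a hypothetical
# directory id 'src-id' whose only child is 'hello-id', and assuming the
# root id is the default TREE_ROOT, the method returns both the ids to keep
# and a parent -> children map that filter() can walk top-down:
#
#   >>> interesting, children_of = inv._expand_fileids_to_parents_and_children(['src-id'])
#   >>> 'TREE_ROOT' in interesting      # parents of src/ are included
#   True
#   >>> children_of['src-id']           # children of a named directory
#   ['hello-id']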
1631
def filter(self, specific_fileids):
1632
"""Get an inventory view filtered against a set of file-ids.
1634
Children of directories and parents are included.
1636
The result may or may not reference the underlying inventory
1637
so it should be treated as immutable.
1640
parent_to_children) = self._expand_fileids_to_parents_and_children(
1642
# There is some overlap here, but we assume that all interesting items
1643
# are in the _fileid_to_entry_cache because we had to read them to
1644
# determine if they were a dir we wanted to recurse, or just a file
1645
# This should give us all the entries we'll want to add, so start
1647
other = Inventory(self.root_id)
1648
other.root.revision = self.root.revision
1649
other.revision_id = self.revision_id
1650
if not interesting or not parent_to_children:
1651
# empty filter, or filtering entries that don't exist
1652
# (if even 1 existed, then we would have populated
1653
# parent_to_children with at least the tree root.)
1655
cache = self._fileid_to_entry_cache
1657
remaining_children = collections.deque(parent_to_children[self.root_id])
1659
1661
while remaining_children:
1662
file_id = remaining_children.popleft()
1664
if ie.kind == 'directory':
1665
ie = ie.copy() # We create a copy to depopulate the .children attribute
1666
# TODO: depending on the uses of 'other' we should probably always
# '.copy()' to prevent someone from mutating other and
# invalidating our internal cache
1670
if file_id in parent_to_children:
1671
remaining_children.extend(parent_to_children[file_id])
1675
def _bytes_to_utf8name_key(bytes):
1676
"""Get the file_id, revision_id key out of bytes."""
1677
# We don't normally care about name, except for times when we want
1678
# to filter out empty names because of non rich-root...
1679
sections = bytes.split('\n')
1680
kind, file_id = sections[0].split(': ')
1681
return (sections[2], file_id, sections[3])
1683
def _bytes_to_entry(self, bytes):
1684
"""Deserialise a serialised entry."""
1685
sections = bytes.split('\n')
1686
if sections[0].startswith("file: "):
1687
result = InventoryFile(sections[0][6:],
1688
sections[2].decode('utf8'),
1690
result.text_sha1 = sections[4]
1691
result.text_size = int(sections[5])
1692
result.executable = sections[6] == "Y"
1693
elif sections[0].startswith("dir: "):
1694
result = CHKInventoryDirectory(sections[0][5:],
1695
sections[2].decode('utf8'),
1697
elif sections[0].startswith("symlink: "):
1698
result = InventoryLink(sections[0][9:],
1699
sections[2].decode('utf8'),
1701
result.symlink_target = sections[4].decode('utf8')
1702
elif sections[0].startswith("tree: "):
1703
result = TreeReference(sections[0][6:],
1704
sections[2].decode('utf8'),
1706
result.reference_revision = sections[4]
1708
raise ValueError("Not a serialised entry %r" % bytes)
1709
result.revision = sections[3]
1710
if result.parent_id == '':
1711
result.parent_id = None
1712
self._fileid_to_entry_cache[result.file_id] = result
1715
def _get_mutable_inventory(self):
1716
"""See CommonInventory._get_mutable_inventory."""
1717
entries = self.iter_entries()
1718
inv = Inventory(None, self.revision_id)
1719
for path, inv_entry in entries:
1720
inv.add(inv_entry.copy())
1723
def create_by_apply_delta(self, inventory_delta, new_revision_id,
1724
propagate_caches=False):
1725
"""Create a new CHKInventory by applying inventory_delta to this one.
1727
See the inventory developers documentation for the theory behind
1730
:param inventory_delta: The inventory delta to apply. See
1731
Inventory.apply_delta for details.
1732
:param new_revision_id: The revision id of the resulting CHKInventory.
1733
:param propagate_caches: If True, the caches for this inventory are
1734
copied to and updated for the result.
1735
:return: The new CHKInventory.
1737
split = osutils.split
1738
result = CHKInventory(self._search_key_name)
1739
if propagate_caches:
1740
# Just propagate the path-to-fileid cache for now
1741
result._path_to_fileid_cache = dict(self._path_to_fileid_cache.iteritems())
1742
search_key_func = chk_map.search_key_registry.get(self._search_key_name)
1743
self.id_to_entry._ensure_root()
1744
maximum_size = self.id_to_entry._root_node.maximum_size
1745
result.revision_id = new_revision_id
1746
result.id_to_entry = chk_map.CHKMap(
1747
self.id_to_entry._store,
1748
self.id_to_entry.key(),
1749
search_key_func=search_key_func)
1750
result.id_to_entry._ensure_root()
1751
result.id_to_entry._root_node.set_maximum_size(maximum_size)
1752
# Change to apply to the parent_id_basename delta. The dict maps
1753
# (parent_id, basename) -> (old_key, new_value). We use a dict because
1754
# when a path has its id replaced (e.g. the root is changed, or someone
1755
# does bzr mv a b, bzr mv c a), we should output a single change to this
1756
# map rather than two.
1757
parent_id_basename_delta = {}
1758
if self.parent_id_basename_to_file_id is not None:
1759
result.parent_id_basename_to_file_id = chk_map.CHKMap(
1760
self.parent_id_basename_to_file_id._store,
1761
self.parent_id_basename_to_file_id.key(),
1762
search_key_func=search_key_func)
1763
result.parent_id_basename_to_file_id._ensure_root()
1764
self.parent_id_basename_to_file_id._ensure_root()
1765
result_p_id_root = result.parent_id_basename_to_file_id._root_node
1766
p_id_root = self.parent_id_basename_to_file_id._root_node
1767
result_p_id_root.set_maximum_size(p_id_root.maximum_size)
1768
result_p_id_root._key_width = p_id_root._key_width
1770
result.parent_id_basename_to_file_id = None
1771
result.root_id = self.root_id
1772
id_to_entry_delta = []
1773
# inventory_delta is only traversed once, so we just update the
1775
# Check for repeated file ids
1776
inventory_delta = _check_delta_unique_ids(inventory_delta)
1777
# Repeated old paths
1778
inventory_delta = _check_delta_unique_old_paths(inventory_delta)
1779
# Check for repeated new paths
1780
inventory_delta = _check_delta_unique_new_paths(inventory_delta)
1781
# Check for entries that don't match the fileid
1782
inventory_delta = _check_delta_ids_match_entry(inventory_delta)
1783
# Check for nonsense fileids
1784
inventory_delta = _check_delta_ids_are_valid(inventory_delta)
1785
# Check for new_path <-> entry consistency
1786
inventory_delta = _check_delta_new_path_entry_both_or_None(
1788
# All changed entries need to have their parents be directories and be
1789
# at the right path. This set contains (path, id) tuples.
1791
# When we delete an item, all the children of it must be either deleted
1792
# or altered in their own right. As we batch process the change via
1793
# CHKMap.apply_delta, we build a set of things to use to validate the
1797
for old_path, new_path, file_id, entry in inventory_delta:
1800
result.root_id = file_id
1801
if new_path is None:
1806
if propagate_caches:
1808
del result._path_to_fileid_cache[old_path]
1811
deletes.add(file_id)
1813
new_key = (file_id,)
1814
new_value = result._entry_to_bytes(entry)
1815
# Update caches. It's worth doing this whether
1816
# we're propagating the old caches or not.
1817
result._path_to_fileid_cache[new_path] = file_id
1818
parents.add((split(new_path)[0], entry.parent_id))
1819
if old_path is None:
1822
old_key = (file_id,)
1823
if self.id2path(file_id) != old_path:
1824
raise errors.InconsistentDelta(old_path, file_id,
1825
"Entry was at wrong other path %r." %
1826
self.id2path(file_id))
1827
altered.add(file_id)
1828
id_to_entry_delta.append((old_key, new_key, new_value))
1829
if result.parent_id_basename_to_file_id is not None:
1830
# parent_id, basename changes
1831
if old_path is None:
1834
old_entry = self[file_id]
1835
old_key = self._parent_id_basename_key(old_entry)
1836
if new_path is None:
1840
new_key = self._parent_id_basename_key(entry)
1842
# If the two keys are the same, the value will be unchanged
1843
# as it's always the file id for this entry.
1844
if old_key != new_key:
1845
# Transform a change into explicit delete/add preserving
1846
# a possible match on the key from a different file id.
1847
if old_key is not None:
1848
parent_id_basename_delta.setdefault(
1849
old_key, [None, None])[0] = old_key
1850
if new_key is not None:
1851
parent_id_basename_delta.setdefault(
1852
new_key, [None, None])[1] = new_value
1853
# validate that deletes are complete.
1854
for file_id in deletes:
1855
entry = self[file_id]
1856
if entry.kind != 'directory':
1858
# This loop could potentially be better by using the id_basename
1859
# map to just get the child file ids.
1860
for child in entry.children.values():
1861
if child.file_id not in altered:
1862
raise errors.InconsistentDelta(self.id2path(child.file_id),
1863
child.file_id, "Child not deleted or reparented when "
1865
result.id_to_entry.apply_delta(id_to_entry_delta)
1866
if parent_id_basename_delta:
1867
# Transform the parent_id_basename delta data into a linear delta
1868
# with only one record for a given key. Optimally this would allow
1869
# re-keying, but it's simpler to just output that as a delete+add
1870
# to spend less time calculating the delta.
1872
for key, (old_key, value) in parent_id_basename_delta.iteritems():
1873
if value is not None:
1874
delta_list.append((old_key, key, value))
1876
delta_list.append((old_key, None, None))
1877
result.parent_id_basename_to_file_id.apply_delta(delta_list)
1878
parents.discard(('', None))
1879
for parent_path, parent in parents:
1881
if result[parent].kind != 'directory':
1882
raise errors.InconsistentDelta(result.id2path(parent), parent,
1883
'Not a directory, but given children')
1884
except errors.NoSuchId:
1885
raise errors.InconsistentDelta("<unknown>", parent,
1886
"Parent is not present in resulting inventory.")
1887
if result.path2id(parent_path) != parent:
1888
raise errors.InconsistentDelta(parent_path, parent,
1889
"Parent has wrong path %r." % result.path2id(parent_path))
1893
def deserialise(klass, chk_store, bytes, expected_revision_id):
1894
"""Deserialise a CHKInventory.
1896
:param chk_store: A CHK capable VersionedFiles instance.
1897
:param bytes: The serialised bytes.
1898
:param expected_revision_id: The revision ID we think this inventory is
1900
:return: A CHKInventory
1902
lines = bytes.split('\n')
1904
raise AssertionError('bytes to deserialize must end with an eol')
1906
if lines[0] != 'chkinventory:':
1907
raise ValueError("not a serialised CHKInventory: %r" % bytes)
1909
allowed_keys = frozenset(['root_id', 'revision_id', 'search_key_name',
1910
'parent_id_basename_to_file_id',
1912
for line in lines[1:]:
1913
key, value = line.split(': ', 1)
1914
if key not in allowed_keys:
1915
raise errors.BzrError('Unknown key in inventory: %r\n%r'
1918
raise errors.BzrError('Duplicate key in inventory: %r\n%r'
1921
revision_id = info['revision_id']
1922
root_id = info['root_id']
1923
search_key_name = info.get('search_key_name', 'plain')
1924
parent_id_basename_to_file_id = info.get(
1925
'parent_id_basename_to_file_id', None)
1926
id_to_entry = info['id_to_entry']
1928
result = CHKInventory(search_key_name)
1929
result.revision_id = revision_id
1930
result.root_id = root_id
1931
search_key_func = chk_map.search_key_registry.get(
1932
result._search_key_name)
1933
if parent_id_basename_to_file_id is not None:
1934
result.parent_id_basename_to_file_id = chk_map.CHKMap(
1935
chk_store, (parent_id_basename_to_file_id,),
1936
search_key_func=search_key_func)
1938
result.parent_id_basename_to_file_id = None
1940
result.id_to_entry = chk_map.CHKMap(chk_store, (id_to_entry,),
1941
search_key_func=search_key_func)
1942
if (result.revision_id,) != expected_revision_id:
1943
raise ValueError("Mismatched revision id and expected: %r, %r" %
1944
(result.revision_id, expected_revision_id))
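    # Illustrative sketch (not part of the original source): the text format
    # that deserialise() parses is a small block of 'key: value' lines behind
    # a 'chkinventory:' header.  The keys below mirror allowed_keys; the
    # sha1 values are invented samples.
    def _example_parse_chkinventory_header(self, bytes):
        # bytes would look like:
        #   chkinventory:
        #   revision_id: rev-1
        #   root_id: TREE_ROOT
        #   parent_id_basename_to_file_id: sha1:aaaa
        #   id_to_entry: sha1:bbbb
        info = {}
        for line in bytes.split('\n')[1:]:
            if not line:
                continue
            key, value = line.split(': ', 1)
            info[key] = value
        return info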
1948
def from_inventory(klass, chk_store, inventory, maximum_size=0, search_key_name='plain'):
1949
"""Create a CHKInventory from an existing inventory.
1951
The content of inventory is copied into the chk_store, and a
1952
CHKInventory referencing that is returned.
1954
:param chk_store: A CHK capable VersionedFiles instance.
1955
:param inventory: The inventory to copy.
1956
:param maximum_size: The CHKMap node size limit.
1957
:param search_key_name: The identifier for the search key function
1959
result = klass(search_key_name)
1960
result.revision_id = inventory.revision_id
1961
result.root_id = inventory.root.file_id
1963
entry_to_bytes = result._entry_to_bytes
1964
parent_id_basename_key = result._parent_id_basename_key
1965
id_to_entry_dict = {}
1966
parent_id_basename_dict = {}
1967
for path, entry in inventory.iter_entries():
1968
id_to_entry_dict[(entry.file_id,)] = entry_to_bytes(entry)
1969
p_id_key = parent_id_basename_key(entry)
1970
parent_id_basename_dict[p_id_key] = entry.file_id
1972
result._populate_from_dicts(chk_store, id_to_entry_dict,
1973
parent_id_basename_dict, maximum_size=maximum_size)
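    # Usage sketch (illustrative, not part of the original source): building a
    # CHKInventory from a plain Inventory.  `chk_store` is assumed to be a CHK
    # capable VersionedFiles and `plain_inv` a populated Inventory with a
    # revision_id set.
    def _example_from_inventory_usage(self, chk_store, plain_inv):
        chk_inv = CHKInventory.from_inventory(chk_store, plain_inv,
            maximum_size=4096, search_key_name='plain')
        # both CHKMaps are now rooted in chk_store
        return chk_inv.id_to_entry.key(), chk_inv.parent_id_basename_to_file_id.key()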
1976
def _populate_from_dicts(self, chk_store, id_to_entry_dict,
1977
parent_id_basename_dict, maximum_size):
1978
search_key_func = chk_map.search_key_registry.get(self._search_key_name)
1979
root_key = chk_map.CHKMap.from_dict(chk_store, id_to_entry_dict,
1980
maximum_size=maximum_size, key_width=1,
1981
search_key_func=search_key_func)
1982
self.id_to_entry = chk_map.CHKMap(chk_store, root_key,
1984
root_key = chk_map.CHKMap.from_dict(chk_store,
1985
parent_id_basename_dict,
1986
maximum_size=maximum_size, key_width=2,
1987
search_key_func=search_key_func)
1988
self.parent_id_basename_to_file_id = chk_map.CHKMap(chk_store,
1989
root_key, search_key_func)
1991
def _parent_id_basename_key(self, entry):
1992
"""Create a key for a entry in a parent_id_basename_to_file_id index."""
1993
if entry.parent_id is not None:
1994
parent_id = entry.parent_id
1997
return parent_id, entry.name.encode('utf8')
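    # Illustrative sketch (not part of the original source): for a non-root
    # entry the index key is simply (parent_id, utf-8 basename).  The ids used
    # here are invented.
    def _example_parent_id_basename_key(self):
        entry = make_entry('file', 'hello.c', 'src-dir-id', file_id='hello-id')
        return self._parent_id_basename_key(entry)   # ('src-dir-id', 'hello.c')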
1999
def __getitem__(self, file_id):
2000
"""map a single file_id -> InventoryEntry."""
2002
raise errors.NoSuchId(self, file_id)
2003
result = self._fileid_to_entry_cache.get(file_id, None)
2004
if result is not None:
2007
return self._bytes_to_entry(
2008
self.id_to_entry.iteritems([(file_id,)]).next()[1])
2009
except StopIteration:
2010
# really we're passing an inventory, not a tree...
2011
raise errors.NoSuchId(self, file_id)
2013
def _getitems(self, file_ids):
2014
"""Similar to __getitem__, but lets you query for multiple.
2016
The returned order is undefined. Currently, if an item doesn't
2017
exist, it isn't included in the output.
2021
for file_id in file_ids:
2022
entry = self._fileid_to_entry_cache.get(file_id, None)
2024
remaining.append(file_id)
2026
result.append(entry)
2027
file_keys = [(f,) for f in remaining]
2028
for file_key, value in self.id_to_entry.iteritems(file_keys):
2029
entry = self._bytes_to_entry(value)
2030
result.append(entry)
2031
self._fileid_to_entry_cache[entry.file_id] = entry
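    # Usage sketch (illustrative, not part of the original source): fetching
    # several entries at once; ids that are not present are silently skipped
    # and the order of the result is undefined.  The ids are invented.
    def _example_getitems_usage(self):
        return self._getitems(['file-a-id', 'file-b-id', 'no-such-id'])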
2034
def has_id(self, file_id):
2035
# Perhaps have an explicit 'contains' method on CHKMap?
2036
if self._fileid_to_entry_cache.get(file_id, None) is not None:
2038
return len(list(self.id_to_entry.iteritems([(file_id,)]))) == 1
2040
def is_root(self, file_id):
2041
return file_id == self.root_id
2043
def _iter_file_id_parents(self, file_id):
2044
"""Yield the parents of file_id up to the root."""
2045
while file_id is not None:
2049
raise errors.NoSuchId(tree=self, file_id=file_id)
2051
file_id = ie.parent_id
2054
"""Iterate over all file-ids."""
2055
for key, _ in self.id_to_entry.iteritems():
2058
def iter_just_entries(self):
2059
"""Iterate over all entries.
2061
Unlike iter_entries(), just the entries are returned (not (path, ie))
2062
and the order of entries is undefined.
2064
XXX: We may not want to merge this into bzr.dev.
2066
for key, entry in self.id_to_entry.iteritems():
2068
ie = self._fileid_to_entry_cache.get(file_id, None)
2070
ie = self._bytes_to_entry(entry)
2071
self._fileid_to_entry_cache[file_id] = ie
2074
def iter_changes(self, basis):
2075
"""Generate a Tree.iter_changes change list between this and basis.
2077
:param basis: Another CHKInventory.
2078
:return: An iterator over the changes between self and basis, as per
2079
tree.iter_changes().
2081
# We want: (file_id, (path_in_source, path_in_target),
2082
# changed_content, versioned, parent, name, kind,
2084
for key, basis_value, self_value in \
2085
self.id_to_entry.iter_changes(basis.id_to_entry):
2087
if basis_value is not None:
2088
basis_entry = basis._bytes_to_entry(basis_value)
2089
path_in_source = basis.id2path(file_id)
2090
basis_parent = basis_entry.parent_id
2091
basis_name = basis_entry.name
2092
basis_executable = basis_entry.executable
2094
path_in_source = None
2097
basis_executable = None
2098
if self_value is not None:
2099
self_entry = self._bytes_to_entry(self_value)
2100
path_in_target = self.id2path(file_id)
2101
self_parent = self_entry.parent_id
2102
self_name = self_entry.name
2103
self_executable = self_entry.executable
2105
path_in_target = None
2108
self_executable = None
2109
if basis_value is None:
2111
kind = (None, self_entry.kind)
2112
versioned = (False, True)
2113
elif self_value is None:
2115
kind = (basis_entry.kind, None)
2116
versioned = (True, False)
2118
kind = (basis_entry.kind, self_entry.kind)
2119
versioned = (True, True)
2120
changed_content = False
2121
if kind[0] != kind[1]:
2122
changed_content = True
2123
elif kind[0] == 'file':
2124
if (self_entry.text_size != basis_entry.text_size or
2125
self_entry.text_sha1 != basis_entry.text_sha1):
2126
changed_content = True
2127
elif kind[0] == 'symlink':
2128
if self_entry.symlink_target != basis_entry.symlink_target:
2129
changed_content = True
2130
elif kind[0] == 'tree-reference':
2131
if (self_entry.reference_revision !=
2132
basis_entry.reference_revision):
2133
changed_content = True
2134
parent = (basis_parent, self_parent)
2135
name = (basis_name, self_name)
2136
executable = (basis_executable, self_executable)
2137
if (not changed_content
2138
and parent[0] == parent[1]
2139
and name[0] == name[1]
2140
and executable[0] == executable[1]):
2141
# Could happen when only the revision changed for a directory
2144
yield (file_id, (path_in_source, path_in_target), changed_content,
2145
versioned, parent, name, kind, executable)
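    # Illustrative sketch (not part of the original source): the shape of one
    # tuple yielded by iter_changes() above, for a renamed but otherwise
    # unmodified file.  All values are invented samples.
    def _example_iter_changes_tuple(self):
        return ('file-a-id',                # file_id
            ('a.txt', 'b.txt'),             # (path_in_source, path_in_target)
            False,                          # changed_content
            (True, True),                   # versioned in (basis, self)
            ('root-id', 'root-id'),         # parent ids
            ('a.txt', 'b.txt'),             # names
            ('file', 'file'),               # kinds
            (False, False))                 # executable bits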
2148
"""Return the number of entries in the inventory."""
2149
return len(self.id_to_entry)
2151
def _make_delta(self, old):
2152
"""Make an inventory delta from two inventories."""
2153
if type(old) != CHKInventory:
2154
return CommonInventory._make_delta(self, old)
2156
for key, old_value, self_value in \
2157
self.id_to_entry.iter_changes(old.id_to_entry):
2159
if old_value is not None:
2160
old_path = old.id2path(file_id)
2163
if self_value is not None:
2164
entry = self._bytes_to_entry(self_value)
2165
self._fileid_to_entry_cache[file_id] = entry
2166
new_path = self.id2path(file_id)
2170
delta.append((old_path, new_path, file_id, entry))
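    # Illustrative sketch (not part of the original source): one record of the
    # delta built above is (old_path, new_path, file_id, entry); an addition
    # has old_path None, a removal has new_path and entry None.  Sample ids
    # are invented.
    def _example_delta_record(self):
        entry = make_entry('file', 'hello.c', self.root_id, file_id='hello-id')
        return (None, 'hello.c', 'hello-id', entry)   # an 'add' record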
2173
def path2id(self, name):
2174
"""See CommonInventory.path2id()."""
2175
# TODO: perhaps support negative hits?
2176
result = self._path_to_fileid_cache.get(name, None)
2177
if result is not None:
2179
if isinstance(name, basestring):
2180
names = osutils.splitpath(name)
2183
current_id = self.root_id
2184
if current_id is None:
2186
parent_id_index = self.parent_id_basename_to_file_id
2187
for basename in names:
2188
# TODO: Cache each path we figure out in this function.
2189
basename_utf8 = basename.encode('utf8')
2190
key_filter = [(current_id, basename_utf8)]
2192
for (parent_id, name_utf8), file_id in parent_id_index.iteritems(
2193
key_filter=key_filter):
2194
if parent_id != current_id or name_utf8 != basename_utf8:
2195
raise errors.BzrError("corrupt inventory lookup! "
2196
"%r %r %r %r" % (parent_id, current_id, name_utf8,
2200
current_id = file_id
2201
self._path_to_fileid_cache[name] = current_id
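    # Usage sketch (illustrative, not part of the original source): path2id()
    # resolves one component at a time through the
    # parent_id_basename_to_file_id index; combining it with __getitem__ gives
    # path -> entry lookup.
    def _example_path_lookup(self):
        file_id = self.path2id('src/hello.c')
        if file_id is None:
            return None
        return self[file_id]   # the InventoryEntry for that path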
2205
"""Serialise the inventory to lines."""
2206
lines = ["chkinventory:\n"]
2207
if self._search_key_name != 'plain':
2208
# custom ordering grouping things that don't change together
2209
lines.append('search_key_name: %s\n' % (self._search_key_name,))
2210
lines.append("root_id: %s\n" % self.root_id)
2211
lines.append('parent_id_basename_to_file_id: %s\n' %
2212
self.parent_id_basename_to_file_id.key())
2213
lines.append("revision_id: %s\n" % self.revision_id)
2214
lines.append("id_to_entry: %s\n" % self.id_to_entry.key())
2216
lines.append("revision_id: %s\n" % self.revision_id)
2217
lines.append("root_id: %s\n" % self.root_id)
2218
if self.parent_id_basename_to_file_id is not None:
2219
lines.append('parent_id_basename_to_file_id: %s\n' %
2220
self.parent_id_basename_to_file_id.key())
2221
lines.append("id_to_entry: %s\n" % self.id_to_entry.key())
2226
"""Get the root entry."""
2227
return self[self.root_id]
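    # Round-trip sketch (illustrative, not part of the original source):
    # assuming the serialiser above is named to_lines() and `chk_store` holds
    # the maps this inventory references, deserialise() reverses it.
    def _example_round_trip(self, chk_store):
        bytes = ''.join(self.to_lines())
        return CHKInventory.deserialise(chk_store, bytes, (self.revision_id,))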
2230
class CHKInventoryDirectory(InventoryDirectory):
2231
"""A directory in an inventory."""
2233
__slots__ = ['text_sha1', 'text_size', 'file_id', 'name', 'kind',
2234
'text_id', 'parent_id', '_children', 'executable',
2235
'revision', 'symlink_target', 'reference_revision',
2238
def __init__(self, file_id, name, parent_id, chk_inventory):
2239
# Don't call InventoryDirectory.__init__ - it isn't right for this class.
2241
InventoryEntry.__init__(self, file_id, name, parent_id)
2242
self._children = None
2243
self.kind = 'directory'
2244
self._chk_inventory = chk_inventory
2248
"""Access the list of children of this directory.
2250
With a parent_id_basename_to_file_id index, this loads all the children;
2251
without one it loads the entire index, which is bad. A more sophisticated
2252
proxy object might be nice, to allow partial loading of children as
2253
well when specific names are accessed. (So path traversal can be
2254
written in the obvious way but not examine siblings.)
2256
if self._children is not None:
2257
return self._children
2258
# No longer supported
2259
if self._chk_inventory.parent_id_basename_to_file_id is None:
2260
raise AssertionError("Inventories without"
2261
" parent_id_basename_to_file_id are no longer supported")
2263
# XXX: Todo - use proxy objects for the children rather than loading
2264
# all when the attribute is referenced.
2265
parent_id_index = self._chk_inventory.parent_id_basename_to_file_id
2267
for (parent_id, name_utf8), file_id in parent_id_index.iteritems(
2268
key_filter=[(self.file_id,)]):
2269
child_keys.add((file_id,))
2271
for file_id_key in child_keys:
2272
entry = self._chk_inventory._fileid_to_entry_cache.get(
2273
file_id_key[0], None)
2274
if entry is not None:
2275
result[entry.name] = entry
2276
cached.add(file_id_key)
2277
child_keys.difference_update(cached)
2278
# populate; todo: do by name
2279
id_to_entry = self._chk_inventory.id_to_entry
2280
for file_id_key, bytes in id_to_entry.iteritems(child_keys):
2281
entry = self._chk_inventory._bytes_to_entry(bytes)
2282
result[entry.name] = entry
2283
self._chk_inventory._fileid_to_entry_cache[file_id_key[0]] = entry
2284
self._children = result
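    # Usage sketch (illustrative, not part of the original source): children
    # maps basenames to entries, so one level of path traversal is a plain
    # dict lookup.  The basename here is invented.
    def _example_child_lookup(self):
        return self.children.get('hello.c')   # an InventoryEntry, or None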
2288
'directory': InventoryDirectory,
2289
'file': InventoryFile,
2290
'symlink': InventoryLink,
2291
'tree-reference': TreeReference
2294
def make_entry(kind, name, parent_id, file_id=None):
2295
"""Create an inventory entry.
2297
:param kind: the type of inventory entry to create.
2298
:param name: the basename of the entry.
2299
:param parent_id: the parent_id of the entry.
2300
:param file_id: the file_id to use. If None, one will be created.
2303
file_id = generate_ids.gen_file_id(name)
2304
name = ensure_normalized_name(name)
2306
factory = entry_factory[kind]
2308
raise errors.BadFileKindError(name, kind)
2309
return factory(file_id, name, parent_id)
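# Usage sketch (illustrative, not part of the original source): creating an
# entry with a generated file id, and one with an explicit id.
def _example_make_entry_usage():
    f = make_entry('file', 'hello.c', 'src-dir-id')            # id generated
    d = make_entry('directory', 'src', ROOT_ID, 'src-dir-id')  # explicit id
    return f.file_id, d.kind                                   # (..., 'directory')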
2312
def ensure_normalized_name(name):
2315
:raises InvalidNormalization: When name is not normalized, and cannot be
2316
accessed on this platform by the normalized path.
2317
:return: The NFC normalised version of name.
2319
#------- This has been copied to bzrlib.dirstate.DirState.add, please
2320
# keep them synchronised.
2321
# we don't import normalized_filename directly because we want to be
2322
# able to change the implementation at runtime for tests.
2323
norm_name, can_access = osutils.normalized_filename(name)
2324
if norm_name != name:
2328
# TODO: jam 20060701 This would probably be more useful
2329
# if the error was raised with the full path
2330
raise errors.InvalidNormalization(name)
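# Illustrative sketch (not part of the original source): NFC normalisation
# folds a decomposed name ('e' + combining acute) and its precomposed form
# into the same string, which is what the check above relies on.
def _example_nfc_normalisation():
    import unicodedata
    decomposed = u'caf\u0065\u0301.txt'
    return unicodedata.normalize('NFC', decomposed) == u'caf\u00e9.txt'   # True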
649
2336
def is_valid_name(name):
2338
if _NAME_RE is None:
652
2339
_NAME_RE = re.compile(r'^[^/\\]+$')
654
2341
return bool(_NAME_RE.match(name))
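# Illustrative sketch (not part of the original source): is_valid_name()
# accepts any single path component and rejects anything containing a path
# separator.
def _example_is_valid_name():
    return is_valid_name('hello.c'), is_valid_name('src/hello.c')   # (True, False)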
2344
def _check_delta_unique_ids(delta):
2345
"""Decorate a delta and check that the file ids in it are unique.
2347
:return: A generator over delta.
2351
length = len(ids) + 1
2353
if len(ids) != length:
2354
raise errors.InconsistentDelta(item[0] or item[1], item[2],
2359
def _check_delta_unique_new_paths(delta):
2360
"""Decorate a delta and check that the new paths in it are unique.
2362
:return: A generator over delta.
2366
length = len(paths) + 1
2368
if path is not None:
2370
if len(paths) != length:
2371
raise errors.InconsistentDelta(path, item[2], "repeated path")
2375
def _check_delta_unique_old_paths(delta):
2376
"""Decorate a delta and check that the old paths in it are unique.
2378
:return: A generator over delta.
2382
length = len(paths) + 1
2384
if path is not None:
2386
if len(paths) != length:
2387
raise errors.InconsistentDelta(path, item[2], "repeated path")
2391
def _check_delta_ids_are_valid(delta):
2392
"""Decorate a delta and check that the ids in it are valid.
2394
:return: A generator over delta.
2399
raise errors.InconsistentDelta(item[0] or item[1], item[2],
2400
"entry with file_id None %r" % entry)
2401
if type(item[2]) != str:
2402
raise errors.InconsistentDelta(item[0] or item[1], item[2],
2403
"entry with non bytes file_id %r" % entry)
2407
def _check_delta_ids_match_entry(delta):
2408
"""Decorate a delta and check that the ids in it match the entry.file_id.
2410
:return: A generator over delta.
2414
if entry is not None:
2415
if entry.file_id != item[2]:
2416
raise errors.InconsistentDelta(item[0] or item[1], item[2],
2417
"mismatched id with %r" % entry)
2421
def _check_delta_new_path_entry_both_or_None(delta):
2422
"""Decorate a delta and check that the new_path and entry are paired.
2424
:return: A generator over delta.
2429
if new_path is None and entry is not None:
2430
raise errors.InconsistentDelta(item[0], item[1],
2431
"Entry with no new_path")
2432
if new_path is not None and entry is None:
2433
raise errors.InconsistentDelta(new_path, item[1],
2434
"new_path with no entry")