>>> i.add(InventoryDirectory('123', 'src', ROOT_ID))
InventoryDirectory('123', 'src', parent_id='TREE_ROOT', revision=None)
>>> i.add(InventoryFile('2323', 'hello.c', parent_id='123'))
InventoryFile('2323', 'hello.c', parent_id='123', sha1=None, len=None)
>>> shouldbe = {0: '', 1: 'src', 2: 'src/hello.c'}
>>> for ix, j in enumerate(i.iter_entries()):
...   print (j[0] == shouldbe[ix], j[1])
(True, InventoryDirectory('TREE_ROOT', u'', parent_id=None, revision=None))
(True, InventoryDirectory('123', 'src', parent_id='TREE_ROOT', revision=None))
(True, InventoryFile('2323', 'hello.c', parent_id='123', sha1=None, len=None))
>>> i.add(InventoryFile('2323', 'bye.c', '123'))
Traceback (most recent call last):
...
BzrError: inventory already contains entry with id {2323}
>>> i.add(InventoryFile('2324', 'bye.c', '123'))
InventoryFile('2324', 'bye.c', parent_id='123', sha1=None, len=None)
>>> i.add(InventoryDirectory('2325', 'wibble', '123'))
InventoryDirectory('2325', 'wibble', parent_id='123', revision=None)
>>> i.path2id('src/wibble')
'2325'
>>> i.add(InventoryFile('2326', 'wibble.c', '2325'))
InventoryFile('2326', 'wibble.c', parent_id='2325', sha1=None, len=None)
>>> i['2326']
InventoryFile('2326', 'wibble.c', parent_id='2325', sha1=None, len=None)
>>> for path, entry in i.iter_entries():
...   assert i.path2id(path)

    def _diff(self, text_diff, from_label, tree, to_label, to_entry, to_tree,
              output_to, reverse=False):
        """Perform a diff between two entries of the same kind."""

    def parent_candidates(self, previous_inventories):
        """Find possible per-file graph parents.

        This is currently defined by:
         - Select the last changed revision in the parent inventory.
         - Do deal with a short lived bug in bzr 0.8's development two entries
           that have the same last changed but different 'x' bit settings are
           changed in-place.
        """
        # revision:ie mapping for each ie found in previous_inventories.
        candidates = {}
        # identify candidate head revision ids.
        for inv in previous_inventories:
            if self.file_id in inv:
                ie = inv[self.file_id]
                assert ie.file_id == self.file_id
                if ie.revision in candidates:
                    # same revision value in two different inventories:
                    # correct possible inconsistencies:
                    #     * there was a bug in revision updates with 'x' bit
                    #       support.
                    try:
                        if candidates[ie.revision].executable != ie.executable:
                            candidates[ie.revision].executable = False
                            ie.executable = False
                    except AttributeError:
                        pass
                    # must now be the same.
                    assert candidates[ie.revision] == ie
                else:
                    # add this revision as a candidate.
                    candidates[ie.revision] = ie
        return candidates
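
# Illustrative usage sketch (not part of the original source); it relies only
# on the Inventory / InventoryFile API shown in the doctests above. Two parent
# inventories record different revisions of the same file id, so both
# revisions come back as candidate per-file graph parents.
def _example_parent_candidates():
    parent_invs = []
    for rev in ('rev-1', 'rev-2'):
        inv = Inventory()
        inv.add(InventoryFile('hello-id', 'hello.c', ROOT_ID))
        inv['hello-id'].revision = rev
        parent_invs.append(inv)
    new_entry = InventoryFile('hello-id', 'hello.c', ROOT_ID)
    # Returns {'rev-1': <entry>, 'rev-2': <entry>}.
    return new_entry.parent_candidates(parent_invs)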

    @deprecated_method(zero_ninetyone)
    def find_previous_heads(self, previous_inventories,
                            versioned_file_store,
                            transaction,
                            entry_vf=None):
        """Return the revisions and entries that directly precede this.

        Returned as a map from revision to inventory entry.

        This is a map containing the file revisions in all parents
        for which the file exists, and its revision is not a parent of
        any other. If the file is new, the set will be empty.

        :param versioned_file_store: A store where ancestry data on this
            file id can be queried.
        :param transaction: The transaction that queries to the versioned
            file store should be completed under.
        :param entry_vf: The entry versioned file, if it's already available.
        """
        candidates = self.parent_candidates(previous_inventories)

        # revision:ie mapping with one revision for each head.
        heads = {}
        # common case optimisation
        if len(candidates) == 1:
            # if there is only one candidate revision found
            # then we can avoid opening the versioned file to access ancestry:
            # there cannot be any ancestors to eliminate when there is
            # only one revision available.
            return candidates

        # --- what follows is now encapsulated in repository.get_graph.heads(),
        #     but that is not accessible from here as we have no repository
        #     pointer. Note that the repository.get_graph.heads() call can return
        #     different results *at the moment* because of the kind-changing check
        #     we have in parent_candidates().

        # eliminate ancestors amongst the available candidates:
        # heads are those that are not an ancestor of any other candidate
        # - this provides convergence at a per-file level.
        def get_ancestors(weave, entry):
            return set(weave.get_ancestry(entry.revision, topo_sorted=False))
        # revision: ancestor list for each head
        head_ancestors = {}
        for ie in candidates.values():
            # may be an ancestor of a known head:
            already_present = 0 != len(
                [head for head in heads
                 if ie.revision in head_ancestors[head]])
            if already_present:
                # an ancestor of an analyzed candidate.
                continue
            # not an ancestor of a known head:
            # load the versioned file for this file id if needed
            if entry_vf is None:
                entry_vf = versioned_file_store.get_weave_or_empty(
                    self.file_id, transaction)
            ancestors = get_ancestors(entry_vf, ie)
            # may knock something else out:
            check_heads = list(heads.keys())
            for head in check_heads:
                if head in ancestors:
                    # this previously discovered 'head' is not
                    # really a head - it's an ancestor of the newly
                    # found head.
                    heads.pop(head)
            head_ancestors[ie.revision] = ancestors
            heads[ie.revision] = ie
        return heads
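
# A minimal standalone sketch (not bzrlib code) of the head elimination above:
# given candidate revisions and a callable returning each revision's ancestor
# set, keep only the revisions that are not ancestors of another candidate.
def _sketch_eliminate_ancestors(candidates, get_ancestor_set):
    heads = {}
    head_ancestors = {}
    for rev, entry in candidates.items():
        if any(rev in head_ancestors[head] for head in heads):
            # already dominated by a known head
            continue
        ancestors = get_ancestor_set(rev)
        for head in list(heads):
            if head in ancestors:
                # the previously found head is an ancestor of this revision
                heads.pop(head)
        head_ancestors[rev] = ancestors
        heads[rev] = entry
    return heads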

    def get_tar_item(self, root, dp, now, tree):
        """Get a tarfile item and a file stream for its content."""
        item = tarfile.TarInfo(osutils.pathjoin(root, dp).encode('utf8'))
        # TODO: would be cool to actually set it to the timestamp of the
        # revision it was last changed
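
# A minimal standalone sketch (standard library only, not bzrlib code) of how
# a tar member for one file's bytes can be populated and written; a real
# exporter would use the revision timestamp instead of the current time.
def _sketch_add_file_to_tar(tar, path, data, executable=False):
    import StringIO
    import time
    item = tarfile.TarInfo(path)
    item.type = tarfile.REGTYPE      # a regular file
    item.size = len(data)
    item.mtime = time.time()
    if executable:
        item.mode = 0755
    else:
        item.mode = 0644
    tar.addfile(item, StringIO.StringIO(data))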

    def put_on_disk(self, dest, dp, tree):
        """Create a representation of self on disk in the prefix dest.

        This is a template method - implement _put_on_disk in subclasses.
        """
        fullpath = osutils.pathjoin(dest, dp)
        self._put_on_disk(fullpath, tree)
        # mutter(" export {%s} kind %s to %s", self.file_id,
        #         self.kind, fullpath)

    def _put_on_disk(self, fullpath, tree):
        """Put this entry onto disk at fullpath, from tree tree."""
        raise BzrError("don't know how to export {%s} of kind %r" %
                       (self.file_id, self.kind))

    def sorted_children(self):
        return sorted(self.children.items())

    @staticmethod
    def versionable_kind(kind):
        return (kind in ('file', 'directory', 'symlink', 'tree-reference'))

    def check(self, checker, rev_id, inv, tree):
        """Check this inventory entry is intact.

        This is a template method, override _check for kind specific
        tests.

        :param checker: Check object providing context for the checks;
            can be used to find out what parts of the repository have already
            been checked.
        :param rev_id: Revision id from which this InventoryEntry was loaded.
            Not necessarily the last-changed revision for this file.
        :param inv: Inventory from which the entry was loaded.
        :param tree: RevisionTree for this entry.
        """
        if self.parent_id is not None:
            if not inv.has_id(self.parent_id):
                raise BzrCheckError('missing parent {%s} in inventory for revision {%s}'
                        % (self.parent_id, rev_id))
        self._check(checker, rev_id, tree)

    def _check(self, checker, rev_id, tree):
        """Check this inventory entry for kind specific errors."""
        raise BzrCheckError('unknown entry kind %r in revision {%s}' %
                            (self.kind, rev_id))

    def copy(self):
        """Clone this inventory entry."""
        raise NotImplementedError

    def _get_snapshot_change(self, previous_entries):
        if len(previous_entries) > 1:
            return 'merged'
        elif len(previous_entries) == 0:
            return 'added'
        else:
            return 'modified/renamed/reparented'

    @staticmethod
    def describe_change(old_entry, new_entry):
        """Describe the change between old_entry and this.

        This smells of being an InterInventoryEntry situation, but as it's
        the first one, we're making it a static method for now.

        An entry with a different parent, or different name is considered
        to be renamed. Reparenting is an internal detail.
        Note that renaming the parent does not trigger a rename for the
        child entry itself.
        """
        # TODO: Perhaps return an object rather than just a string
        if old_entry is new_entry:
            # also the case of both being None
            return 'unchanged'
        elif old_entry is None:
            return 'added'
        elif new_entry is None:
            return 'removed'
        if old_entry.kind != new_entry.kind:
            return 'modified'
        text_modified, meta_modified = new_entry.detect_changes(old_entry)
        if text_modified or meta_modified:
            modified = True
        else:
            modified = False
        # TODO 20060511 (mbp, rbc) factor out 'detect_rename' here.
        if old_entry.parent_id != new_entry.parent_id:
            renamed = True
        elif old_entry.name != new_entry.name:
            renamed = True
        else:
            renamed = False
        if renamed and not modified:
            return InventoryEntry.RENAMED
        if modified and not renamed:
            return 'modified'
        if modified and renamed:
            return InventoryEntry.MODIFIED_AND_RENAMED
        return 'unchanged'
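
# Illustrative sketch (not part of the original source): the values
# describe_change() can report, keyed by the (modified, renamed) flags
# computed above; RENAMED and MODIFIED_AND_RENAMED are the class constants
# referenced in the code.
def _example_describe_change_outcomes():
    return {
        (False, False): 'unchanged',
        (False, True): InventoryEntry.RENAMED,
        (True, False): 'modified',
        (True, True): InventoryEntry.MODIFIED_AND_RENAMED,
    }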

    def __repr__(self):
        return ("%s(%r, %r, parent_id=%r, revision=%r)"
                % (self.__class__.__name__,
                   self.file_id,
                   self.name,
                   self.parent_id,
                   self.revision))

    def snapshot(self, revision, path, previous_entries,
                 work_tree, weave_store, transaction):
        """Make a snapshot of this entry which may or may not have changed.

        This means that all its fields are populated, that it has its
        text stored in the text store or weave.
        """
        mutter('new parents of %s are %r', path, previous_entries)
        self._read_tree_state(path, work_tree)
        if len(previous_entries) == 1:
            # cannot be unchanged unless there is only one parent file rev.
            parent_ie = previous_entries.values()[0]
            if self._unchanged(parent_ie):
                mutter("found unchanged entry")
                self.revision = parent_ie.revision
                return "unchanged"
        return self.snapshot_revision(revision, previous_entries,
                                      work_tree, weave_store, transaction)

    def snapshot_revision(self, revision, previous_entries, work_tree,
                          weave_store, transaction):
        """Record this revision unconditionally."""
        mutter('new revision for {%s}', self.file_id)
        self.revision = revision
        change = self._get_snapshot_change(previous_entries)
        self._snapshot_text(previous_entries, work_tree, weave_store,
                            transaction)
        return change

    def _snapshot_text(self, file_parents, work_tree, weave_store, transaction):
        """Record the 'text' of this entry, whatever form that takes.

        This default implementation simply adds an empty text.
        """
        mutter('storing file {%s} in revision {%s}',
               self.file_id, self.revision)
        self._add_text_to_weave([], file_parents, weave_store, transaction)
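
# A minimal standalone sketch (not bzrlib code) of the decision snapshot()
# makes above: reuse the sole parent's revision when nothing changed,
# otherwise record a new revision for this entry.
def _sketch_snapshot_decision(entry, previous_entries, new_revision):
    if len(previous_entries) == 1:
        parent_ie = list(previous_entries.values())[0]
        if entry == parent_ie:       # stands in for the _unchanged() check
            entry.revision = parent_ie.revision
            return 'unchanged'
    entry.revision = new_revision
    return 'snapshotted'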

    def __eq__(self, other):
        if not isinstance(other, InventoryEntry):
            return NotImplemented

class InventoryFile(InventoryEntry):
    """A file in an inventory."""

    __slots__ = ['text_sha1', 'text_size', 'file_id', 'name', 'kind',
                 'text_id', 'parent_id', 'children', 'executable',
                 'revision', 'symlink_target', 'reference_revision']

    def _check(self, checker, tree_revision_id, tree):
        """See InventoryEntry._check"""
        t = (self.file_id, self.revision)
        if t in checker.checked_texts:
            prev_sha = checker.checked_texts[t]
            if prev_sha != self.text_sha1:
                raise BzrCheckError('mismatched sha1 on {%s} in {%s}' %
                                    (self.file_id, tree_revision_id))
            else:
                checker.repeated_text_cnt += 1
                return

        if self.file_id not in checker.checked_weaves:
            mutter('check weave {%s}', self.file_id)
            w = tree._get_weave(self.file_id)
            # Not passing a progress bar, because it creates a new
            # progress, which overwrites the current progress,
            # and doesn't look nice
            w.check()
            checker.checked_weaves[self.file_id] = True
        else:
            w = tree._get_weave(self.file_id)

        mutter('check version {%s} of {%s}', tree_revision_id, self.file_id)
        checker.checked_text_cnt += 1
        # We can't check the length, because Weave doesn't store that
        # information, and the whole point of looking at the weave's
        # sha1sum is that we don't have to extract the text.

    def _put_on_disk(self, fullpath, tree):
        """See InventoryEntry._put_on_disk."""
        osutils.pumpfile(tree.get_file(self.file_id), file(fullpath, 'wb'))
        if tree.is_executable(self.file_id):
            os.chmod(fullpath, 0755)

    def _read_tree_state(self, path, work_tree):
        """See InventoryEntry._read_tree_state."""
        self.text_sha1 = work_tree.get_file_sha1(self.file_id, path=path)
        # FIXME: 20050930 probe for the text size when getting sha1
        # in _read_tree_state
        self.executable = work_tree.is_executable(self.file_id, path=path)

    def _snapshot_text(self, file_parents, work_tree, weave_store, transaction):
        """See InventoryEntry._snapshot_text."""
        mutter('storing file {%s} in revision {%s}',
               self.file_id, self.revision)
        # special case to avoid diffing on renames or
        # reparenting
        if (len(file_parents) == 1
            and self.text_sha1 == file_parents.values()[0].text_sha1
            and self.text_size == file_parents.values()[0].text_size):
            previous_ie = file_parents.values()[0]
            weave_store.add_identical_text(
                self.file_id, previous_ie.revision,
                self.revision, file_parents, transaction)
        else:
            new_lines = work_tree.get_file(self.file_id).readlines()
            self._add_text_to_weave(new_lines, file_parents, weave_store,
                                    transaction)
            self.text_sha1 = sha_strings(new_lines)
            self.text_size = sum(map(len, new_lines))

    def __repr__(self):
        return ("%s(%r, %r, parent_id=%r, sha1=%r, len=%s)"
                % (self.__class__.__name__,
                   self.file_id,
                   self.name,
                   self.parent_id,
                   self.text_sha1,
                   self.text_size))

    def _forget_tree_state(self):
        self.text_sha1 = None

    def _unchanged(self, previous_ie):
        """See InventoryEntry._unchanged."""

        The inventory is created with a default root directory.
        """
        if root_id is not None:
            assert root_id.__class__ == str
            self._set_root(InventoryDirectory(root_id, u'', None))
        else:
            self.root = None
            self._byid = {}
        self.revision_id = revision_id

    def __repr__(self):
        return "<Inventory object at %x, contents=%r>" % (id(self), self._byid)

    def apply_delta(self, delta):
        """Apply a delta to this inventory.

        :param delta: A list of changes to apply. After all the changes are
            applied the final inventory must be internally consistent, but it
            is ok to supply changes which, if only half-applied would have an
            invalid result - such as supplying two changes which rename two
            files, 'A' and 'B' with each other : [('A', 'B', 'A-id', a_entry),
            ('B', 'A', 'B-id', b_entry)].

            Each change is a tuple, of the form (old_path, new_path, file_id,
            new_entry).

            When new_path is None, the change indicates the removal of an entry
            from the inventory and new_entry will be ignored (using None is
            appropriate). If new_path is not None, then new_entry must be an
            InventoryEntry instance, which will be incorporated into the
            inventory (and replace any existing entry with the same file id).

            When old_path is None, the change indicates the addition of
            a new entry to the inventory.

            When neither new_path nor old_path are None, the change is a
            modification to an entry, such as a rename, reparent, kind change
            or content modification.

            The children attribute of new_entry is ignored. This is because
            this method preserves children automatically across alterations to
            the parent of the children, and cases where the parent id of a
            child is changing require the child to be passed in as a separate
            change regardless. E.g. in the recursive deletion of a directory -
            the directory's children must be included in the delta, or the
            final inventory will be invalid.
        """
        children = {}
        # Remove all affected items which were in the original inventory,
        # starting with the longest paths, thus ensuring parents are examined
        # after their children, which means that everything we examine has no
        # modified children remaining by the time we examine it.
        for old_path, file_id in sorted(((op, f) for op, np, f, e in delta
                                        if op is not None), reverse=True):
            if file_id not in self:
                continue
            # Preserve unaltered children of file_id for later reinsertion.
            children[file_id] = getattr(self[file_id], 'children', {})
            # Remove file_id and the unaltered children. If file_id is not
            # being deleted it will be reinserted back later.
            self.remove_recursive_id(file_id)
        # Insert all affected which should be in the new inventory, reattaching
        # their children if they had any. This is done from shortest path to
        # longest, ensuring that items which were modified and whose parents in
        # the resulting inventory were also modified, are inserted after their
        # parents.
        for new_path, new_entry in sorted((np, e) for op, np, f, e in
                                          delta if np is not None):
            if new_entry.kind == 'directory':
                new_entry.children = children.get(new_entry.file_id, {})
            self.add(new_entry)
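
# Illustrative sketch (not part of the original source): building and applying
# a small delta of (old_path, new_path, file_id, new_entry) tuples, using only
# the constructors shown in the doctests above.
def _example_apply_delta():
    inv = Inventory()
    inv.add(InventoryFile('hello-id', 'hello.c', ROOT_ID))
    delta = [
        # old_path None: a new directory 'src' is added
        (None, 'src', 'src-id', InventoryDirectory('src-id', 'src', ROOT_ID)),
        # both paths set: hello.c is reparented under src
        ('hello.c', 'src/hello.c', 'hello-id',
         InventoryFile('hello-id', 'hello.c', 'src-id')),
    ]
    inv.apply_delta(delta)
    assert inv.path2id('src/hello.c') == 'hello-id'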

    def _set_root(self, ie):
        self.root = ie
        self._byid = {self.root.file_id: self.root}

    def copy(self):
        # TODO: jam 20051218 Should copy also copy the revision_id?
        entries = self.iter_entries()
        other = Inventory(entries.next()[1].file_id)
        # copy recursively so we know directories will be added before
        # their children. There are more efficient ways than this...
        for path, entry in entries:
            other.add(entry.copy())
        return other

    def __iter__(self):
        return iter(self._byid)

    def __len__(self):
        """Returns number of entries."""
        return len(self._byid)

    def iter_entries(self, from_dir=None):
        """Return (path, entry) pairs, in order by name."""
        if from_dir is None:
            if self.root is None:
                return
            from_dir = self.root
            yield '', self.root
        elif isinstance(from_dir, basestring):
            from_dir = self._byid[from_dir]

        # unrolling the recursive call changed the time from
        # 440ms/663ms (inline/total) to 116ms/116ms
        children = from_dir.children.items()
        children.sort()
        children = collections.deque(children)
        stack = [(u'', children)]
        while stack:
            from_dir_relpath, children = stack[-1]

            while children:
                name, ie = children.popleft()

                # we know that from_dir_relpath never ends in a slash
                # and 'f' doesn't begin with one, we can do a string op, rather
                # than the checks of pathjoin(), though this means that all paths
                # start with a slash
                path = from_dir_relpath + '/' + name

                yield path[1:], ie

                if ie.kind != 'directory':
                    continue

                # But do this child first
                new_children = ie.children.items()
                new_children.sort()
                new_children = collections.deque(new_children)
                stack.append((path, new_children))
                # Break out of inner loop, so that we start outer loop with child
                break
            else:
                # if we finished all children, pop it off the stack
                stack.pop()

    def iter_entries_by_dir(self, from_dir=None, specific_file_ids=None,
                            yield_parents=False):
        """Iterate over the entries in a directory first order.

        This returns all entries for a directory before returning
        the entries for children of a directory. This is not
        lexicographically sorted order, and is a hybrid between
        depth-first and breadth-first.

        :param yield_parents: If True, yield the parents from the root leading
            down to specific_file_ids that have been requested. This has no
            impact if specific_file_ids is None.
        :return: This yields (path, entry) pairs
        """
        if specific_file_ids:
            safe = osutils.safe_file_id
            specific_file_ids = set(safe(fid) for fid in specific_file_ids)
        # TODO? Perhaps this should return the from_dir so that the root is
        # yielded? or maybe an option?
        if from_dir is None:
            if self.root is None:
                return
            # Optimize a common case
            if (not yield_parents and specific_file_ids is not None and
                len(specific_file_ids) == 1):
                file_id = list(specific_file_ids)[0]
                if file_id in self:
                    yield self.id2path(file_id), self[file_id]
                return
            from_dir = self.root
            if (specific_file_ids is None or yield_parents or
                self.root.file_id in specific_file_ids):
                yield u'', self.root
        elif isinstance(from_dir, basestring):
            from_dir = self._byid[from_dir]

        if specific_file_ids is not None:
            # TODO: jam 20070302 This could really be done as a loop rather
            #       than a bunch of recursive calls.
            parents = set()
            byid = self._byid
            def add_ancestors(file_id):
                if file_id not in byid:
                    return
                parent_id = byid[file_id].parent_id
                if parent_id is None:
                    return
                if parent_id not in parents:
                    parents.add(parent_id)
                    add_ancestors(parent_id)
            for file_id in specific_file_ids:
                add_ancestors(file_id)
        else:
            parents = None

        stack = [(u'', from_dir)]
        while stack:
            cur_relpath, cur_dir = stack.pop()

            child_dirs = []
            for child_name, child_ie in sorted(cur_dir.children.iteritems()):

                child_relpath = cur_relpath + child_name

                if (specific_file_ids is None or
                    child_ie.file_id in specific_file_ids or
                    (yield_parents and child_ie.file_id in parents)):
                    yield child_relpath, child_ie

                if child_ie.kind == 'directory':
                    if parents is None or child_ie.file_id in parents:
                        child_dirs.append((child_relpath+'/', child_ie))
            stack.extend(reversed(child_dirs))
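
# Illustrative usage sketch (not part of the original source): listing entries
# in directory-first order, optionally restricted to a few file ids plus the
# parents leading down to them.
def _example_iter_entries_by_dir(inv, wanted_ids=None):
    results = []
    for path, entry in inv.iter_entries_by_dir(
            specific_file_ids=wanted_ids,
            yield_parents=(wanted_ids is not None)):
        results.append((path, entry.file_id))
    return results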

    def make_entry(self, kind, name, parent_id, file_id=None):
        """Simple thunk to bzrlib.inventory.make_entry."""
        return make_entry(kind, name, parent_id, file_id)

    def entries(self):
        """Return list of (path, ie) for all entries except the root."""

            for name, child_ie in kids:
                child_path = osutils.pathjoin(parent_path, name)
                descend(child_ie, child_path)
        descend(self.root, u'')

    def __contains__(self, file_id):
        """True if this entry contains a file with given id.

        >>> inv = Inventory()
        >>> inv.add(InventoryFile('123', 'foo.c', ROOT_ID))
        InventoryFile('123', 'foo.c', parent_id='TREE_ROOT', sha1=None, len=None)
        >>> '123' in inv
        True
        >>> '456' in inv
        False
        """
        file_id = osutils.safe_file_id(file_id)
        return (file_id in self._byid)

    def __getitem__(self, file_id):
        """Return the entry for given file_id.

        >>> inv = Inventory()
        >>> inv.add(InventoryFile('123123', 'hello.c', ROOT_ID))
        InventoryFile('123123', 'hello.c', parent_id='TREE_ROOT', sha1=None, len=None)
        >>> inv['123123'].name
        'hello.c'
        """
        file_id = osutils.safe_file_id(file_id)
        try:
            return self._byid[file_id]
        except KeyError:
            # really we're passing an inventory, not a tree...
            raise errors.NoSuchId(self, file_id)

    def get_file_kind(self, file_id):
        file_id = osutils.safe_file_id(file_id)
        return self._byid[file_id].kind

    def get_child(self, parent_id, filename):
        parent_id = osutils.safe_file_id(parent_id)
        return self[parent_id].children.get(filename)

    def _add_child(self, entry):
        """Add an entry to the inventory, without adding it to its parent"""
        if entry.file_id in self._byid:
            raise BzrError("inventory already contains entry with id {%s}" %
                           entry.file_id)
        self._byid[entry.file_id] = entry
        for child in getattr(entry, 'children', {}).itervalues():
            self._add_child(child)
        return entry

    def add(self, entry):
        """Add entry to inventory.

        Returns the new entry object.
        """
        if entry.file_id in self._byid:
            raise errors.DuplicateFileId(entry.file_id,
                                         self._byid[entry.file_id])

        if entry.parent_id is None:
            assert self.root is None and len(self._byid) == 0
            self.root = entry
        else:
            try:
                parent = self._byid[entry.parent_id]
            except KeyError:
                raise BzrError("parent_id {%s} not in inventory" %
                               entry.parent_id)

            if entry.name in parent.children:
                raise BzrError("%s is already versioned" %
                        osutils.pathjoin(self.id2path(parent.file_id),
                        entry.name).encode('utf-8'))
            parent.children[entry.name] = entry
        return self._add_child(entry)

    def add_path(self, relpath, kind, file_id=None, parent_id=None):
        """Add entry from a path.

        The immediate parent must already be versioned.

        Returns the new entry object."""

        parts = osutils.splitpath(relpath)

        if len(parts) == 0:
            if file_id is None:
                file_id = generate_ids.gen_root_id()
            else:
                file_id = osutils.safe_file_id(file_id)
            self.root = InventoryDirectory(file_id, '', None)
            self._byid = {self.root.file_id: self.root}
            return self.root
        else:
            parent_path = parts[:-1]
            parent_id = self.path2id(parent_path)
            if parent_id is None:
                raise errors.NotVersionedError(path=parent_path)
            ie = make_entry(kind, parts[-1], parent_id, file_id)
        return self.add(ie)
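
# Illustrative usage sketch (not part of the original source): versioning a
# directory and a file beneath it by path, rather than by constructing
# entries explicitly.
def _example_add_path():
    inv = Inventory()
    inv.add_path('src', 'directory')
    inv.add_path('src/hello.c', 'file')
    return inv.path2id('src/hello.c')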

    def __delitem__(self, file_id):
        """Remove entry by id.

        >>> inv = Inventory()
        >>> inv.add(InventoryFile('123', 'foo.c', ROOT_ID))
        InventoryFile('123', 'foo.c', parent_id='TREE_ROOT', sha1=None, len=None)
        >>> '123' in inv
        True
        >>> del inv['123']
        >>> '123' in inv
        False
        """
        file_id = osutils.safe_file_id(file_id)
        ie = self[file_id]

        assert ie.parent_id is None or \
            self[ie.parent_id].children[ie.name] == ie

        del self._byid[file_id]
        if ie.parent_id is not None:
            del self[ie.parent_id].children[ie.name]

    def __eq__(self, other):
        """Compare two sets by comparing their contents.

        file_ie.name = new_name
        file_ie.parent_id = new_parent_id

    def is_root(self, file_id):
        file_id = osutils.safe_file_id(file_id)
        return self.root is not None and file_id == self.root.file_id


entry_factory = {
    'directory': InventoryDirectory,
    'file': InventoryFile,
    'symlink': InventoryLink,
    'tree-reference': TreeReference,
}

def make_entry(kind, name, parent_id, file_id=None):
    """Create an inventory entry.

    :param kind: the type of inventory entry to create.
    :param name: the basename of the entry.
    :param parent_id: the parent_id of the entry.
    :param file_id: the file_id to use. if None, one will be created.
    """
    if file_id is None:
        file_id = generate_ids.gen_file_id(name)
    else:
        file_id = osutils.safe_file_id(file_id)
    name = ensure_normalized_name(name)
    try:
        factory = entry_factory[kind]
    except KeyError:
        raise BzrError("unknown kind %r" % kind)
    return factory(file_id, name, parent_id)
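
# Illustrative usage sketch (not part of the original source): creating
# entries of several kinds through the factory above, letting it generate a
# file id when one is not supplied.
def _example_make_entries(parent_id):
    a_dir = make_entry('directory', 'src', parent_id)
    a_file = make_entry('file', 'hello.c', a_dir.file_id, file_id='hello-id')
    a_link = make_entry('symlink', 'latest', parent_id)
    return [a_dir, a_file, a_link]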

def ensure_normalized_name(name):
    """Normalize name.

    :raises InvalidNormalization: When name is not normalized, and cannot be
        accessed on this platform by the normalized path.
    :return: The NFC/NFKC normalised version of name.
    """
    #------- This has been copied to bzrlib.dirstate.DirState.add, please
    # keep them synchronised.
    # we don't import normalized_filename directly because we want to be
    # able to change the implementation at runtime for tests.
    norm_name, can_access = osutils.normalized_filename(name)
    if norm_name != name:
        if can_access:
            return norm_name
        else:
            # TODO: jam 20060701 This would probably be more useful
            #       if the error was raised with the full path
            raise errors.InvalidNormalization(name)
    return name
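
# A minimal standalone sketch (standard library only, not bzrlib code) of the
# normalization test performed above, assuming a unicode name and NFC form.
def _sketch_is_nfc_normalized(name):
    import unicodedata
    return unicodedata.normalize('NFC', name) == name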

_NAME_RE = None

def is_valid_name(name):
    global _NAME_RE
    if _NAME_RE is None:
        _NAME_RE = re.compile(r'^[^/\\]+$')

    return bool(_NAME_RE.match(name))
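
# Illustrative usage sketch (not part of the original source): names
# containing a path separator are rejected by the pattern above, everything
# else with at least one character passes.
def _example_is_valid_name():
    assert is_valid_name('hello.c')
    assert not is_valid_name('src/hello.c')   # contains a separator
    assert not is_valid_name('')              # empty names never match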