@@ -52 +52 @@
 from bzrlib.symbol_versioning import deprecated_in, deprecated_method
 from bzrlib.trace import mutter
+from bzrlib.static_tuple import StaticTuple


 class InventoryEntry(object):
@@ -958 +959 @@
         descend(self.root, u'')
-    def path2id(self, name):
+    def path2id(self, relpath):
         """Walk down through directories to return entry of last component.

-        names may be either a list of path components, or a single
-        string, in which case it is automatically split.
+        :param relpath: may be either a list of path components, or a single
+            string, in which case it is automatically split.

         This returns the entry of the last component in the path,
         which may be either a file or a directory.

         Returns None IFF the path is not found.
         """
-        if isinstance(name, basestring):
-            name = osutils.splitpath(name)
-        # mutter("lookup path %r" % name)
+        if isinstance(relpath, basestring):
+            names = osutils.splitpath(relpath)
         parent = self.root
@@ -1599 +1600 @@
         interesting.add(None) # this will auto-filter it in the loop
         remaining_parents.discard(None)
         while remaining_parents:
-            if None in remaining_parents:
-                import pdb; pdb.set_trace()
             next_parents = set()
             for entry in self._getitems(remaining_parents):
                 next_parents.add(entry.parent_id)
@@ -1615 +1614 @@
         while directories_to_expand:
             # Expand directories by looking in the
             # parent_id_basename_to_file_id map
-            keys = [(f,) for f in directories_to_expand]
+            keys = [StaticTuple(f,).intern() for f in directories_to_expand]
             directories_to_expand = set()
             items = self.parent_id_basename_to_file_id.iteritems(keys)
             next_file_ids = set([item[1] for item in items])
@@ -1678 +1677 @@
         # to filter out empty names because of non rich-root...
         sections = bytes.split('\n')
         kind, file_id = sections[0].split(': ')
-        return (sections[2], file_id, sections[3])
+        return (sections[2], intern(file_id), intern(sections[3]))

     def _bytes_to_entry(self, bytes):
         """Deserialise a serialised entry."""
@@ -1706 +1705 @@
             result.reference_revision = sections[4]
         else:
             raise ValueError("Not a serialised entry %r" % bytes)
-        result.revision = sections[3]
+        result.file_id = intern(result.file_id)
+        result.revision = intern(sections[3])
         if result.parent_id == '':
             result.parent_id = None
         self._fileid_to_entry_cache[result.file_id] = result
@@ -1811 +1811 @@
                 deletes.add(file_id)
-                new_key = (file_id,)
+                new_key = StaticTuple(file_id,)
                 new_value = result._entry_to_bytes(entry)
                 # Update caches. It's worth doing this whether
                 # we're propagating the old caches or not.
             if old_path is None:
-                old_key = (file_id,)
+                old_key = StaticTuple(file_id,)
                 if self.id2path(file_id) != old_path:
                     raise errors.InconsistentDelta(old_path, file_id,
                         "Entry was at wrong other path %r." %
                         self.id2path(file_id))
                 altered.add(file_id)
-            id_to_entry_delta.append((old_key, new_key, new_value))
+            id_to_entry_delta.append(StaticTuple(old_key, new_key, new_value))
             if result.parent_id_basename_to_file_id is not None:
                 # parent_id, basename changes
                 if old_path is None:
@@ -1918 +1918 @@
                 raise errors.BzrError('Duplicate key in inventory: %r\n%r'
                                       % (key, bytes))
             info[key] = value
-        revision_id = info['revision_id']
-        root_id = info['root_id']
-        search_key_name = info.get('search_key_name', 'plain')
-        parent_id_basename_to_file_id = info.get(
-            'parent_id_basename_to_file_id', None)
+        revision_id = intern(info['revision_id'])
+        root_id = intern(info['root_id'])
+        search_key_name = intern(info.get('search_key_name', 'plain'))
+        parent_id_basename_to_file_id = intern(info.get(
+            'parent_id_basename_to_file_id', None))
+        if not parent_id_basename_to_file_id.startswith('sha1:'):
+            raise ValueError('parent_id_basename_to_file_id should be a sha1'
+                             ' key not %r' % (parent_id_basename_to_file_id,))
         id_to_entry = info['id_to_entry']
+        if not id_to_entry.startswith('sha1:'):
+            raise ValueError('id_to_entry should be a sha1'
+                             ' key not %r' % (id_to_entry,))

         result = CHKInventory(search_key_name)
         result.revision_id = revision_id
                             result._search_key_name)
         if parent_id_basename_to_file_id is not None:
             result.parent_id_basename_to_file_id = chk_map.CHKMap(
-                chk_store, (parent_id_basename_to_file_id,),
+                chk_store, StaticTuple(parent_id_basename_to_file_id,),
                 search_key_func=search_key_func)
         else:
             result.parent_id_basename_to_file_id = None

-        result.id_to_entry = chk_map.CHKMap(chk_store, (id_to_entry,),
+        result.id_to_entry = chk_map.CHKMap(chk_store,
+                                            StaticTuple(id_to_entry,),
                                             search_key_func=search_key_func)
         if (result.revision_id,) != expected_revision_id:
             raise ValueError("Mismatched revision id and expected: %r, %r" %
@@ -1965 +1972 @@
         id_to_entry_dict = {}
         parent_id_basename_dict = {}
         for path, entry in inventory.iter_entries():
-            id_to_entry_dict[(entry.file_id,)] = entry_to_bytes(entry)
+            key = StaticTuple(entry.file_id,).intern()
+            id_to_entry_dict[key] = entry_to_bytes(entry)
             p_id_key = parent_id_basename_key(entry)
             parent_id_basename_dict[p_id_key] = entry.file_id
@@ -1994 +2002 @@
             parent_id = entry.parent_id
-        return parent_id, entry.name.encode('utf8')
+        return StaticTuple(parent_id, entry.name.encode('utf8')).intern()

     def __getitem__(self, file_id):
         """map a single file_id -> InventoryEntry."""
@@ -2007 +2015 @@
             return self._bytes_to_entry(
-                self.id_to_entry.iteritems([(file_id,)]).next()[1])
+                self.id_to_entry.iteritems([StaticTuple(file_id,)]).next()[1])
         except StopIteration:
             # really we're passing an inventory, not a tree...
             raise errors.NoSuchId(self, file_id)
@@ -2024 +2032 @@
                 remaining.append(file_id)
             else:
                 result.append(entry)
-        file_keys = [(f,) for f in remaining]
+        file_keys = [StaticTuple(f,).intern() for f in remaining]
         for file_key, value in self.id_to_entry.iteritems(file_keys):
             entry = self._bytes_to_entry(value)
             result.append(entry)
@@ -2035 +2043 @@
         # Perhaps have an explicit 'contains' method on CHKMap ?
         if self._fileid_to_entry_cache.get(file_id, None) is not None:
             return True
-        return len(list(self.id_to_entry.iteritems([(file_id,)]))) == 1
+        return len(list(
+            self.id_to_entry.iteritems([StaticTuple(file_id,)]))) == 1

     def is_root(self, file_id):
         return file_id == self.root_id
@@ -2170 +2179 @@
             delta.append((old_path, new_path, file_id, entry))
-    def path2id(self, name):
+    def path2id(self, relpath):
         """See CommonInventory.path2id()."""
         # TODO: perhaps support negative hits?
-        result = self._path_to_fileid_cache.get(name, None)
+        result = self._path_to_fileid_cache.get(relpath, None)
         if result is not None:
             return result
-        if isinstance(name, basestring):
-            names = osutils.splitpath(name)
+        if isinstance(relpath, basestring):
+            names = osutils.splitpath(relpath)
         current_id = self.root_id
         if current_id is None:
             return None
         parent_id_index = self.parent_id_basename_to_file_id
         for basename in names:
-            # TODO: Cache each path we figure out in this function.
+            if cur_path is None:
+                cur_path = cur_path + '/' + basename
             basename_utf8 = basename.encode('utf8')
-            key_filter = [(current_id, basename_utf8)]
-            for (parent_id, name_utf8), file_id in parent_id_index.iteritems(
-                key_filter=key_filter):
-                if parent_id != current_id or name_utf8 != basename_utf8:
-                    raise errors.BzrError("corrupt inventory lookup! "
-                        "%r %r %r %r" % (parent_id, current_id, name_utf8,
+            file_id = self._path_to_fileid_cache.get(cur_path, None)
             if file_id is None:
+                key_filter = [StaticTuple(current_id, basename_utf8)]
+                items = parent_id_index.iteritems(key_filter)
+                for (parent_id, name_utf8), file_id in items:
+                    if parent_id != current_id or name_utf8 != basename_utf8:
+                        raise errors.BzrError("corrupt inventory lookup! "
+                            "%r %r %r %r" % (parent_id, current_id, name_utf8,
+                    self._path_to_fileid_cache[cur_path] = file_id
             current_id = file_id
-        self._path_to_fileid_cache[name] = current_id
         return current_id
@@ -2204 +2219 @@
     def to_lines(self):
             lines.append('search_key_name: %s\n' % (self._search_key_name,))
             lines.append("root_id: %s\n" % self.root_id)
             lines.append('parent_id_basename_to_file_id: %s\n' %
-                self.parent_id_basename_to_file_id.key())
+                (self.parent_id_basename_to_file_id.key()[0],))
             lines.append("revision_id: %s\n" % self.revision_id)
-            lines.append("id_to_entry: %s\n" % self.id_to_entry.key())
+            lines.append("id_to_entry: %s\n" % (self.id_to_entry.key()[0],))
         else:
             lines.append("revision_id: %s\n" % self.revision_id)
             lines.append("root_id: %s\n" % self.root_id)
             if self.parent_id_basename_to_file_id is not None:
                 lines.append('parent_id_basename_to_file_id: %s\n' %
-                    self.parent_id_basename_to_file_id.key())
-            lines.append("id_to_entry: %s\n" % self.id_to_entry.key())
+                    (self.parent_id_basename_to_file_id.key()[0],))
+            lines.append("id_to_entry: %s\n" % (self.id_to_entry.key()[0],))
@@ -2265 +2280 @@
         parent_id_index = self._chk_inventory.parent_id_basename_to_file_id
         child_keys = set()
         for (parent_id, name_utf8), file_id in parent_id_index.iteritems(
-            key_filter=[(self.file_id,)]):
-            child_keys.add((file_id,))
+            key_filter=[StaticTuple(self.file_id,)]):
+            child_keys.add(StaticTuple(file_id,))
         for file_id_key in child_keys:
             entry = self._chk_inventory._fileid_to_entry_cache.get(
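For context, a minimal sketch of the pattern the diff applies throughout CHKInventory: plain tuple keys for the CHK maps are replaced with interned StaticTuple keys, and repeated strings such as file ids and revision ids are passed through intern(), so equal keys and ids share one object. Only StaticTuple and its intern() method come from the diff; the helper name build_file_id_keys below is hypothetical and the snippet assumes Python 2 with bzrlib importable.

    # Illustrative sketch only; not part of the diff above.
    from bzrlib.static_tuple import StaticTuple

    def build_file_id_keys(file_ids):
        # Hypothetical helper mirroring the change: one-element CHK map keys
        # built as interned StaticTuples instead of plain tuples, so equal
        # keys are shared objects and cost less memory across a large
        # inventory.
        return [StaticTuple(f,).intern() for f in file_ids]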