from sets import Set

import os.path, os, fnmatch
import sys       # used by print_file() below
import bzrlib    # BZRDIR, DEFAULT_IGNORE and IGNORE_FILENAME are referenced below

from osutils import pumpfile, compare_files, filesize, quotefn, sha_file, \
     joinpath, splitpath, appendpath, isdir, isfile, file_kind, fingerprint_file
from stat import S_ISREG, S_ISDIR, ST_MODE, ST_SIZE

from inventory import Inventory
from trace import mutter, note
from errors import bailout
    inventory = property(_get_inventory,
                         doc="Inventory of this Tree")


    def _check_retrieved(self, ie, f):
        # TODO: Test this check by damaging the store?
        fp = fingerprint_file(f)

        if ie.text_size is not None:
            if ie.text_size != fp['size']:
                bailout("mismatched size for file %r in %r" % (ie.file_id, self._store),
                        ["inventory expects %d bytes" % ie.text_size,
                         "file is actually %d bytes" % fp['size'],
                         "store is probably damaged/corrupt"])

        if ie.text_sha1 != fp['sha1']:
            bailout("wrong SHA-1 for file %r in %r" % (ie.file_id, self._store),
                    ["inventory expects %s" % ie.text_sha1,
                     "file is actually %s" % fp['sha1'],
                     "store is probably damaged/corrupt"])
    def print_file(self, fileid):
        """Print file with id `fileid` to stdout."""
        pumpfile(self.get_file(fileid), sys.stdout)


    def export(self, dest):
        """Export this tree to a new directory.

        `dest` should not exist, and will be created holding the
        contents of this tree.

        TODO: To handle subdirectories we need to create the
        directories first.

        :note: If the export fails, the destination directory will be
        inv = self.inventory

        def descend(from_dir_relpath, from_dir_id, dp):
            ls = os.listdir(dp)
            for f in ls:
                ## TODO: If we find a subdirectory with its own .bzr
                ## directory, then that is a separate tree and we
                ## should exclude it.
                if bzrlib.BZRDIR == f:
                    continue

                # path within tree
                fp = appendpath(from_dir_relpath, f)

                # absolute path
                fap = appendpath(dp, f)

                # ...

                for ff in descend(fp, f_ie.file_id, fap):
                    yield ff

        for f in descend('', inv.root.file_id, self.basedir):
            yield f
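        # Sketch of the walk, assuming a tree rooted at '/home/user/tree'
        # (hypothetical path) containing a versioned directory 'src' with a
        # file 'src/foo.c':
        #
        #   descend('', inv.root.file_id, '/home/user/tree')
        #       reports 'src', then recurses as
        #   descend('src', <src file id>, '/home/user/tree/src')
        #       reports 'src/foo.c'
        #
        # i.e. `fp` is always a path relative to the tree root, while `fap`
        # is the absolute path handed to os.listdir().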
    def unknowns(self):
        """Yield names of unknown files in this WorkingTree."""
        for subp in self.extras():
            if not self.is_ignored(subp):
                yield subp


    def extras(self):
        """Yield all unknown files in this WorkingTree.

        If there are any unknown directories then only the directory is
        returned, not all its children.  But if there are unknown files
        under a versioned directory, they are returned.

        Currently returned depth-first, sorted by name within directories.
        """
        ## TODO: Work from given directory downwards
        for path, dir_entry in self.inventory.directories():
            mutter("search for unknowns in %r" % path)
            dirabs = self.abspath(path)
            if not isdir(dirabs):
                # e.g. directory deleted
                continue

            fl = []
            for subf in os.listdir(dirabs):
                if (subf != '.bzr'
                    and (subf not in dir_entry.children)):
                    fl.append(subf)

            fl.sort()
            for subf in fl:
                subp = appendpath(path, subf)
                yield subp
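    # unknowns() and extras() are related as follows: extras() yields every
    # filesystem entry that is not in the inventory (one entry per unknown
    # directory, without descending into it), and unknowns() keeps only the
    # ones no ignore pattern matches.  A rough sketch, assuming the working
    # tree contains an unversioned 'notes.txt' and an ignored 'notes.txt~'
    # (hypothetical names):
    #
    #   list(tree.extras())    # -> ['notes.txt', 'notes.txt~']
    #   list(tree.unknowns())  # -> ['notes.txt']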
    def ignored_files(self):
        """Yield list of PATH, IGNORE_PATTERN"""
        for subp in self.extras():
            pat = self.is_ignored(subp)
            if pat is not None:
                yield subp, pat
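    # ignored_files() pairs each ignored path with the pattern that matched
    # it, e.g. a (path, pattern) tuple such as ('notes.txt~', '*~') if '*~'
    # were on the ignore list, which lets callers report *why* a file is
    # being ignored.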
    def get_ignore_list(self):
        """Return list of ignore patterns.

        Cached in the Tree object after the first call.
        """
        if hasattr(self, '_ignorelist'):
            return self._ignorelist

        l = bzrlib.DEFAULT_IGNORE[:]
        if self.has_filename(bzrlib.IGNORE_FILENAME):
            f = self.get_file_byname(bzrlib.IGNORE_FILENAME)
            l.extend([line.rstrip("\n\r") for line in f.readlines()])

        self._ignorelist = l
        return l
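    # The effect is that the built-in patterns from bzrlib.DEFAULT_IGNORE
    # always apply, and any per-tree patterns from the ignore file
    # (bzrlib.IGNORE_FILENAME, conventionally '.bzrignore') are appended
    # after them; the combined list is computed once per Tree object and
    # reused by is_ignored() for every file it checks.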
    def is_ignored(self, filename):
        r"""Check whether the filename matches an ignore pattern.

        Patterns containing '/' or '\' need to match the whole path;
        others match against only the last component.

        If the file is ignored, returns the pattern which caused it to
        be ignored, otherwise None.  So this can simply be used as a
        boolean if desired."""

        # TODO: Use '**' to match directories, and other extended
        # globbing stuff from cvs/rsync.

        # XXX: fnmatch is actually not quite what we want: it's only
        # approximately the same as real Unix fnmatch, and doesn't
        # treat dotfiles correctly and allows * to match /.
        # Eventually it should be replaced with something more
        # accurate.

        for pat in self.get_ignore_list():
            if '/' in pat or '\\' in pat:
                # as a special case, you can put ./ at the start of a
                # pattern; this is good to match in the top-level
                # only
                if (pat[:2] == './') or (pat[:2] == '.\\'):
                    newpat = pat[2:]
                else:
                    newpat = pat
                if fnmatch.fnmatchcase(filename, newpat):
                    return pat
            else:
                if fnmatch.fnmatchcase(splitpath(filename)[-1], pat):
                    return pat

        return None
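    # A minimal sketch of the matching rules above (the patterns are
    # hypothetical, not taken from DEFAULT_IGNORE):
    #
    #   '*.o'        matches 'src/main.o'             -- no slash, so only the
    #                                                    last component is tested
    #   'doc/*.tmp'  matches 'doc/a.tmp', not 'a.tmp' -- slash, so it is matched
    #                                                    against the whole path
    #   './TODO'     matches 'TODO', not 'sub/TODO'   -- leading './' anchors the
    #                                                    pattern to the top level
    #
    # In each case the *pattern* is returned, so callers can either treat the
    # result as a boolean or report which rule fired.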