    realpath as _nt_realpath,
    splitdrive as _nt_splitdrive,
from os import listdir
from shutil import copyfile
from stat import (S_ISREG, S_ISDIR, S_ISLNK, ST_MODE, ST_SIZE,
                  S_ISCHR, S_ISBLK, S_ISFIFO, S_ISSOCK)
from tempfile import (
from bzrlib.symbol_versioning import (
# sha and md5 modules are deprecated in python2.6 but hashlib is available as
if sys.version_info < (2, 5):
    import md5 as _mod_md5
    import sha as _mod_sha
from bzrlib import symbol_versioning


# Cross platform wall-clock time functionality with decent resolution.
# On Linux ``time.clock`` returns only CPU time. On Windows, ``time.time()``
# only has a resolution of ~15ms. Note that ``time.clock()`` is not
# synchronized with ``time.time()``, this is only meant to be used to find
# delta times by subtracting from another call to this function.
timer_func = time.time
if sys.platform == 'win32':
    timer_func = time.clock
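# Illustrative usage (editor's note, not part of the original module): since
# timer_func is only meaningful for deltas, subtract two calls:
#
#   start = timer_func()
#   do_work()
#   elapsed = timer_func() - start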
# On win32, O_BINARY is used to indicate the file should
# be opened in binary mode, rather than text mode.
# On other platforms, O_BINARY doesn't exist, because
# they always open in binary mode, so it is okay to
# OR with 0 on those platforms.
# O_NOINHERIT and O_TEXT exist only on win32 too.
O_BINARY = getattr(os, 'O_BINARY', 0)
O_TEXT = getattr(os, 'O_TEXT', 0)
O_NOINHERIT = getattr(os, 'O_NOINHERIT', 0)
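# Illustrative usage (editor's note, not part of the original module): because
# the missing flags fall back to 0, they can be OR-ed unconditionally, e.g.:
#
#   fd = os.open(some_path, os.O_RDONLY | O_BINARY | O_NOINHERIT)
#   f = os.fdopen(fd, 'rb')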
def get_unicode_argv():
    try:
        user_encoding = get_user_encoding()
        return [a.decode(user_encoding) for a in sys.argv[1:]]
    except UnicodeDecodeError:
        raise errors.BzrError(("Parameter '%r' is unsupported by the current "
def make_readonly(filename):
    """Make a filename read-only."""
    mod = os.lstat(filename).st_mode
    if not stat.S_ISLNK(mod):
        os.chmod(filename, mod)
def make_writable(filename):
    mod = os.lstat(filename).st_mode
    if not stat.S_ISLNK(mod):
        os.chmod(filename, mod)
def minimum_path_selection(paths):
    """Return the smallest subset of paths that, via is_inside, covers everything in paths.

    :param paths: A container (and hence not None) of paths.
    :return: A set of paths sufficient to include everything in paths via
        is_inside, drawn from the paths parameter.

        return path.split('/')
    sorted_paths = sorted(list(paths), key=sort_key)

    search_paths = [sorted_paths[0]]
    for path in sorted_paths[1:]:
        if not is_inside(search_paths[-1], path):
            # This path is unique, add it
            search_paths.append(path)

    return set(search_paths)
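# Illustrative example (editor's note, not part of the original module):
#
#   >>> minimum_path_selection(['a/b', 'a/b/c', 'd'])
#   set(['a/b', 'd'])
#
# 'a/b/c' is dropped because it is already inside 'a/b'.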
    for dirname in dir_list:
        if is_inside(dirname, fname) or is_inside(fname, dirname):
def pumpfile(from_file, to_file, read_length=-1, buff_size=32768,
             report_activity=None, direction='read'):
    """Copy contents of one file to another.

    The read_length can either be -1 to read to end-of-file (EOF) or
    it can specify the maximum number of bytes to read.

    The buff_size represents the maximum size for each read operation
    performed on from_file.

    :param report_activity: Call this as bytes are read, see
        Transport._report_activity
    :param direction: Will be passed to report_activity

    :return: The number of bytes copied.

        # read specified number of bytes
        while read_length > 0:
            num_bytes_to_read = min(read_length, buff_size)
            block = from_file.read(num_bytes_to_read)
            if report_activity is not None:
                report_activity(len(block), direction)

            actual_bytes_read = len(block)
            read_length -= actual_bytes_read
            length += actual_bytes_read

            block = from_file.read(buff_size)
            if report_activity is not None:
                report_activity(len(block), direction)
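# Illustrative usage (editor's sketch, not part of the original module):
# copy a file in 32KiB chunks while reporting progress through a callback.
#
#   def report(byte_count, direction):
#       sys.stderr.write('%s %d bytes\n' % (direction, byte_count))
#
#   copied = pumpfile(open('src.bin', 'rb'), open('dst.bin', 'wb'),
#                     report_activity=report)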
def pump_string_file(bytes, file_handle, segment_size=None):
    """Write bytes to file_handle in many smaller writes.

    :param bytes: The string to write.
    :param file_handle: The file to write to.

    # Write data in chunks rather than all at once, because very large
    # writes fail on some platforms (e.g. Windows with SMB mounted
        segment_size = 5242880 # 5MB
    segments = range(len(bytes) / segment_size + 1)
    write = file_handle.write
    for segment_index in segments:
        segment = buffer(bytes, segment_index * segment_size, segment_size)
def file_iterator(input_file, readsize=32768):


def local_time_offset(t=None):
    """Return offset of local zone from GMT, either at present or at time t."""
    # python2.3 localtime() can't take None
    offset = datetime.fromtimestamp(t) - datetime.utcfromtimestamp(t)
    return offset.days * 86400 + offset.seconds
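# Illustrative example (editor's note, not part of the original module): for a
# process running at UTC+2 this returns 7200 (seconds east of GMT); zones west
# of GMT yield negative values.
#
#   offset = local_time_offset()    # e.g. 7200 for UTC+2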
weekdays = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
_default_format_by_weekday_num = [wd + " %Y-%m-%d %H:%M:%S" for wd in weekdays]


def format_date(t, offset=0, timezone='original', date_fmt=None,
                show_offset=True):
    """Return a formatted date string.

    :param t: Seconds since the epoch.
    :param offset: Timezone offset in seconds east of utc.
    :param timezone: How to display the time: 'utc', 'original' for the
        timezone specified by offset, or 'local' for the process's current
    :param date_fmt: strftime format.
    :param show_offset: Whether to append the timezone.

    (date_fmt, tt, offset_str) = \
        _format_date(t, offset, timezone, date_fmt, show_offset)
    date_fmt = date_fmt.replace('%a', weekdays[tt[6]])
    date_str = time.strftime(date_fmt, tt)
    return date_str + offset_str
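# Illustrative usage (editor's sketch, not part of the original module):
# format a timestamp in its recorded timezone, two hours east of UTC.
#
#   s = format_date(1234567890.0, offset=7200, timezone='original')
#   # should yield something like 'Sat 2009-02-14 01:31:30 +0200'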
# Cache of formatted offset strings
def format_date_with_offset_in_original_timezone(t, offset=0,
        _cache=_offset_cache):
    """Return a formatted date string in the original timezone.

    This routine may be faster than format_date.

    :param t: Seconds since the epoch.
    :param offset: Timezone offset in seconds east of utc.

    tt = time.gmtime(t + offset)
    date_fmt = _default_format_by_weekday_num[tt[6]]
    date_str = time.strftime(date_fmt, tt)
    offset_str = _cache.get(offset, None)
    if offset_str is None:
        offset_str = ' %+03d%02d' % (offset / 3600, (offset / 60) % 60)
        _cache[offset] = offset_str
    return date_str + offset_str
def format_local_date(t, offset=0, timezone='original', date_fmt=None,
    """Return a unicode date string formatted according to the current locale.

    :param t: Seconds since the epoch.
    :param offset: Timezone offset in seconds east of utc.
    :param timezone: How to display the time: 'utc', 'original' for the
        timezone specified by offset, or 'local' for the process's current
    :param date_fmt: strftime format.
    :param show_offset: Whether to append the timezone.

    (date_fmt, tt, offset_str) = \
        _format_date(t, offset, timezone, date_fmt, show_offset)
    date_str = time.strftime(date_fmt, tt)
    if not isinstance(date_str, unicode):
        date_str = date_str.decode(get_user_encoding(), 'replace')
    return date_str + offset_str
def _format_date(t, offset, timezone, date_fmt, show_offset):
    if timezone == 'utc':
        tt = time.gmtime(t)
        tt = time.localtime(t)
        offset = local_time_offset(t)
        raise errors.UnsupportedTimezoneFormat(timezone)
    if date_fmt is None:
        date_fmt = "%a %Y-%m-%d %H:%M:%S"
        offset_str = ' %+03d%02d' % (offset / 3600, (offset / 60) % 60)
    return (date_fmt, tt, offset_str)
def compact_date(when):
    return time.strftime('%Y%m%d%H%M%S', time.gmtime(when))
def format_delta(delta):
    """Get a nice looking string for a time delta.

    :param delta: The time difference in seconds, can be positive or negative.
        positive indicates time in the past, negative indicates time in the
        future. (usually time.time() - stored_time)
    :return: String formatted to show approximate resolution

        direction = 'in the future'
    if seconds < 90: # print seconds up to 90 seconds
            return '%d second %s' % (seconds, direction,)
            return '%d seconds %s' % (seconds, direction)

    minutes = int(seconds / 60)
    seconds -= 60 * minutes
    if minutes < 90: # print minutes, seconds up to 90 minutes
            return '%d minute, %d second%s %s' % (
                minutes, seconds, plural_seconds, direction)
        return '%d minutes, %d second%s %s' % (
            minutes, seconds, plural_seconds, direction)

    hours = int(minutes / 60)
    minutes -= 60 * hours
        return '%d hour, %d minute%s %s' % (hours, minutes,
                                            plural_minutes, direction)
    return '%d hours, %d minute%s %s' % (hours, minutes,
                                         plural_minutes, direction)
    """Return size of given open file."""


            raise errors.BzrError("sorry, %r not allowed in path" % f)
        elif (f == '.') or (f == ''):


        if (f == '..') or (f is None) or (f == ''):
            raise errors.BzrError("sorry, %r not allowed in path" % f)
    return pathjoin(*p)
def parent_directories(filename):
    """Return the list of parent directories, deepest first.

    For example, parent_directories("a/b/c") -> ["a/b", "a"].

    parts = splitpath(dirname(filename))
        parents.append(joinpath(parts))
_extension_load_failures = []


def failed_to_load_extension(exception):
    """Handle failing to load a binary extension.

    This should be called from the ImportError block guarding the attempt to
    import the native extension. If this function returns, the pure-Python
    implementation should be loaded instead::

    >>> import bzrlib._fictional_extension_pyx
    >>> except ImportError, e:
    >>> bzrlib.osutils.failed_to_load_extension(e)
    >>> import bzrlib._fictional_extension_py
    """
    # NB: This docstring is just an example, not a doctest, because doctest
    # currently can't cope with the use of lazy imports in this namespace --

    # This currently doesn't report the failure at the time it occurs, because
    # they tend to happen very early in startup when we can't check config
    # files etc, and also we want to report all failures but not spam the user
    from bzrlib import trace
    exception_str = str(exception)
    if exception_str not in _extension_load_failures:
        trace.mutter("failed to load compiled extension: %s" % exception_str)
        _extension_load_failures.append(exception_str)
def report_extension_load_failures():
    if not _extension_load_failures:
    from bzrlib.config import GlobalConfig
    if GlobalConfig().get_user_option_as_bool('ignore_missing_extensions'):
    # the warnings framework should by default show this only once
    from bzrlib.trace import warning
        "bzr: warning: some compiled extensions could not be loaded; "
        "see <https://answers.launchpad.net/bzr/+faq/703>")
    # we no longer show the specific missing extensions here, because it makes
    # the message too long and scary - see
    # https://bugs.launchpad.net/bzr/+bug/430529


try:
    from bzrlib._chunks_to_lines_pyx import chunks_to_lines
except ImportError, e:
    failed_to_load_extension(e)
    from bzrlib._chunks_to_lines_py import chunks_to_lines
def split_lines(s):
    """Split s into lines, but without removing the newline characters."""
    # Trivially convert a fulltext into a 'chunked' representation, and let
    # chunks_to_lines do the heavy lifting.
    if isinstance(s, str):
        # chunks_to_lines only supports 8-bit strings
        return chunks_to_lines([s])
    return _split_lines(s)
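# Illustrative example (editor's note, not part of the original module):
#
#   >>> split_lines('foo\nbar\nbaz')
#   ['foo\n', 'bar\n', 'baz']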
def _split_lines(s):
    """Split s into lines, but without removing the newline characters.

    This supports Unicode or plain string objects.
    """
    lines = s.split('\n')
    result = [line + '\n' for line in lines[:-1]]
    os.path.commonprefix (python2.4) has a bad bug that it works just
    on string prefixes, assuming that '/u' is a prefix of '/u2'. This
    avoids that problem.

    NOTE: `base` should not have a trailing slash otherwise you'll get
    PathNotChild exceptions regardless of `path`.
    """

    if len(base) < MIN_ABS_PATHLENGTH:
        # must have space for e.g. a drive letter
        raise ValueError('%r is too short to calculate a relative path'

    rp = abspath(path)

        if len(head) <= len(base) and head != base:
            raise errors.PathNotChild(rp, base)
        if head == base:
        head, tail = split(head)

    return pathjoin(*reversed(s))
def _cicp_canonical_relpath(base, path):
    """Return the canonical path relative to base.

    Like relpath, but on case-insensitive-case-preserving file-systems, this
    will return the relpath as stored on the file-system rather than in the
    case specified in the input string, for all existing portions of the path.

    This will cause O(N) behaviour if called for every path in a tree; if you
    have a number of paths to convert, you should use canonical_relpaths().
    """
    # TODO: it should be possible to optimize this for Windows by using the
    # win32 API FindFiles function to look for the specified name - but using
    # os.listdir() still gives us the correct, platform agnostic semantics in
    rel = relpath(base, path)
    # '.' will have been turned into ''
    abs_base = abspath(base)
    _listdir = os.listdir

    # use an explicit iterator so we can easily consume the rest on early exit.
    bit_iter = iter(rel.split('/'))
    for bit in bit_iter:
        try:
            next_entries = _listdir(current)
        except OSError: # enoent, eperm, etc
            # We can't find this in the filesystem, so just append the
            current = pathjoin(current, bit, *list(bit_iter))
        for look in next_entries:
            if lbit == look.lower():
                current = pathjoin(current, look)
            # got to the end, nothing matched, so we just return the
            # non-existing bits as they were specified (the filename may be
            # the target of a move, for example).
            current = pathjoin(current, bit, *list(bit_iter))
    return current[len(abs_base):].lstrip('/')
# XXX - TODO - we need better detection/integration of case-insensitive
# file-systems; Linux often sees FAT32 devices (or NFS-mounted OSX
# filesystems), for example, so could probably benefit from the same basic
# support there. For now though, only Windows and OSX get that support, and
# they get it for *all* file-systems!
if sys.platform in ('win32', 'darwin'):
    canonical_relpath = _cicp_canonical_relpath
else:
    canonical_relpath = relpath


def canonical_relpaths(base, paths):
    """Create an iterable to canonicalize a sequence of relative paths.

    The intent is for this implementation to use a cache, vastly speeding
    up multiple transformations in the same directory.
    """
    # but for now, we haven't optimized...
    return [canonical_relpath(base, p) for p in paths]
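# Illustrative usage (editor's sketch, not part of the original module): on a
# case-insensitive but case-preserving filesystem, recover the on-disk casing
# of user-supplied paths relative to a hypothetical tree_base directory.
#
#   for p in canonical_relpaths(tree_base, ['readme.TXT', 'SRC/main.py']):
#       print p    # e.g. 'README.txt', 'src/main.py'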
def safe_unicode(unicode_or_utf8_string):
    """Coerce unicode_or_utf8_string into unicode.

    If it is unicode, it is returned.
    Otherwise it is decoded from utf-8. If decoding fails, the exception is
    wrapped in a BzrBadParameterNotUnicode exception.
    """
    if isinstance(unicode_or_utf8_string, unicode):
        return unicode_or_utf8_string
    try:
        return unicode_or_utf8_string.decode('utf8')
    except UnicodeDecodeError:
        raise errors.BzrBadParameterNotUnicode(unicode_or_utf8_string)
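# Illustrative example (editor's note, not part of the original module):
#
#   >>> safe_unicode('caf\xc3\xa9')     # valid UTF-8 bytes are decoded
#   u'caf\xe9'
#   >>> safe_unicode(u'caf\xe9')        # unicode passes straight through
#   u'caf\xe9'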
def safe_utf8(unicode_or_utf8_string):
    """Coerce unicode_or_utf8_string to a utf8 string.

    If it is a str, it is returned.
    If it is Unicode, it is encoded into a utf-8 string.
    """
    if isinstance(unicode_or_utf8_string, str):
        # TODO: jam 20070209 This is overkill, and probably has an impact on
        #       performance if we are dealing with lots of apis that want a
        try:
            # Make sure it is a valid utf-8 string
            unicode_or_utf8_string.decode('utf-8')
        except UnicodeDecodeError:
            raise errors.BzrBadParameterNotUnicode(unicode_or_utf8_string)
        return unicode_or_utf8_string
    return unicode_or_utf8_string.encode('utf-8')
_revision_id_warning = ('Unicode revision ids were deprecated in bzr 0.15.'
                        ' Revision id generators should be creating utf8'


def safe_revision_id(unicode_or_utf8_string, warn=True):
    """Revision ids should now be utf8, but at one point they were unicode.

    :param unicode_or_utf8_string: A possibly Unicode revision_id. (can also be
    :param warn: Functions that are sanitizing user data can set warn=False
    :return: None or a utf8 revision id.
    """
    if (unicode_or_utf8_string is None
        or unicode_or_utf8_string.__class__ == str):
        return unicode_or_utf8_string
        symbol_versioning.warn(_revision_id_warning, DeprecationWarning,
    return cache_utf8.encode(unicode_or_utf8_string)
_file_id_warning = ('Unicode file ids were deprecated in bzr 0.15. File id'
                    ' generators should be creating utf8 file ids.')


def safe_file_id(unicode_or_utf8_string, warn=True):
    """File ids should now be utf8, but at one point they were unicode.

    This is the same as safe_utf8, except it uses the cached encode functions
    to save a little bit of performance.

    :param unicode_or_utf8_string: A possibly Unicode file_id. (can also be
    :param warn: Functions that are sanitizing user data can set warn=False
    :return: None or a utf8 file id.
    """
    if (unicode_or_utf8_string is None
        or unicode_or_utf8_string.__class__ == str):
        return unicode_or_utf8_string
        symbol_versioning.warn(_file_id_warning, DeprecationWarning,
    return cache_utf8.encode(unicode_or_utf8_string)
_platform_normalizes_filenames = False
    normalized_filename = _inaccessible_normalized_filename
def set_signal_handler(signum, handler, restart_syscall=True):
    """A wrapper for signal.signal that also calls siginterrupt(signum, False)
    on platforms that support that.

    :param restart_syscall: if set, allow syscalls interrupted by a signal to
        automatically restart (by calling `signal.siginterrupt(signum,
        False)`). May be ignored if the feature is not available on this
        platform or Python version.
    """
    try:
        siginterrupt = signal.siginterrupt
        # This python implementation doesn't provide signal support, hence no
    except AttributeError:
        # siginterrupt doesn't exist on this platform, or for this version
        siginterrupt = lambda signum, flag: None
        def sig_handler(*args):
            # Python resets the siginterrupt flag when a signal is
            # received. <http://bugs.python.org/issue8354>
            # As a workaround for some cases, set it back the way we want it.
            siginterrupt(signum, False)
            # Now run the handler function passed to set_signal_handler.
        sig_handler = handler
    old_handler = signal.signal(signum, sig_handler)
        siginterrupt(signum, False)
default_terminal_width = 80
"""The default terminal width for ttys.

This is defined so that higher levels can share a common fallback value when
terminal_width() returns None.
"""
def terminal_width():
    """Return terminal width.

    None is returned if the width can't be established precisely.

    - if BZR_COLUMNS is set, returns its value
    - if there is no controlling terminal, returns None
    - if COLUMNS is set, returns its value,

    From there, we need to query the OS to get the size of the controlling
    - get termios.TIOCGWINSZ
    - if an error occurs or a negative value is obtained, returns None

    - win32utils.get_console_size() decides,
    - returns None on error (provided default value)
    """
    # If BZR_COLUMNS is set, take it, user is always right
    try:
        return int(os.environ['BZR_COLUMNS'])
    except (KeyError, ValueError):

    isatty = getattr(sys.stdout, 'isatty', None)
    if isatty is None or not isatty():
        # Don't guess, setting BZR_COLUMNS is the recommended way to override.

    # If COLUMNS is set, take it, the terminal knows better (even inside a
    # given terminal, the application can decide to set COLUMNS to a lower
    # value (split screen) or a bigger value (scroll bars))
    try:
        return int(os.environ['COLUMNS'])
    except (KeyError, ValueError):

    width, height = _terminal_size(None, None)
    # Consider invalid values as meaning no width
def _win32_terminal_size(width, height):
    width, height = win32utils.get_console_size(defaultx=width, defaulty=height)
    return width, height
def _ioctl_terminal_size(width, height):
    try:
        import struct, fcntl, termios
        s = struct.pack('HHHH', 0, 0, 0, 0)
        x = fcntl.ioctl(1, termios.TIOCGWINSZ, s)
        height, width = struct.unpack('HHHH', x)[0:2]
    except (IOError, AttributeError):
    return width, height
_terminal_size = None
"""Returns the terminal size as (width, height).

:param width: Default value for width.
:param height: Default value for height.

This is defined specifically for each OS and queries the size of the controlling
terminal. If any error occurs, the provided default values should be returned.
"""
if sys.platform == 'win32':
    _terminal_size = _win32_terminal_size
else:
    _terminal_size = _ioctl_terminal_size
def _terminal_size_changed(signum, frame):
    """Set COLUMNS upon receiving a SIGnal for WINdow size CHange."""
    width, height = _terminal_size(None, None)
    if width is not None:
        os.environ['COLUMNS'] = str(width)


_registered_sigwinch = False

def watch_sigwinch():
    """Register for SIGWINCH, once and only once.

    Do nothing if the signal module is not available.
    """
    global _registered_sigwinch
    if not _registered_sigwinch:
        if getattr(signal, "SIGWINCH", None) is not None:
            set_signal_handler(signal.SIGWINCH, _terminal_size_changed)
            # python doesn't provide signal support, nothing we can do about it
        _registered_sigwinch = True
def supports_executable():
    return sys.platform != "win32"
def supports_posix_readonly():
    """Return True if 'readonly' has POSIX semantics, False otherwise.

    Notably, a win32 readonly file cannot be deleted, unlike POSIX where the
    directory controls creation/deletion, etc.

    And under win32, readonly means that the directory itself cannot be
    deleted. The contents of a readonly directory can be changed, unlike POSIX
    where files in readonly directories cannot be added, deleted or renamed.
    """
    return sys.platform != "win32"
def set_or_unset_env(env_variable, value):
    """Modify the environment, setting or removing the env_variable.

    :param env_variable: The environment variable in question
    :param value: The value to set the environment to. If None, then
        the variable will be removed.
    :return: The original value of the environment variable.
    """
    orig_val = os.environ.get(env_variable)
        if orig_val is not None:
            del os.environ[env_variable]
        if isinstance(value, unicode):
            value = value.encode(get_user_encoding())
        os.environ[env_variable] = value
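# Illustrative usage (editor's sketch, not part of the original module):
# temporarily override an environment variable and restore it afterwards.
#
#   old = set_or_unset_env('BZR_COLUMNS', '120')
#   try:
#       run_something()
#   finally:
#       set_or_unset_env('BZR_COLUMNS', old)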
_validWin32PathRE = re.compile(r'^([A-Za-z]:[/\\])?[^:<>*"?\|]*$')


def check_legal_path(path):
    """Check whether the supplied path is legal.

    This is only required on Windows, so we don't test on other platforms
    """
    if sys.platform != "win32":
    if _validWin32PathRE.match(path) is None:
        raise errors.IllegalPath(path)
_WIN32_ERROR_DIRECTORY = 267 # Similar to errno.ENOTDIR


def _is_error_enotdir(e):
    """Check if this exception represents ENOTDIR.

    Unfortunately, python is very inconsistent about the exception
    here. The cases are:
      1) Linux, Mac OSX all versions seem to set errno == ENOTDIR
      2) Windows, Python2.4, uses errno == ERROR_DIRECTORY (267)
         which is the windows error code.
      3) Windows, Python2.5 uses errno == EINVAL and
         winerror == ERROR_DIRECTORY

    :param e: An Exception object (expected to be OSError with an errno
        attribute, but we should be able to cope with anything)
    :return: True if this represents an ENOTDIR error. False otherwise.
    """
    en = getattr(e, 'errno', None)
    if (en == errno.ENOTDIR
        or (sys.platform == 'win32'
            and (en == _WIN32_ERROR_DIRECTORY
                 or (en == errno.EINVAL
                     and getattr(e, 'winerror', None) == _WIN32_ERROR_DIRECTORY)
def walkdirs(top, prefix=""):
    """Yield data about all the directories in a tree.

    This yields all the data about the contents of a directory at a time.
    After each directory has been yielded, if the caller has mutated the list
    to exclude some directories, they are then not descended into.

    The data yielded is of the form:
    ((directory-relpath, directory-path-from-top),
    [(relpath, basename, kind, lstat, path-from-top), ...]),
     - directory-relpath is the relative path of the directory being returned
       with respect to top. prefix is prepended to this.
     - directory-path-from-root is the path including top for this directory.
       It is suitable for use with os functions.
     - relpath is the relative path within the subtree being walked.
     - basename is the basename of the path
       present within the tree - but it may be recorded as versioned. See
     - lstat is the stat data *if* the file was statted.
     - planned, not implemented:
       path_from_tree_root is the path from the root of the tree.

    :param prefix: Prefix the relpaths that are yielded with 'prefix'. This
        allows one to walk a subtree but get paths that are relative to a tree
        rooted higher up.
    :return: an iterator over the dirs.
    """
    #TODO there is a bit of a smell where the results of the directory-
    # summary in this, and the path from the root, may not agree
    # depending on top and prefix - i.e. ./foo and foo as a pair leads to
    # potentially confusing output. We should make this more robust - but
    # not at a speed cost. RBC 20060731
    _directory = _directory_kind
    _listdir = os.listdir
    _kind_from_mode = file_kind_from_stat_mode
    pending = [(safe_unicode(prefix), "", _directory, None, safe_unicode(top))]
        # 0 - relpath, 1- basename, 2- kind, 3- stat, 4-toppath
        relroot, _, _, _, top = pending.pop()
            relprefix = relroot + u'/'
        top_slash = top + u'/'

        append = dirblock.append
            names = sorted(_listdir(top))
            if not _is_error_enotdir(e):
                abspath = top_slash + name
                statvalue = _lstat(abspath)
                kind = _kind_from_mode(statvalue.st_mode)
                append((relprefix + name, name, kind, statvalue, abspath))
        yield (relroot, top), dirblock

        # push the user specified dirs from dirblock
        pending.extend(d for d in reversed(dirblock) if d[2] == _directory)
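# Illustrative usage (editor's sketch, not part of the original module): walk a
# tree, pruning '.bzr' control directories by mutating the yielded block in
# place, which stops them from being descended into.
#
#   for (dirrelpath, dirabspath), entries in walkdirs('/path/to/tree'):
#       entries[:] = [e for e in entries if e[1] != '.bzr']
#       for relpath, basename, kind, lstat_value, abspath in entries:
#           print kind, relpath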
class DirReader(object):
    """An interface for reading directories."""

    def top_prefix_to_starting_dir(self, top, prefix=""):
        """Convert top and prefix to a starting dir entry.

        :param top: A utf8 path
        :param prefix: An optional utf8 path to prefix output relative paths
        :return: A tuple starting with prefix, and ending with the native
        """
        raise NotImplementedError(self.top_prefix_to_starting_dir)

    def read_dir(self, prefix, top):
        """Read a specific dir.

        :param prefix: A utf8 prefix to be prepended to the path basenames.
        :param top: A natively encoded path to read.
        :return: A list of the directory's contents. Each item contains:
            (utf8_relpath, utf8_name, kind, lstatvalue, native_abspath)
        """
        raise NotImplementedError(self.read_dir)
_selected_dir_reader = None


def _walkdirs_utf8(top, prefix=""):
    """Yield data about all the directories in a tree.

    This yields the same information as walkdirs() only each entry is yielded
    in utf-8. On platforms which have a filesystem encoding of utf8 the paths
    are returned as exact byte-strings.

    :return: yields a tuple of (dir_info, [file_info])
        dir_info is (utf8_relpath, path-from-top)
        file_info is (utf8_relpath, utf8_name, kind, lstat, path-from-top)
        if top is an absolute path, path-from-top is also an absolute path.
        path-from-top might be unicode or utf8, but it is the correct path to
        pass to os functions to affect the file in question. (such as os.lstat)
    """
    global _selected_dir_reader
    if _selected_dir_reader is None:
        fs_encoding = _fs_enc.upper()
        if sys.platform == "win32" and win32utils.winver == 'Windows NT':
            # Win98 doesn't have unicode apis like FindFirstFileW
            # TODO: We possibly could support Win98 by falling back to the
            #       original FindFirstFile, and using TCHAR instead of WCHAR,
            #       but that gets a bit tricky, and requires custom compiling
                from bzrlib._walkdirs_win32 import Win32ReadDir
                _selected_dir_reader = Win32ReadDir()
        elif fs_encoding in ('UTF-8', 'US-ASCII', 'ANSI_X3.4-1968'):
            # ANSI_X3.4-1968 is a form of ASCII
            try:
                from bzrlib._readdir_pyx import UTF8DirReader
                _selected_dir_reader = UTF8DirReader()
            except ImportError, e:
                failed_to_load_extension(e)

    if _selected_dir_reader is None:
        # Fallback to the python version
        _selected_dir_reader = UnicodeDirReader()

    # 0 - relpath, 1- basename, 2- kind, 3- stat, 4-toppath
    # But we don't actually use 1-3 in pending, so set them to None
    pending = [[_selected_dir_reader.top_prefix_to_starting_dir(top, prefix)]]
    read_dir = _selected_dir_reader.read_dir
    _directory = _directory_kind
        relroot, _, _, _, top = pending[-1].pop()
        dirblock = sorted(read_dir(relroot, top))
        yield (relroot, top), dirblock
        # push the user specified dirs from dirblock
        next = [d for d in reversed(dirblock) if d[2] == _directory]
            pending.append(next)
class UnicodeDirReader(DirReader):
    """A dir reader for non-utf8 file systems, which transcodes."""

    __slots__ = ['_utf8_encode']

        self._utf8_encode = codecs.getencoder('utf8')

    def top_prefix_to_starting_dir(self, top, prefix=""):
        """See DirReader.top_prefix_to_starting_dir."""
        return (safe_utf8(prefix), None, None, None, safe_unicode(top))
    def read_dir(self, prefix, top):
        """Read a single directory from a non-utf8 file system.

        top, and the abspath element in the output are unicode, all other paths
        are utf8. Local disk IO is done via unicode calls to listdir etc.

        This is currently the fallback code path when the filesystem encoding is
        not UTF-8. It may be better to implement an alternative so that we can
        safely handle paths that are not properly decodable in the current

        See DirReader.read_dir for details.
        """
        _utf8_encode = self._utf8_encode
        _listdir = os.listdir
        _kind_from_mode = file_kind_from_stat_mode

        relprefix = prefix + '/'
        top_slash = top + u'/'

        append = dirblock.append
        for name in sorted(_listdir(top)):
            try:
                name_utf8 = _utf8_encode(name)[0]
            except UnicodeDecodeError:
                raise errors.BadFilenameEncoding(
                    _utf8_encode(relprefix)[0] + name, _fs_enc)
            abspath = top_slash + name
            statvalue = _lstat(abspath)
            kind = _kind_from_mode(statvalue.st_mode)
            append((relprefix + name_utf8, name_utf8, kind, statvalue, abspath))
def copy_tree(from_path, to_path, handlers={}):
    """Copy all of the entries in from_path into to_path.

    :param from_path: The base directory to copy.
    :param to_path: The target directory. If it does not exist, it will
    :param handlers: A dictionary of functions, which takes a source and
    try:
        user_encoding = locale.getpreferredencoding()
    except locale.Error, e:
        sys.stderr.write('bzr: warning: %s\n'
                         ' Could not determine what text encoding to use.\n'
                         ' This error usually means your Python interpreter\n'
                         ' doesn\'t support the locale set by $LANG (%s)\n'
                         " Continuing with ascii encoding.\n"
                         % (e, os.environ.get('LANG')))
        user_encoding = 'ascii'
    # Windows returns 'cp0' to indicate there is no code page. So we'll just
    # treat that as ASCII, and not support printing unicode characters to the
    # For python scripts run under vim, we get '', so also treat that as ASCII
    if user_encoding in (None, 'cp0', ''):
        user_encoding = 'ascii'
            codecs.lookup(user_encoding)
            sys.stderr.write('bzr: warning:'
                             ' unknown encoding %s.'
                             ' Continuing with ascii encoding.\n'
            user_encoding = 'ascii'

    _cached_user_encoding = user_encoding

    return user_encoding
def get_host_name():
    """Return the current unicode host name.

    This is meant to be used in place of socket.gethostname() because that
    behaves inconsistently on different platforms.
    """
    if sys.platform == "win32":
        return win32utils.get_host_name()
    return socket.gethostname().decode(get_user_encoding())
# We must not read/write any more than 64k at a time from/to a socket so we
# don't risk "no buffer space available" errors on some platforms. Windows in
# particular is likely to throw WSAECONNABORTED or WSAENOBUFS if given too much
MAX_SOCKET_CHUNK = 64 * 1024
def read_bytes_from_socket(sock, report_activity=None,
        max_read_size=MAX_SOCKET_CHUNK):
    """Read up to max_read_size bytes from sock and notify of progress.

    Translates "Connection reset by peer" into file-like EOF (return an
    empty string rather than raise an error), and repeats the recv if
    interrupted by a signal.
    """
        try:
            bytes = sock.recv(max_read_size)
        except socket.error, e:
            if eno == getattr(errno, "WSAECONNRESET", errno.ECONNRESET):
                # The connection was closed by the other side. Callers expect
                # an empty string to signal end-of-stream.
            elif eno == errno.EINTR:
                # Retry the interrupted recv.
            if report_activity is not None:
                report_activity(len(bytes), 'read')
def recv_all(socket, count):
    """Receive an exact number of bytes.

    Regular Socket.recv() may return less than the requested number of bytes,
    depending on what's in the OS buffer. MSG_WAITALL is not available
    on all platforms, but this should work everywhere. This will return
    less than the requested amount if the remote end closes.

    This isn't optimized and is intended mostly for use in testing.
    """
    while len(b) < count:
        new = read_bytes_from_socket(socket, None, count - len(b))
def send_all(sock, bytes, report_activity=None):
    """Send all bytes on a socket.

    Breaks large blocks in smaller chunks to avoid buffering limitations on
    some platforms, and catches EINTR which may be thrown if the send is
    interrupted by a signal.

    This is preferred to socket.sendall(), because it avoids portability bugs
    and provides activity reporting.

    :param report_activity: Call this as bytes are read, see
        Transport._report_activity
    """
    byte_count = len(bytes)
    while sent_total < byte_count:
        try:
            sent = sock.send(buffer(bytes, sent_total, MAX_SOCKET_CHUNK))
        except socket.error, e:
            if e.args[0] != errno.EINTR:
                raise
        report_activity(sent, 'write')
def dereference_path(path):
    """Determine the real path to a file.

    All parent elements are dereferenced. But the file itself is not

    :param path: The original path. May be absolute or relative.
    :return: the real path *to* the file
    """
    parent, base = os.path.split(path)
    # The pathjoin for '.' is a workaround for Python bug #1213894.
    # (initial path components aren't dereferenced)
    return pathjoin(realpath(pathjoin('.', parent)), base)
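# Illustrative example (editor's note, not part of the original module): with a
# symlink /tmp/link -> /var/data, the parent is resolved but the final element
# is kept as given:
#
#   dereference_path('/tmp/link/file')   # -> '/var/data/file'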
def supports_mapi():
    """Return True if we can use MAPI to launch a mail client."""
    return sys.platform == "win32"
def resource_string(package, resource_name):
    """Load a resource from a package and return it as a string.

    Note: Only packages that start with bzrlib are currently supported.

    This is designed to be a lightweight implementation of resource
    loading in a way which is API compatible with the same API from
    http://peak.telecommunity.com/DevCenter/PkgResources#basic-resource-access.
    If and when pkg_resources becomes a standard library, this routine
    """
    # Check package name is within bzrlib
    if package == "bzrlib":
        resource_relpath = resource_name
    elif package.startswith("bzrlib."):
        package = package[len("bzrlib."):].replace('.', os.sep)
        resource_relpath = pathjoin(package, resource_name)
    else:
        raise errors.BzrError('resource package %s not in bzrlib' % package)

    # Map the resource to a file and read its contents
    base = dirname(bzrlib.__file__)
    if getattr(sys, 'frozen', None): # bzr.exe
        base = abspath(pathjoin(base, '..', '..'))
    filename = pathjoin(base, resource_relpath)
    return open(filename, 'rU').read()
def file_kind_from_stat_mode_thunk(mode):
    global file_kind_from_stat_mode
    if file_kind_from_stat_mode is file_kind_from_stat_mode_thunk:
        try:
            from bzrlib._readdir_pyx import UTF8DirReader
            file_kind_from_stat_mode = UTF8DirReader().kind_from_mode
        except ImportError, e:
            # This is one time where we won't warn that an extension failed to
            # load. The extension is never available on Windows anyway.
            from bzrlib._readdir_py import (
                _kind_from_mode as file_kind_from_stat_mode
    return file_kind_from_stat_mode(mode)
file_kind_from_stat_mode = file_kind_from_stat_mode_thunk
def file_kind(f, _lstat=os.lstat):
    return file_kind_from_stat_mode(_lstat(f).st_mode)
        if getattr(e, 'errno', None) in (errno.ENOENT, errno.ENOTDIR):
            raise errors.NoSuchFile(f)
def until_no_eintr(f, *a, **kw):
    """Run f(*a, **kw), retrying if an EINTR error occurs.

    WARNING: you must be certain that it is safe to retry the call repeatedly
    if EINTR does occur. This is typically only true for low-level operations
    like os.read. If in any doubt, don't use this.

    Keep in mind that this is not a complete solution to EINTR. There is
    probably code in the Python standard library and other dependencies that
    may encounter EINTR if a signal arrives (and there is a signal handler for
    that signal). So this function can reduce the impact for IO that bzrlib
    directly controls, but it is not a complete solution.
    """
    # Borrowed from Twisted's twisted.python.util.untilConcludes function.
        except (IOError, OSError), e:
            if e.errno == errno.EINTR:
def re_compile_checked(re_string, flags=0, where=""):
    """Return a compiled re, or raise a sensible error.

    This should only be used when compiling user-supplied REs.

    :param re_string: Text form of regular expression.
    :param flags: eg re.IGNORECASE
    :param where: Message explaining to the user the context where
        it occurred, eg 'log search filter'.
    """
    # from https://bugs.launchpad.net/bzr/+bug/251352
        re_obj = re.compile(re_string, flags)
            where = ' in ' + where
        # despite the name 'error' is a type
        raise errors.BzrCommandError('Invalid regular expression%s: %r: %s'
            % (where, re_string, e))
if sys.platform == "win32":
        return msvcrt.getch()

        fd = sys.stdin.fileno()
        settings = termios.tcgetattr(fd)
        ch = sys.stdin.read(1)
        termios.tcsetattr(fd, termios.TCSADRAIN, settings)
if sys.platform == 'linux2':
    def _local_concurrency():
        prefix = 'processor'
        for line in file('/proc/cpuinfo', 'rb'):
            if line.startswith(prefix):
                concurrency = int(line[line.find(':')+1:]) + 1
elif sys.platform == 'darwin':
    def _local_concurrency():
        return subprocess.Popen(['sysctl', '-n', 'hw.availcpu'],
                                stdout=subprocess.PIPE).communicate()[0]
elif sys.platform[0:7] == 'freebsd':
    def _local_concurrency():
        return subprocess.Popen(['sysctl', '-n', 'hw.ncpu'],
                                stdout=subprocess.PIPE).communicate()[0]
elif sys.platform == 'sunos5':
    def _local_concurrency():
        return subprocess.Popen(['psrinfo', '-p',],
                                stdout=subprocess.PIPE).communicate()[0]
elif sys.platform == "win32":
    def _local_concurrency():
        # This appears to return the number of cores.
        return os.environ.get('NUMBER_OF_PROCESSORS')

    def _local_concurrency():


_cached_local_concurrency = None
def local_concurrency(use_cache=True):
    """Return how many processes can be run concurrently.

    Rely on platform specific implementations and default to 1 (one) if
    anything goes wrong.
    """
    global _cached_local_concurrency

    if _cached_local_concurrency is not None and use_cache:
        return _cached_local_concurrency

    concurrency = os.environ.get('BZR_CONCURRENCY', None)
    if concurrency is None:
        try:
            concurrency = _local_concurrency()
        except (OSError, IOError):
    try:
        concurrency = int(concurrency)
    except (TypeError, ValueError):
    _cached_local_concurrency = concurrency
class UnicodeOrBytesToBytesWriter(codecs.StreamWriter):
    """A stream writer that doesn't decode str arguments."""

    def __init__(self, encode, stream, errors='strict'):
        codecs.StreamWriter.__init__(self, stream, errors)
        self.encode = encode

    def write(self, object):
        if type(object) is str:
            self.stream.write(object)
        else:
            data, _ = self.encode(object, self.errors)
            self.stream.write(data)
if sys.platform == 'win32':
    def open_file(filename, mode='r', bufsize=-1):
        """This function is used to override the ``open`` builtin.

        But it uses the O_NOINHERIT flag so the file handle is not inherited by
        child processes. Deleting or renaming a closed file opened with this
        function is not blocked by child processes.
        """
        writing = 'w' in mode
        appending = 'a' in mode
        updating = '+' in mode
        binary = 'b' in mode

        # see http://msdn.microsoft.com/en-us/library/yeby3zcb%28VS.71%29.aspx
        # for the flags for each mode.
            flags |= os.O_WRONLY
            flags |= os.O_CREAT | os.O_TRUNC
            flags |= os.O_WRONLY
            flags |= os.O_CREAT | os.O_APPEND
            flags |= os.O_RDONLY
        return os.fdopen(os.open(filename, flags), mode, bufsize)