"""Implementation of Transport over SFTP, using paramiko."""

import errno
import getpass
import os
import select
import socket
import stat
import subprocess
import sys
import threading
import time
import urllib
import urlparse
import weakref

from bzrlib.config import config_dir, ensure_config_dir_exists
from bzrlib.errors import (ConnectionError,
                           FileExists,
                           TransportNotPossible,
                           NoSuchFile,
                           PathNotChild,
                           TransportError,
                           ParamikoNotPresent,
                           )
from bzrlib.osutils import pathjoin, fancy_rename, getcwd
from bzrlib.trace import mutter, warning
from bzrlib.transport import (
    register_urlparse_netloc_protocol,
    Server,
    split_url,
    ssh,
    Transport,
    )
import bzrlib.ui
import bzrlib.urlutils as urlutils

try:
    import paramiko
except ImportError, e:
    raise ParamikoNotPresent(e)
else:
    from paramiko.sftp import (SFTP_FLAG_WRITE, SFTP_FLAG_CREATE,
                               SFTP_FLAG_EXCL, SFTP_FLAG_TRUNC,
                               CMD_HANDLE, CMD_OPEN)
    from paramiko.sftp_attr import SFTPAttributes
    from paramiko.sftp_file import SFTPFile
    from paramiko.sftp_client import SFTPClient


_close_fds = True
if sys.platform == 'win32':
    # close_fds not supported on win32
    _close_fds = False

_ssh_vendor = None


def _get_ssh_vendor():
    """Find out what version of SSH is on the system."""
    global _ssh_vendor
    if _ssh_vendor is not None:
        return _ssh_vendor

    _ssh_vendor = 'none'

    try:
        p = subprocess.Popen(['ssh', '-V'],
                             stdin=subprocess.PIPE,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
        stdout, stderr = p.communicate()
        returncode = p.returncode
    except OSError:
        returncode = -1
        stdout = stderr = ''
    if 'OpenSSH' in stderr:
        mutter('ssh implementation is OpenSSH')
        _ssh_vendor = 'openssh'
    elif 'SSH Secure Shell' in stderr:
        mutter('ssh implementation is SSH Corp.')
        _ssh_vendor = 'ssh'

    if _ssh_vendor != 'none':
        return _ssh_vendor

    # XXX: 20051123 jamesh
    # A check for putty's plink or lsh would go here.
    mutter('falling back to paramiko implementation')
    return _ssh_vendor


class SFTPSubprocess:
    """A socket-like object that talks to an ssh subprocess via pipes."""

    def __init__(self, hostname, port=None, user=None):
        vendor = _get_ssh_vendor()
        assert vendor in ['openssh', 'ssh']
        if vendor == 'openssh':
            args = ['ssh',
                    '-oForwardX11=no', '-oForwardAgent=no',
                    '-oClearAllForwardings=yes', '-oProtocol=2',
                    '-oNoHostAuthenticationForLocalhost=yes']
            if port is not None:
                args.extend(['-p', str(port)])
            if user is not None:
                args.extend(['-l', user])
            args.extend(['-s', hostname, 'sftp'])
        elif vendor == 'ssh':
            args = ['ssh']
            if port is not None:
                args.extend(['-p', str(port)])
            if user is not None:
                args.extend(['-l', user])
            args.extend(['-s', 'sftp', hostname])

        self.proc = subprocess.Popen(args, close_fds=_close_fds,
                                     stdin=subprocess.PIPE,
                                     stdout=subprocess.PIPE)

    def send(self, data):
        return os.write(self.proc.stdin.fileno(), data)

    def recv_ready(self):
        # TODO: jam 20051215 this function is necessary to support the
        # pipelined() function. In reality, it probably should use
        # poll() or select() to actually return if there is data
        # available, otherwise we probably don't get any benefit
        return True

    def recv(self, count):
        return os.read(self.proc.stdout.fileno(), count)

    def close(self):
        self.proc.stdin.close()
        self.proc.stdout.close()


register_urlparse_netloc_protocol('sftp')


# This is a weakref dictionary, so that we can reuse connections
# that are still active. Long term, it might be nice to have some
# sort of expiration policy, such as disconnecting if inactive for
# a period of time. But that requires a lot more fanciness.
_connected_hosts = weakref.WeakValueDictionary()
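
# Illustrative sketch (not part of the transport; the names below are
# hypothetical): why a WeakValueDictionary works for connection reuse. An
# entry stays available for as long as something still references the
# connection, and silently disappears once nothing does, so a later lookup
# simply falls back to opening a fresh connection.
#
#   import weakref
#
#   class _DummyConnection(object):
#       """Stands in for an SFTPClient in this sketch."""
#
#   _cache = weakref.WeakValueDictionary()
#   conn = _DummyConnection()
#   _cache[('host', 22, 'user')] = conn
#   assert _cache[('host', 22, 'user')] is conn    # reused while alive
#   del conn
#   assert ('host', 22, 'user') not in _cache      # entry vanished (CPython)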

            extra = ': ' + ', '.join(error)
            raise PathNotChild(abspath, self.base, extra=extra)
        pl = len(self._path)
        return path[pl:].lstrip('/')

    def has(self, relpath):
        """
        Does the target location exist?
        """
        try:
            self._sftp.stat(self._remote_path(relpath))
            return True
        except IOError:
            return False

    def get(self, relpath):
        """
        Get the file at the given relative path.

        :param relpath: The relative path to the file
        """
        try:
            path = self._remote_path(relpath)
            f = self._sftp.file(path, mode='rb')
            if self._do_prefetch and (getattr(f, 'prefetch', None) is not None):
                f.prefetch()
            return f
        except (IOError, paramiko.SSHException), e:
            self._translate_io_exception(e, path, ': error retrieving')

    def get_partial(self, relpath, start, length=None):
        """
        Get just part of a file.

        :param relpath: Path to the file, relative to base
        :param start: The starting position to read from
        :param length: The length to read. A length of None indicates
                       read to the end of the file.
        :return: A file-like object containing at least the specified bytes.
                 Some implementations may return objects which can be read
                 past this length, but this is not guaranteed.
        """
        # TODO: implement get_partial_multi to help with knit support
        f = self.get(relpath)
        f.seek(start)
        if self._do_prefetch and hasattr(f, 'prefetch'):
            f.prefetch()
        return f

    def readv(self, relpath, offsets):
        """See Transport.readv()"""
        # We overload the default readv() because we want to use a file
        # that does not have prefetch enabled.
        # Also, if we have a new paramiko, it implements an async readv()
        try:
            path = self._remote_path(relpath)
            fp = self._sftp.file(path, mode='rb')
            readv = getattr(fp, 'readv', None)
            if readv:
                return self._sftp_readv(fp, offsets)
            mutter('seek and read %s offsets', len(offsets))
            return self._seek_and_read(fp, offsets)
        except (IOError, paramiko.SSHException), e:
            self._translate_io_exception(e, path, ': error retrieving')

    def _sftp_readv(self, fp, offsets):
        """Use the readv() member of fp to do async readv.

        And then read them using paramiko.readv(). paramiko.readv()
        does not support ranges > 64K, so it caps the request size, and
        just reads until it gets all the stuff it wants.
        """
        offsets = list(offsets)
        sorted_offsets = sorted(offsets)

        # The algorithm works as follows:
        # 1) Coalesce nearby reads into a single chunk
        #    This generates a list of combined regions, the total size
        #    and the size of the sub regions. This coalescing step is limited
        #    in the number of nearby chunks to combine, and is allowed to
        #    skip small breaks in the requests. Limiting it makes sure that
        #    we can start yielding some data earlier, and skipping means we
        #    make fewer requests. (Beneficial even when using async)
        # 2) Break up this combined regions into chunks that are smaller
        #    than 64KiB. Technically the limit is 65536, but we are a
        #    little bit conservative. This is because sftp has a maximum
        #    return chunk size of 64KiB (max size of an unsigned short)
        # 3) Issue a readv() to paramiko to create an async request for
        #    all of this data
        # 4) Read in the data as it comes back, until we've read one
        #    continuous section as determined in step 1
        # 5) Break up the full sections into hunks for the original requested
        #    offsets. And put them in a cache
        # 6) Check if the next request is in the cache, and if it is, remove
        #    it from the cache, and yield its data. Continue until no more
        #    entries are in the cache.
        # 7) loop back to step 4 until all data has been read
        #
        # (A simplified, illustrative sketch of steps 1 and 2 is given in a
        # comment after this method.)
        #
        # TODO: jam 20060725 This could be optimized one step further, by
        #       attempting to yield whatever data we have read, even before
        #       the first coallesced section has been fully processed.

        # When coalescing for use with readv(), we don't really need to
        # use any fudge factor, because the requests are made asynchronously
        coalesced = list(self._coalesce_offsets(sorted_offsets,
                               limit=self._max_readv_combine,
                               fudge_factor=0,
                               ))
        requests = []
        for c_offset in coalesced:
            start = c_offset.start
            size = c_offset.length

            # We need to break this up into multiple requests
            while size > 0:
                next_size = min(size, self._max_request_size)
                requests.append((start, next_size))
                size -= next_size
                start += next_size

        mutter('SFTP.readv() %s offsets => %s coalesced => %s requests',
               len(offsets), len(coalesced), len(requests))

        # Queue the current read until we have read the full coalesced section
        cur_data = []
        cur_data_len = 0
        cur_coalesced_stack = iter(coalesced)
        cur_coalesced = cur_coalesced_stack.next()

        # Cache the results, but only until they have been fulfilled
        data_map = {}
        # turn the list of offsets into a stack
        offset_stack = iter(offsets)
        cur_offset_and_size = offset_stack.next()

        for data in fp.readv(requests):
            cur_data.append(data)
            cur_data_len += len(data)

            if cur_data_len < cur_coalesced.length:
                continue
            assert cur_data_len == cur_coalesced.length, \
                "Somehow we read too much: %s != %s" % (cur_data_len,
                                                        cur_coalesced.length)
            all_data = ''.join(cur_data)
            cur_data = []
            cur_data_len = 0

            for suboffset, subsize in cur_coalesced.ranges:
                key = (cur_coalesced.start+suboffset, subsize)
                data_map[key] = all_data[suboffset:suboffset+subsize]

            # Now that we've read some data, see if we can yield anything back
            while cur_offset_and_size in data_map:
                this_data = data_map.pop(cur_offset_and_size)
                yield cur_offset_and_size[0], this_data
                cur_offset_and_size = offset_stack.next()

            # Now that we've read all of the data for this coalesced section
            # on to the next
            cur_coalesced = cur_coalesced_stack.next()
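
    # Illustrative sketch (hypothetical helper, not used by this transport):
    # a stripped-down version of steps 1 and 2 from the comments above,
    # coalescing contiguous (start, length) offsets and then splitting each
    # combined region into requests no larger than a maximum chunk size.
    # The real code relies on Transport._coalesce_offsets, which also handles
    # fudge factors and combine limits.
    #
    #   def _toy_coalesce_and_split(offsets, max_request_size=32768):
    #       regions = []
    #       for start, length in sorted(offsets):
    #           if regions and regions[-1][0] + regions[-1][1] == start:
    #               # extend the previous region if this one is contiguous
    #               prev_start, prev_len = regions.pop()
    #               start, length = prev_start, prev_len + length
    #           regions.append((start, length))
    #       requests = []
    #       for start, length in regions:
    #           while length > 0:
    #               size = min(length, max_request_size)
    #               requests.append((start, size))
    #               start += size
    #               length -= size
    #       return requests
    #
    #   # e.g. [(0, 10), (10, 20), (100, 70000)] ->
    #   #      [(0, 30), (100, 32768), (32868, 32768), (65636, 4464)]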

    def put_file(self, relpath, f, mode=None):
        """
        Copy the file-like object into the location.

        :param relpath: Location to put the contents, relative to base.
        :param f:       File-like object.
        :param mode: The final mode for the file
        """
        final_path = self._remote_path(relpath)
        self._put(final_path, f, mode=mode)

    def _put(self, abspath, f, mode=None):
        """Helper function so both put() and copy_abspaths can reuse the code"""
                # raise the original with its traceback if we can.
                raise

    def _put_non_atomic_helper(self, relpath, writer, mode=None,
                               create_parent_dir=False,
                               dir_mode=None):
        abspath = self._remote_path(relpath)

        # TODO: jam 20060816 paramiko doesn't publicly expose a way to
        #       set the file mode at create time. If it does, use it.
        #       But for now, we just chmod later anyway.

        def _open_and_write_file():
            """Try to open the target file, raise error on failure"""
            fout = None
            try:
                try:
                    fout = self._sftp.file(abspath, mode='wb')
                    fout.set_pipelined(True)
                    writer(fout)
                except (paramiko.SSHException, IOError), e:
                    self._translate_io_exception(e, abspath,
                                                 ': unable to open')

                # This is designed to chmod() right before we close.
                # Because we set_pipelined() earlier, theoretically we might
                # avoid the round trip for fout.close()
                if mode is not None:
                    self._sftp.chmod(abspath, mode)
            finally:
                if fout is not None:
                    fout.close()

        if not create_parent_dir:
            _open_and_write_file()
            return

        try:
            # Try error handling to create the parent directory if we need to
            _open_and_write_file()
        except NoSuchFile:
            # Try to create the parent directory, and then go back to
            # writing the file
            parent_dir = os.path.dirname(abspath)
            self._mkdir(parent_dir, dir_mode)
            _open_and_write_file()

    def put_file_non_atomic(self, relpath, f, mode=None,
                            create_parent_dir=False,
                            dir_mode=None):
        """Copy the file-like object into the target location.

        This function is not strictly safe to use. It is only meant to
        be used when you already know that the target does not exist.
        It is not safe, because it will open and truncate the remote
        file. So there may be a time when the file has invalid contents.

        :param relpath: The remote location to put the contents.
        :param f:       File-like object.
        :param mode:    Possible access permissions for new file.
                        None means do not set remote permissions.
        :param create_parent_dir: If we cannot create the target file because
                        the parent directory does not exist, go ahead and
                        create it, and then try again.
        """
        def writer(fout):
            self._pump(f, fout)
        self._put_non_atomic_helper(relpath, writer, mode=mode,
                                    create_parent_dir=create_parent_dir,
                                    dir_mode=dir_mode)

    def put_bytes_non_atomic(self, relpath, bytes, mode=None,
                             create_parent_dir=False,
                             dir_mode=None):
        def writer(fout):
            fout.write(bytes)
        self._put_non_atomic_helper(relpath, writer, mode=mode,
                                    create_parent_dir=create_parent_dir,
                                    dir_mode=dir_mode)
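
    # Illustrative sketch (hypothetical caller code, not part of this module):
    # how the non-atomic put methods above might be used. The URL and paths
    # are made up for the example.
    #
    #   from bzrlib.transport import get_transport
    #   t = get_transport('sftp://user@example.com/srv/data')
    #   # Truncates freely, and creates 'new/dir/' on demand if it is missing:
    #   t.put_bytes_non_atomic('new/dir/file.txt', 'hello\n',
    #                          create_parent_dir=True)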

    def iter_files_recursive(self):
        """Walk the relative paths of all files in this transport."""
        queue = list(self.list_dir('.'))
        while queue:
            relpath = queue.pop(0)
            st = self.stat(relpath)
            if stat.S_ISDIR(st.st_mode):
                for i, basename in enumerate(self.list_dir(relpath)):
                    queue.insert(i, relpath+'/'+basename)
            else:
                yield relpath
            mutter('Raising exception with errno %s', e.errno)

    def append_file(self, relpath, f, mode=None):
        """
        Append the text in the file-like object into the final
        location.
        """
        try:
            path = self._remote_path(relpath)
            fout = self._sftp.file(path, 'ab')
            if mode is not None:
                self._sftp.chmod(path, mode)
            result = fout.tell()
            self._pump(f, fout)
            return result
        except (IOError, paramiko.SSHException), e:
            self._translate_io_exception(e, relpath, ': unable to append')

    def copy(self, rel_from, rel_to):
        """Copy the item at rel_from to the location at rel_to"""
        path_from = self._remote_path(rel_from)
        path_to = self._remote_path(rel_to)
        self._copy_abspaths(path_from, path_to)

    def _copy_abspaths(self, path_from, path_to, mode=None):
        """Copy files given an absolute path

        :param path_from: Path on remote server to read
        :param path_to: Path on remote server to write

        TODO: Should the destination location be atomically created?
              This has not been specified
        TODO: This should use some sort of remote copy, rather than
              pulling the data locally, and then writing it remotely
        """
        try:
            fin = self._sftp.file(path_from, 'rb')
            try:
                self._put(path_to, fin, mode=mode)
            finally:
                fin.close()
        except (IOError, paramiko.SSHException), e:
            self._translate_io_exception(e, path_from,
                ': unable copy to: %r' % path_to)

    def rename(self, rel_from, rel_to):
        """Rename without special overwriting"""
        try:
            self._sftp.rename(self._remote_path(rel_from),
                              self._remote_path(rel_to))
        except (IOError, paramiko.SSHException), e:
            self._translate_io_exception(e, rel_from,
                ': unable to rename to %r' % (rel_to))

    def copy_to(self, relpaths, other, mode=None, pb=None):
        """Copy a set of entries from self into another Transport.

        :param relpaths: A list/generator of entries to be copied.
        """
        if isinstance(other, SFTPTransport) and other._sftp is self._sftp:
            # Both from & to are on the same remote filesystem, so we can
            # use a remote copy, instead of pulling locally and pushing back.
            total = self._get_total(relpaths)
            count = 0
            for relpath in relpaths:
                path_from = self._remote_path(relpath)
                path_to = other._remote_path(relpath)
                self._update_pb(pb, 'copy-to', count, total)
                self._copy_abspaths(path_from, path_to, mode=mode)
                count += 1
            return count
        else:
            return super(SFTPTransport, self).copy_to(relpaths, other,
                                                      mode=mode, pb=pb)

    def _rename_and_overwrite(self, abs_from, abs_to):
        """Do a fancy rename on the remote server.

        Using the implementation provided by osutils.
        """
        # that we have taken the lock.
        return SFTPLock(relpath, self)

    def _unparse_url(self, path=None):
        if path is None:
            path = self._path
        path = urllib.quote(path)
        # handle homedir paths
        if not path.startswith('/'):
            path = "/~/" + path
        netloc = urllib.quote(self._host)
        if self._username is not None:
            netloc = '%s@%s' % (urllib.quote(self._username), netloc)
        if self._port is not None:
            netloc = '%s:%d' % (netloc, self._port)
        return urlparse.urlunparse(('sftp', netloc, path, '', '', ''))

    def _split_url(self, url):
        (scheme, username, password, host, port, path) = split_url(url)
        assert scheme == 'sftp'

        # the initial slash should be removed from the path, and treated
        # as a homedir relative path (the path begins with a double slash
        # if it is absolute).
        # see draft-ietf-secsh-scp-sftp-ssh-uri-03.txt
        # RBC 20060118 we are not using this as it's too user hostile. instead
        # we are following lftp and using /~/foo to mean '~/foo'.
        # handle homedir paths
        if path.startswith('/~/'):
            path = path[3:]
        elif path == '/~':
            path = ''
        return (username, password, host, port, path)
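
    # Illustrative examples of the homedir handling above (for explanation
    # only, not doctests):
    #
    #   'sftp://user@host/~/foo'   -> path 'foo'       (home-relative, lftp style)
    #   'sftp://user@host/srv/foo' -> path '/srv/foo'  (absolute, kept as-is)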

    def _parse_url(self, url):
        (self._username, self._password,
         self._host, self._port, self._path) = self._split_url(url)

    def _sftp_connect(self):
        """Connect to the remote sftp server.

        TODO: Raise a more reasonable ConnectionFailed exception
        """
        global _connected_hosts

        idx = (self._host, self._port, self._username)
        try:
            self._sftp = _connected_hosts[idx]
            return
        except KeyError:
            pass

        vendor = _get_ssh_vendor()
        if vendor != 'none':
            sock = SFTPSubprocess(self._host, self._port, self._username)
            self._sftp = SFTPClient(sock)
        else:
            self._paramiko_connect()

        _connected_hosts[idx] = self._sftp

    def _paramiko_connect(self):
        global SYSTEM_HOSTKEYS, BZR_HOSTKEYS

        try:
            t = paramiko.Transport((self._host, self._port or 22))
            t.start_client()
        except paramiko.SSHException, e:
            raise ConnectionError('Unable to reach SSH host %s:%d' %
                                  (self._host, self._port), e)

        server_key = t.get_remote_server_key()
        server_key_hex = paramiko.util.hexify(server_key.get_fingerprint())
        keytype = server_key.get_name()
        if SYSTEM_HOSTKEYS.has_key(self._host) and SYSTEM_HOSTKEYS[self._host].has_key(keytype):
            our_server_key = SYSTEM_HOSTKEYS[self._host][keytype]
            our_server_key_hex = paramiko.util.hexify(our_server_key.get_fingerprint())
        elif BZR_HOSTKEYS.has_key(self._host) and BZR_HOSTKEYS[self._host].has_key(keytype):
            our_server_key = BZR_HOSTKEYS[self._host][keytype]
            our_server_key_hex = paramiko.util.hexify(our_server_key.get_fingerprint())
        else:
            warning('Adding %s host key for %s: %s' % (keytype, self._host, server_key_hex))
            if not BZR_HOSTKEYS.has_key(self._host):
                BZR_HOSTKEYS[self._host] = {}
            BZR_HOSTKEYS[self._host][keytype] = server_key
            our_server_key = server_key
            our_server_key_hex = paramiko.util.hexify(our_server_key.get_fingerprint())
        if server_key != our_server_key:
            filename1 = os.path.expanduser('~/.ssh/known_hosts')
            filename2 = pathjoin(config_dir(), 'ssh_host_keys')
            raise TransportError('Host keys for %s do not match! %s != %s' % \
                (self._host, our_server_key_hex, server_key_hex),
                ['Try editing %s or %s' % (filename1, filename2)])

        self._sftp_auth(t)

        try:
            self._sftp = t.open_sftp_client()
        except paramiko.SSHException, e:
            raise ConnectionError('Unable to start sftp client %s:%d' %
                                  (self._host, self._port), e)

    def _sftp_auth(self, transport):
        # paramiko requires a username, but it might be none if nothing was supplied
        # use the local username, just in case.
        # We don't override self._username, because if we aren't using paramiko,
        # the username might be specified in ~/.ssh/config and we don't want to
        # force it to something else
        # Also, it would mess up the self.relpath() functionality
        username = self._username or getpass.getuser()

        # Paramiko tries to open a socket.AF_UNIX in order to connect
        # to ssh-agent. That attribute doesn't exist on win32 (it does in cygwin)
        # so we get an AttributeError exception. For now, just don't try to
        # connect to an agent if we are on win32
        if sys.platform != 'win32':
            agent = paramiko.Agent()
            for key in agent.get_keys():
                mutter('Trying SSH agent key %s' % paramiko.util.hexify(key.get_fingerprint()))
                try:
                    transport.auth_publickey(username, key)
                    return
                except paramiko.SSHException, e:
                    pass

        # okay, try finding id_rsa or id_dss? (posix only)
        if self._try_pkey_auth(transport, paramiko.RSAKey, username, 'id_rsa'):
            return
        if self._try_pkey_auth(transport, paramiko.DSSKey, username, 'id_dsa'):
            return

        if self._password:
            try:
                transport.auth_password(username, self._password)
                return
            except paramiko.SSHException, e:
                pass

            # FIXME: Don't keep a password held in memory if you can help it
            #self._password = None

        # give up and ask for a password
        password = bzrlib.ui.ui_factory.get_password(
                prompt='SSH %(user)s@%(host)s password',
                user=username, host=self._host)
        try:
            transport.auth_password(username, password)
        except paramiko.SSHException, e:
            raise ConnectionError('Unable to authenticate to SSH host as %s@%s' %
                                  (username, self._host), e)

    def _try_pkey_auth(self, transport, pkey_class, username, filename):
        filename = os.path.expanduser('~/.ssh/' + filename)
        try:
            key = pkey_class.from_private_key_file(filename)
            transport.auth_publickey(username, key)
            return True
        except paramiko.PasswordRequiredException:
            password = bzrlib.ui.ui_factory.get_password(
                    prompt='SSH %(filename)s password',
                    filename=filename)
            try:
                key = pkey_class.from_private_key_file(filename, password)
                transport.auth_publickey(username, key)
                return True
            except paramiko.SSHException:
                mutter('SSH authentication via %s key failed.' % (os.path.basename(filename),))
        except paramiko.SSHException:
            mutter('SSH authentication via %s key failed.' % (os.path.basename(filename),))
        except IOError:
            pass
        return False

    def _sftp_open_exclusive(self, abspath, mode=None):
        """Open a remote path exclusively.

        SFTP supports O_EXCL (SFTP_FLAG_EXCL), so the open fails if the
        file already exists.
        """
        attr = SFTPAttributes()
        if mode is not None:
            attr.st_mode = mode
        omode = (SFTP_FLAG_WRITE | SFTP_FLAG_CREATE
                 | SFTP_FLAG_TRUNC | SFTP_FLAG_EXCL)
        try:
            t, msg = self._sftp._request(CMD_OPEN, abspath, omode, attr)
            if t != CMD_HANDLE:
                raise TransportError('Expected an SFTP handle')
            handle = msg.get_string()
            return SFTPFile(self._sftp, handle, 'wb', -1)
        except (paramiko.SSHException, IOError), e:
            self._translate_io_exception(e, abspath, ': unable to open',
                failure_exc=FileExists)

    def _can_roundtrip_unix_modebits(self):
        if sys.platform == 'win32':
            # anyone else?
            return False
        else:
            return True


# ------------- server test implementation --------------

from bzrlib.tests.stub_sftp import StubServer, StubSFTPServer

STUB_SERVER_KEY = """
-----BEGIN RSA PRIVATE KEY-----
MIICWgIBAAKBgQDTj1bqB4WmayWNPB+8jVSYpZYk80Ujvj680pOTh2bORBjbIAyz
oWGW+GUjzKxTiiPvVmxFgx5wdsFvF03v34lEVVhMpouqPAYQ15N37K/ir5XY+9m/
d8ufMCkjeXsQkKqFbAlQcnWMCRnOoPHS3I4vi6hmnDDeeYTSRvfLbW0fhwIBIwKB
gBIiOqZYaoqbeD9OS9z2K9KR2atlTxGxOJPXiP4ESqP3NVScWNwyZ3NXHpyrJLa0
EbVtzsQhLn6rF+TzXnOlcipFvjsem3iYzCpuChfGQ6SovTcOjHV9z+hnpXvQ/fon
soVRZY65wKnF7IAoUwTmJS9opqgrN6kRgCd3DASAMd1bAkEA96SBVWFt/fJBNJ9H
tYnBKZGw0VeHOYmVYbvMSstssn8un+pQpUm9vlG/bp7Oxd/m+b9KWEh2xPfv6zqU
avNwHwJBANqzGZa/EpzF4J8pGti7oIAPUIDGMtfIcmqNXVMckrmzQ2vTfqtkEZsA
4rE1IERRyiJQx6EJsz21wJmGV9WJQ5kCQQDwkS0uXqVdFzgHO6S++tjmjYcxwr3g
H0CoFYSgbddOT6miqRskOQF3DZVkJT3kyuBgU2zKygz52ukQZMqxCb1fAkASvuTv
qfpH87Qq5kQhNKdbbwbmd2NxlNabazPijWuphGTdW0VfJdWfklyS2Kr+iqrs/5wV
HhathJt636Eg7oIjAkA8ht3MQ+XSl9yIJIS8gVpbPxSw5OMfw0PjVE7tBdQruiSc
nvuQES5C9BMHjF39LZiGH1iLQy7FgdHyoP+eodI7
-----END RSA PRIVATE KEY-----
"""


class SocketListener(threading.Thread):

    def __init__(self, callback):
        threading.Thread.__init__(self)
        self._callback = callback
        self._socket = socket.socket()
        self._socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self._socket.bind(('localhost', 0))
        self._socket.listen(1)
        self.port = self._socket.getsockname()[1]
        self._stop_event = threading.Event()

    def stop(self):
        # called from outside this thread
        self._stop_event.set()
        # use a timeout here, because if the test fails, the server thread may
        # never notice the stop_event.
        self.join(5.0)
        self._socket.close()

    def run(self):
        while True:
            readable, writable_unused, exception_unused = \
                select.select([self._socket], [], [], 0.1)
            if self._stop_event.isSet():
                return
            if len(readable) == 0:
                continue
            try:
                s, addr_unused = self._socket.accept()
                # because the loopback socket is inline, and transports are
                # never explicitly closed, best to launch a new thread.
                threading.Thread(target=self._callback, args=(s,)).start()
            except socket.error, x:
                sys.excepthook(*sys.exc_info())
                warning('Socket error during accept() within unit test server'
                        ' thread: %r' % x)
            except Exception, x:
                # probably a failed test; unit test thread will log the
                # failure/error
                sys.excepthook(*sys.exc_info())
                warning('Exception from within unit test server thread: %r' %
                        x)


class SocketDelay(object):
    """A socket decorator to make TCP appear slower.

    This changes recv, send, and sendall to add a fixed latency to each python
    call if a new roundtrip is detected. That is, when a recv is called and the
    flag new_roundtrip is set, latency is charged. Every send and send_all
    sets the flag, starting a new roundtrip.

    In addition every send, sendall and recv sleeps a bit per character sent to
    simulate bandwidth.

    Not all methods are implemented, this is deliberate as this class is not a
    replacement for the builtin sockets layer. fileno is not implemented to
    prevent the proxy being bypassed.
    """

    simulated_time = 0
    _proxied_arguments = dict.fromkeys([
        "close", "getpeername", "getsockname", "getsockopt", "gettimeout",
        "setblocking", "setsockopt", "settimeout", "shutdown"])

    def __init__(self, sock, latency, bandwidth=1.0,
                 really_sleep=True):
        """
        :param bandwidth: simulated bandwidth (MegaBit)
        :param really_sleep: If set to false, the SocketDelay will just
        increase a counter, instead of calling time.sleep. This is useful for
        unittesting the SocketDelay.
        """
        self.sock = sock
        self.latency = latency
        self.really_sleep = really_sleep
        self.time_per_byte = 1 / (bandwidth / 8.0 * 1024 * 1024)
        self.new_roundtrip = False

    def sleep(self, s):
        if self.really_sleep:
            time.sleep(s)
        else:
            SocketDelay.simulated_time += s

    def __getattr__(self, attr):
        if attr in SocketDelay._proxied_arguments:
            return getattr(self.sock, attr)
        raise AttributeError("'SocketDelay' object has no attribute %r" %
                             attr)

    def dup(self):
        return SocketDelay(self.sock.dup(), self.latency, self.time_per_byte,
                           self.really_sleep)

    def recv(self, *args):
        data = self.sock.recv(*args)
        if data and self.new_roundtrip:
            self.new_roundtrip = False
            self.sleep(self.latency)
        self.sleep(len(data) * self.time_per_byte)
        return data

    def sendall(self, data, flags=0):
        if not self.new_roundtrip:
            self.new_roundtrip = True
            self.sleep(self.latency)
        self.sleep(len(data) * self.time_per_byte)
        return self.sock.sendall(data, flags)

    def send(self, data, flags=0):
        if not self.new_roundtrip:
            self.new_roundtrip = True
            self.sleep(self.latency)
        bytes_sent = self.sock.send(data, flags)
        self.sleep(bytes_sent * self.time_per_byte)
        return bytes_sent
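

# Illustrative sketch (hypothetical test code, not part of this module): how
# SocketDelay can wrap one end of a socket pair to simulate a slow link. The
# numbers are arbitrary.
#
#   import socket
#   a, b = socket.socketpair()
#   slow = SocketDelay(b, latency=0.03, bandwidth=0.5, really_sleep=False)
#   slow.sendall('hello')              # charges latency for the new roundtrip
#   a.recv(5)
#   print SocketDelay.simulated_time   # accumulated simulated delay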


class SFTPServer(Server):
    """Common code for SFTP server facilities."""

    def __init__(self):
        self._original_vendor = None
        self._homedir = None
        self._server_homedir = None
        self._listener = None
        self._root = None
        self._vendor = ssh.ParamikoVendor()
        # sftp server logs
        self.logs = []
        self.add_latency = 0

    def _get_sftp_url(self, path):
        """Calculate an sftp url to this server for path."""
        return 'sftp://foo:bar@localhost:%d/%s' % (self._listener.port, path)

    def log(self, message):
        """StubServer uses this to log when a new server is created."""
        self.logs.append(message)

    def _run_server_entry(self, sock):
        """Entry point for all implementations of _run_server.

        If self.add_latency is > 0.000001 then sock is given a latency adding
        decorator.
        """
        if self.add_latency > 0.000001:
            sock = SocketDelay(sock, self.add_latency)
        return self._run_server(sock)

    def _run_server(self, s):
        ssh_server = paramiko.Transport(s)
        key_file = pathjoin(self._homedir, 'test_rsa.key')
        f = open(key_file, 'w')
        f.write(STUB_SERVER_KEY)
        f.close()
        host_key = paramiko.RSAKey.from_private_key_file(key_file)
        ssh_server.add_server_key(host_key)
        server = StubServer(self)
        ssh_server.set_subsystem_handler('sftp', paramiko.SFTPServer,
                                         StubSFTPServer, root=self._root,
                                         home=self._server_homedir)
        event = threading.Event()
        ssh_server.start_server(event, server)
        event.wait(5.0)

    def setUp(self):
        self._original_vendor = ssh._ssh_vendor
        ssh._ssh_vendor = self._vendor
        if sys.platform == 'win32':
            # Win32 needs to use the UNICODE api
            self._homedir = getcwd()
        else:
            # But Linux SFTP servers should just deal in bytestreams
            self._homedir = os.getcwd()
        if self._server_homedir is None:
            self._server_homedir = self._homedir
        if sys.platform == 'win32':
            self._root = ''
        else:
            self._root = '/'
        self._listener = SocketListener(self._run_server_entry)
        self._listener.setDaemon(True)
        self._listener.start()

    def tearDown(self):
        """See bzrlib.transport.Server.tearDown."""
        self._listener.stop()
        ssh._ssh_vendor = self._original_vendor

    def get_bogus_url(self):
        """See bzrlib.transport.Server.get_bogus_url."""
        # this is chosen to try to prevent trouble with proxies, weird dns, etc
        # we bind a random socket, so that we get a guaranteed unused port
        # we just never listen on that port
        s = socket.socket()
        s.bind(('localhost', 0))
        return 'sftp://%s:%s/' % s.getsockname()


class SFTPFullAbsoluteServer(SFTPServer):
    """A test server for sftp transports, using absolute urls and ssh."""

    def get_url(self):
        """See bzrlib.transport.Server.get_url."""
        return self._get_sftp_url(urlutils.escape(self._homedir[1:]))


class SFTPServerWithoutSSH(SFTPServer):
    """An SFTP server that uses a simple TCP socket pair rather than SSH."""

    def __init__(self):
        super(SFTPServerWithoutSSH, self).__init__()
        self._vendor = ssh.LoopbackVendor()

    def _run_server(self, sock):
        # Re-import these as locals, so that they're still accessible during
        # interpreter shutdown (when all module globals get set to None, leading
        # to confusing errors like "'NoneType' object has no attribute 'error'").
        import socket, errno
        class FakeChannel(object):
            def get_transport(self):
                return self
            def get_log_channel(self):
                return 'paramiko'
            def get_name(self):
                return '1'
            def get_hexdump(self):
                return False
            def close(self):
                pass

        server = paramiko.SFTPServer(FakeChannel(), 'sftp', StubServer(self), StubSFTPServer,
                                     root=self._root, home=self._server_homedir)
        try:
            server.start_subsystem('sftp', None, sock)
        except socket.error, e:
            if (len(e.args) > 0) and (e.args[0] == errno.EPIPE):
                # it's okay for the client to disconnect abruptly
                # (bug in paramiko 1.6: it should absorb this exception)
                pass
            else:
                raise
        except Exception, e:
            import sys; sys.stderr.write('\nEXCEPTION %r\n\n' % e.__class__)
        server.finish_subsystem()


class SFTPAbsoluteServer(SFTPServerWithoutSSH):
    """A test server for sftp transports, using absolute urls."""

    def get_url(self):
        """See bzrlib.transport.Server.get_url."""
        if sys.platform == 'win32':
            return self._get_sftp_url(urlutils.escape(self._homedir))
        else:
            return self._get_sftp_url(urlutils.escape(self._homedir[1:]))


class SFTPHomeDirServer(SFTPServerWithoutSSH):
    """A test server for sftp transports, using homedir relative urls."""

    def get_url(self):
        """See bzrlib.transport.Server.get_url."""
        return self._get_sftp_url("~/")


class SFTPSiblingAbsoluteServer(SFTPAbsoluteServer):
    """A test server for sftp transports, using absolute urls to non-home."""

    def setUp(self):
        self._server_homedir = '/dev/noone/runs/tests/here'
        super(SFTPSiblingAbsoluteServer, self).setUp()


def _sftp_connect(host, port, username, password):
    """Connect to the remote sftp server.

    :raises: a TransportError 'could not connect'.

    :returns: a paramiko.sftp_client.SFTPClient

    TODO: Raise a more reasonable ConnectionFailed exception
    """
    idx = (host, port, username)
    try:
        return _connected_hosts[idx]
    except KeyError:
        pass

    sftp = _sftp_connect_uncached(host, port, username, password)
    _connected_hosts[idx] = sftp
    return sftp


def _sftp_connect_uncached(host, port, username, password):
    vendor = ssh._get_ssh_vendor()
    sftp = vendor.connect_sftp(username, password, host, port)
    return sftp


def get_test_permutations():
    """Return the permutations to be used in testing."""
    return [(SFTPTransport, SFTPAbsoluteServer),
            (SFTPTransport, SFTPHomeDirServer),
            (SFTPTransport, SFTPSiblingAbsoluteServer),
            ]