Commit 578ae9d
Merge branch 'master' into switch-to-cryptography
alex committed Dec 18, 2015
2 parents 0a368f8 + fa0e17f commit 578ae9d
Showing 6 changed files with 121 additions and 56 deletions.
12 changes: 0 additions & 12 deletions README.rst
@@ -65,18 +65,6 @@ Paramiko primarily supports POSIX platforms with standard OpenSSH
implementations, and is most frequently tested on Linux and OS X. Windows is
supported as well, though it may not be as straightforward.

-Some Python distributions don't include the UTF-8 string encodings, for
-reasons of space (misguided as that is). If your distribution is
-missing encodings, you'll see an error like this::
-
-    LookupError: no codec search functions registered: can't find encoding
-
-This means you need to copy string encodings over from a working system
-(it probably only happens on embedded systems, not normal Python
-installs). Valeriy Pogrebitskiy says the best place to look is
-``.../lib/python*/encodings/__init__.py``.
-
-
Bugs & Support
--------------

64 changes: 53 additions & 11 deletions paramiko/file.py
@@ -59,7 +59,7 @@ def __init__(self):

def __del__(self):
self.close()

def __iter__(self):
"""
Returns an iterator that can be used to iterate over the lines in this
@@ -97,7 +97,7 @@ def next(self):
:raises StopIteration: when the end of the file is reached.
-        :return: a line (`str`) read from the file.
+        :returns: a line (`str`) read from the file.
"""
line = self.readline()
if not line:
@@ -119,6 +119,48 @@ def __next__(self):
raise StopIteration
return line
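
As a rough sketch of the iteration protocol shown above (the host,
credentials, and paths below are hypothetical; ``sftp.open`` returns an
`SFTPFile`, a `BufferedFile` subclass)::

    import paramiko

    client = paramiko.SSHClient()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    client.connect('example.com', username='user', password='secret')
    sftp = client.open_sftp()

    # __iter__ returns the file object itself and each step calls
    # readline(), so the remote file streams line by line.
    f = sftp.open('/tmp/example.log', 'r')
    for line in f:
        print(line.rstrip())
    f.close()

The later sketches in this diff reuse this ``sftp`` session.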

def readable(self):
"""
Check if the file can be read from.
:returns:
`True` if the file can be read from. If `False`, `read` will raise
an exception.
"""
return (self._flags & self.FLAG_READ) == self.FLAG_READ

def writable(self):
"""
Check if the file can be written to.
:returns:
`True` if the file can be written to. If `False`, `write` will
raise an exception.
"""
return (self._flags & self.FLAG_WRITE) == self.FLAG_WRITE

def seekable(self):
"""
Check if the file supports random access.
:returns:
`True` if the file supports random access. If `False`, `seek` will
raise an exception.
"""
return False

def readinto(self, buff):
"""
Read up to ``len(buff)`` bytes into :class:`bytearray` *buff* and
return the number of bytes read.
:returns:
The number of bytes read.
"""
data = self.read(len(buff))
buff[:len(data)] = data
return len(data)
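
Together these four methods give `BufferedFile` the capability probes of
Python's ``io`` classes. A short sketch, reusing the ``sftp`` session from
the earlier example (path hypothetical)::

    f = sftp.open('/tmp/example.bin', 'rb')
    print(f.readable())   # True: mode 'rb' sets FLAG_READ
    print(f.writable())   # False: FLAG_WRITE is unset, so write() would raise
    print(f.seekable())   # True here only because SFTPFile overrides the
                          # BufferedFile default of False (see sftp_file.py below)

    # readinto() fills a caller-supplied buffer and reports how many bytes
    # actually arrived (possibly fewer than len(buff) near EOF).
    buff = bytearray(1024)
    n = f.readinto(buff)
    print(n)
    f.close()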

def read(self, size=None):
"""
Read at most ``size`` bytes from the file (less if we hit the end of the
@@ -132,7 +174,7 @@ def read(self, size=None):
text data.
:param int size: maximum number of bytes to read
-        :return:
+        :returns:
data read from the file (as bytes), or an empty string if EOF was
encountered immediately
"""
@@ -155,12 +197,12 @@ def read(self, size=None):
result += new_data
self._realpos += len(new_data)
self._pos += len(new_data)
-            return result
+        return result
if size <= len(self._rbuffer):
result = self._rbuffer[:size]
self._rbuffer = self._rbuffer[size:]
self._pos += len(result)
-            return result
+        return result
while len(self._rbuffer) < size:
read_size = size - len(self._rbuffer)
if self._flags & self.FLAG_BUFFERED:
@@ -176,7 +218,7 @@ def read(self, size=None):
result = self._rbuffer[:size]
self._rbuffer = self._rbuffer[size:]
self._pos += len(result)
-            return result
+        return result
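
The EOF convention matters in practice: `read` signals end-of-file with an
empty result rather than ``None``. A sketch of a chunked download loop
(hypothetical paths, same assumed ``sftp`` session)::

    remote = sftp.open('/tmp/source.dat', 'rb')
    local = open('copy.dat', 'wb')
    while True:
        chunk = remote.read(32768)   # at most 32 KiB per call
        if not chunk:                # empty result means EOF
            break
        local.write(chunk)
    local.close()
    remote.close()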

def readline(self, size=None):
"""
@@ -192,7 +234,7 @@ def readline(self, size=None):
characters (``'\\0'``) if they occurred in the input.
:param int size: maximum length of returned string.
-        :return:
+        :returns:
next line of the file, or an empty string if the end of the
file has been reached.
@@ -254,7 +296,7 @@ def readline(self, size=None):
xpos = pos + 1
if (line[pos] == cr_byte_value) and (xpos < len(line)) and (line[xpos] == linefeed_byte_value):
xpos += 1
-            # if the string was truncated, _rbuffer needs to have the string after
+        # if the string was truncated, _rbuffer needs to have the string after
# the newline character plus the truncated part of the line we stored
# earlier in _rbuffer
self._rbuffer = line[xpos:] + self._rbuffer if truncated else line[xpos:]
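
In other words, a line longer than ``size`` comes back truncated, and the
leftover bytes wait in ``_rbuffer`` for the next call. A small sketch
(assumed session, hypothetical file)::

    f = sftp.open('/tmp/example.log', 'r')
    whole = f.readline()      # up to and including the line terminator
    capped = f.readline(16)   # at most 16 bytes of the next line
    rest = f.readline()       # resumes where the capped read stopped
    f.close()
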
@@ -277,7 +319,7 @@ def readlines(self, sizehint=None):
after rounding up to an internal buffer size) are read.
:param int sizehint: desired maximum number of bytes to read.
-        :return: `list` of lines read from the file.
+        :returns: `list` of lines read from the file.
"""
lines = []
byte_count = 0
@@ -300,7 +342,7 @@ def seek(self, offset, whence=0):
If a file is opened in append mode (``'a'`` or ``'a+'``), any seek
operations will be undone at the next write (as the file position
will move back to the end of the file).
:param int offset:
position to move to within the file, relative to ``whence``.
:param int whence:
@@ -317,7 +359,7 @@ def tell(self):
useful if the underlying file doesn't support random access, or was
opened in append mode.
-        :return: file position (`number <int>` of bytes).
+        :returns: file position (`number <int>` of bytes).
"""
return self._pos
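
A short illustration of `seek` and `tell` together, using the ``whence``
constants the class defines (assumed session, hypothetical file)::

    f = sftp.open('/tmp/example.bin', 'rb')
    f.seek(0, f.SEEK_END)      # jump to the end
    size = f.tell()            # logical position, i.e. the file size
    f.seek(-16, f.SEEK_END)    # back up 16 bytes from the end
    tail = f.read()
    f.seek(0)                  # whence defaults to 0 (SEEK_SET, absolute)
    f.close()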

64 changes: 37 additions & 27 deletions paramiko/sftp_file.py
@@ -64,13 +64,13 @@ def __init__(self, sftp, handle, mode='r', bufsize=-1):

def __del__(self):
self._close(async=True)

def close(self):
"""
Close the file.
"""
self._close(async=False)

def _close(self, async=False):
# We allow double-close without signaling an error, because real
# Python file objects do. However, we must protect against actually
@@ -112,7 +112,7 @@ def _data_in_prefetch_requests(self, offset, size):
return True
# well, we have part of the request. see if another chunk has the rest.
return self._data_in_prefetch_requests(buf_offset + buf_size, offset + size - buf_offset - buf_size)

def _data_in_prefetch_buffers(self, offset):
"""
if a block of data is present in the prefetch buffers, at the given
@@ -129,7 +129,7 @@ def _data_in_prefetch_buffers(self, offset):
# it's not here
return None
return index

def _read_prefetch(self, size):
"""
read data out of the prefetch buffer, if possible. if the data isn't
@@ -149,7 +149,7 @@ def _read_prefetch(self, size):
return None
prefetch = self._prefetch_data[offset]
del self._prefetch_data[offset]

buf_offset = self._realpos - offset
if buf_offset > 0:
self._prefetch_data[offset] = prefetch[:buf_offset]
@@ -158,7 +158,7 @@ def _read_prefetch(self, size):
self._prefetch_data[self._realpos + size] = prefetch[size:]
prefetch = prefetch[:size]
return prefetch

def _read(self, size):
size = min(size, self.MAX_REQUEST_SIZE)
if self._prefetching:
@@ -217,6 +217,16 @@ def setblocking(self, blocking):
"""
self.sftp.sock.setblocking(blocking)

def seekable(self):
"""
Check if the file supports random access.
:return:
`True` if the file supports random access. If `False`,
:meth:`seek` will raise an exception
"""
return True

def seek(self, offset, whence=0):
self.flush()
if whence == self.SEEK_SET:
@@ -253,7 +263,7 @@ def chmod(self, mode):
attr = SFTPAttributes()
attr.st_mode = mode
self.sftp._request(CMD_FSETSTAT, self.handle, attr)

def chown(self, uid, gid):
"""
Change the owner (``uid``) and group (``gid``) of this file. As with
@@ -294,38 +304,38 @@ def truncate(self, size):
Change the size of this file. This usually extends
or shrinks the size of the file, just like the ``truncate()`` method on
Python file objects.
:param size: the new size of the file
:type size: int or long
"""
self.sftp._log(DEBUG, 'truncate(%s, %r)' % (hexlify(self.handle), size))
attr = SFTPAttributes()
attr.st_size = size
self.sftp._request(CMD_FSETSTAT, self.handle, attr)
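
`chmod`, `chown`, and `truncate` all funnel into the same ``CMD_FSETSTAT``
request seen here, differing only in which `SFTPAttributes` field is set.
A hedged sketch (assumed session; the path and uid/gid values are
placeholders)::

    f = sftp.open('/tmp/example.dat', 'w')
    f.write(b'x' * 1024)
    f.flush()                # make sure buffered bytes reach the server
    f.truncate(512)          # shrink the remote file to 512 bytes
    f.chmod(0o644)           # permission bits, as with os.chmod
    # f.chown(1000, 1000)    # usually needs server-side privileges
    f.close()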

def check(self, hash_algorithm, offset=0, length=0, block_size=0):
"""
Ask the server for a hash of a section of this file. This can be used
to verify a successful upload or download, or for various rsync-like
operations.
The file is hashed from ``offset``, for ``length`` bytes. If ``length``
is 0, the remainder of the file is hashed. Thus, if both ``offset``
and ``length`` are zero, the entire file is hashed.
Normally, ``block_size`` will be 0 (the default), and this method will
return a byte string representing the requested hash (for example, a
string of length 16 for MD5, or 20 for SHA-1). If a non-zero
``block_size`` is given, each chunk of the file (from ``offset`` to
``offset + length``) of ``block_size`` bytes is computed as a separate
hash. The hash results are all concatenated and returned as a single
string.
For example, ``check('sha1', 0, 1024, 512)`` will return a string of
length 40. The first 20 bytes will be the SHA-1 of the first 512 bytes
of the file, and the last 20 bytes will be the SHA-1 of the next 512
bytes.
:param str hash_algorithm:
the name of the hash algorithm to use (normally ``"sha1"`` or
``"md5"``)
@@ -343,13 +353,13 @@ def check(self, hash_algorithm, offset=0, length=0, block_size=0):
:return:
`str` of bytes representing the hash of each block, concatenated
together
:raises IOError: if the server doesn't support the "check-file"
extension, or possibly doesn't support the hash algorithm
requested
.. note:: Many (most?) servers don't support this extension yet.
.. versionadded:: 1.4
"""
t, msg = self.sftp._request(CMD_EXTENDED, 'check-file', self.handle,
@@ -358,7 +368,7 @@ def check(self, hash_algorithm, offset=0, length=0, block_size=0):
alg = msg.get_text()
data = msg.get_remainder()
return data
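
Following the docstring's own example, a hedged use of the ``check-file``
extension (assumed session, hypothetical path)::

    f = sftp.open('/tmp/example.iso', 'rb')
    try:
        # SHA-1 over the first 1024 bytes in 512-byte blocks: two 20-byte
        # digests concatenated, so 40 bytes come back.
        digest = f.check('sha1', 0, 1024, 512)
        print(len(digest))   # 40
    except IOError:
        print('server lacks the check-file extension')
    f.close()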

def set_pipelined(self, pipelined=True):
"""
Turn on/off the pipelining of write operations to this file. When
@@ -368,24 +378,24 @@ def set_pipelined(self, pipelined=True):
server responses are collected. This means that if there was an error
with one of your later writes, an exception might be thrown from within
`.close` instead of `.write`.
By default, files are not pipelined.
:param bool pipelined:
``True`` if pipelining should be turned on for this file; ``False``
otherwise
.. versionadded:: 1.5
"""
self.pipelined = pipelined
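
A sketch of pipelined writes (assumed session): with pipelining on, an
error from an early `write` may only surface at close time, so the
`close` belongs inside the error-handling path::

    f = sftp.open('/tmp/upload.dat', 'w')
    f.set_pipelined(True)    # fire off writes without awaiting each ack
    try:
        for _ in range(64):
            f.write(b'x' * 32768)
    finally:
        f.close()            # a failed earlier write may raise here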

def prefetch(self, file_size):
"""
Pre-fetch the remaining contents of this file in anticipation of future
`.read` calls. If reading the entire file, pre-fetching can
dramatically improve the download speed by avoiding roundtrip latency.
The file's contents are incrementally buffered in a background thread.
The prefetched data is stored in a buffer until read via the `.read`
method. Once data has been read, it's removed from the buffer. The
data may be read in a random order (using `.seek`); chunks of the
@@ -402,20 +412,20 @@ def prefetch(self, file_size):
n += chunk
if len(chunks) > 0:
self._start_prefetch(chunks)
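
A sketch of a prefetched download (assumed session); note that in this
version `prefetch` takes the expected ``file_size`` as an argument::

    f = sftp.open('/tmp/big.tar.gz', 'rb')
    f.prefetch(f.stat().st_size)   # background thread starts fetching chunks
    data = f.read()                # reads drain the prefetch buffers
    f.close()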

def readv(self, chunks):
"""
Read a set of blocks from the file by (offset, length). This is more
efficient than doing a series of `.seek` and `.read` calls, since the
prefetch machinery is used to retrieve all the requested blocks at
once.
:param chunks:
a list of (offset, length) tuples indicating which sections of the
file to read
:type chunks: list(tuple(long, int))
:return: a list of blocks read, in the same order as in ``chunks``
.. versionadded:: 1.5.4
"""
self.sftp._log(DEBUG, 'readv(%s, %r)' % (hexlify(self.handle), chunks))
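
A hedged sketch of `readv` (assumed session): three scattered blocks come
back through the prefetch machinery in one batch instead of three
`seek`/`read` round trips::

    f = sftp.open('/tmp/archive.bin', 'rb')
    for block in f.readv([(0, 16), (4096, 128), (65536, 512)]):
        print(len(block))
    f.close()
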
@@ -454,7 +464,7 @@ def _start_prefetch(self, chunks):
t = threading.Thread(target=self._prefetch_thread, args=(chunks,))
t.setDaemon(True)
t.start()

def _prefetch_thread(self, chunks):
# do these read requests in a temporary thread because there may be
# a lot of them, so it may block.
@@ -480,7 +490,7 @@ def _async_response(self, t, msg, num):
del self._prefetch_extents[num]
if len(self._prefetch_extents) == 0:
self._prefetch_done = True

def _check_exception(self):
"""if there's a saved exception, raise & clear it"""
if self._saved_exception is not None:
(Diffs for the remaining three changed files are not shown.)
