Skip to content

Commit

Permalink
Improve output_if_timeout handling
Browse files Browse the repository at this point in the history
This fixes two issues with the implementation of output_if_timeout:
 1. The raw byte data was sent to the logger. In order to improve
    readability, the output is now decoded, and stdout and stderr
    are separated by a header/footer.
 2. The parameter handled only the timeout generated by the library,
    not the timeout generated by robot. Now both timeouts write the output
    to the logger if output_if_timeout=True.

Additionally, for paramiko a final read of the channel is done to fetch
as much of the available output as possible.
  • Loading branch information
MofX committed Mar 30, 2022
1 parent ba6c3ee commit 10fd9d7
Show file tree
Hide file tree
Showing 2 changed files with 83 additions and 34 deletions.
35 changes: 30 additions & 5 deletions src/SSHLibrary/abstractclient.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,15 +23,18 @@
import ntpath
import fnmatch

from robot.errors import TimeoutError

from .config import (Configuration, IntegerEntry, NewlineEntry, StringEntry,
TimeEntry)
from .logger import logger
from .utils import is_bytes, is_string, unicode, is_list_like

from .utils import is_bytes, is_string, unicode, is_list_like, is_truthy

class SSHClientException(RuntimeError):
    """Base exception for errors raised by the SSH client."""


class SSHClientTimeoutException(SSHClientException):
    """Raised when a command does not finish within the given timeout."""

class _ClientConfiguration(Configuration):

Expand Down Expand Up @@ -381,11 +384,24 @@ def read_command_output(self, timeout=None, output_during_execution=False, outpu
"""
if timeout:
timeout = float(TimeEntry(timeout).value)
try:
return self._started_commands.pop().read_outputs(timeout, output_during_execution, output_if_timeout)
except IndexError:

if not self._started_commands:
raise SSHClientException('No started commands to read output from.')

command = self._started_commands.pop()
try:
return command.read_outputs(timeout, output_during_execution, output_if_timeout)
except (TimeoutError, SSHClientTimeoutException):
if is_truthy(output_if_timeout):
try:
stdout, stderr = command.read_unfinished_outputs()
if stdout or stderr:
logger.info("---- Stdout ----\n" + stdout + ("-" * 16) + "\n")
logger.info("---- Stderr ----\n" + stderr + ("-" * 16) + "\n")
except NotImplementedError:
pass
raise

def write(self, text, add_newline=False):
"""Writes `text` in the current shell.
Expand Down Expand Up @@ -1351,6 +1367,15 @@ def read_outputs(self):
"""
raise NotImplementedError

def read_unfinished_outputs(self):
    """Returns the so-far unread output of the last started command.

    This can be used when the execution is aborted by a test timeout.
    Subclasses that support partial reads must override this method; the
    base implementation only signals that the feature is unavailable.

    :returns: A 2-tuple (stdout, stderr) with values
        `stdout` and `stderr` as strings.
    :raises NotImplementedError: always, in this base implementation.
    """
    # Bug fix: the original `return NotImplementedError` handed the
    # exception *class* back to the caller instead of raising it, so
    # callers that catch NotImplementedError (see read_command_output)
    # never saw it. Raise, mirroring the sibling abstract methods.
    raise NotImplementedError


class SFTPFileInfo(object):
"""Wrapper class for the language specific file information objects.
Expand Down
82 changes: 53 additions & 29 deletions src/SSHLibrary/pythonclient.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,8 @@

from .abstractclient import (AbstractShell, AbstractSFTPClient,
AbstractSSHClient, AbstractCommand,
SSHClientException, SFTPFileInfo)
SSHClientException, SSHClientTimeoutException,
SFTPFileInfo)
from .pythonforward import LocalPortForwarding
from .utils import is_bytes, is_list_like, is_unicode, is_truthy
from robot.api import logger
Expand Down Expand Up @@ -431,55 +432,78 @@ def _put_file(self, source, destination, mode, newline, path_separator, scp_pres
def _get_file(self, remote_path, local_path, scp_preserve_times=False):
self._scp_client.get(remote_path, local_path, preserve_times=is_truthy(scp_preserve_times))

class OutputBuffers():
    """Accumulates stdout/stderr data read from a paramiko channel.

    Wraps the channel's stdout and stderr file views and collects the
    chunks that become available while a command runs, so they can later
    be joined and decoded in a single pass.
    """

    def __init__(self, shell):
        self._shell = shell
        # Buffered file views over the channel's two output streams.
        self._stdout_filebuffer = shell.makefile('rb', -1)
        self._stderr_filebuffer = shell.makefile_stderr('rb', -1)
        self._stdouts = []
        self._stderrs = []

    def read(self, do_logging=False):
        """Collects whatever data is currently pending on either stream.

        When `do_logging` is true, each chunk is also echoed to the console.
        """
        if self._shell.recv_ready():
            self._collect(self._stdout_filebuffer,
                          len(self._shell.in_buffer),
                          self._stdouts, do_logging)
        if self._shell.recv_stderr_ready():
            self._collect(self._stderr_filebuffer,
                          len(self._shell.in_stderr_buffer),
                          self._stderrs, do_logging)

    def _collect(self, filebuffer, available, chunks, do_logging):
        # Read only the byte count the channel reports as pending so the
        # call never blocks waiting for more data.
        data = filebuffer.read(available)
        if do_logging:
            logger.console(data)
        chunks.append(data)

    def serialize(self, encoding):
        """Returns (stdout, stderr) as strings decoded with `encoding`.

        Any bytes still left in the file buffers are drained first.
        """
        return (self._drain(self._stdouts, self._stdout_filebuffer, encoding),
                self._drain(self._stderrs, self._stderr_filebuffer, encoding))

    @staticmethod
    def _drain(chunks, filebuffer, encoding):
        return (b''.join(chunks) + filebuffer.read()).decode(encoding)

class RemoteCommand(AbstractCommand):
_buffers = None

def read_outputs(self, timeout=None, output_during_execution=False, output_if_timeout=False):
    """Reads all output of the started command and its exit status.

    Blocks until the command finishes (or the receive loop raises on
    `timeout`), then closes the underlying channel.

    :param timeout: maximum time in seconds to wait, or None/falsy to
        wait indefinitely.
    :param output_during_execution: forwarded to the receive loop; if
        truthy, output is echoed while the command runs.
    :param output_if_timeout: forwarded to the receive loop; timeout
        logging is handled by the caller in abstractclient.
    :returns: A 3-tuple (stdout, stderr, rc).
    """
    # NOTE: the helper returns the pair in (stderr, stdout) order.
    stderr, stdout = self._receive_stdout_and_stderr(timeout, output_during_execution, output_if_timeout)
    rc = self._shell.recv_exit_status()
    self._shell.close()
    return stdout, stderr, rc

def read_unfinished_outputs(self):
    """Returns the output collected so far for an interrupted command.

    This can be used when execution is aborted, e.g. by a test timeout.

    :returns: A 2-tuple (stdout, stderr) as decoded strings, or
        (None, None) if no read is currently in progress.
    """
    if self._buffers is None:
        # No read_outputs() call is in flight, so nothing is buffered.
        return None, None
    if not self._shell.closed:
        self._buffers.read() # Get remaining unread output from channel

    stdout, stderr = self._buffers.serialize(self._encoding)
    self._cleanup_buffers()

    return stdout, stderr

def _cleanup_buffers(self):
    # Drop the buffer object so a later read_unfinished_outputs() call
    # reports that no output is pending.
    self._buffers = None

def _receive_stdout_and_stderr(self, timeout=None, output_during_execution=False, output_if_timeout=False):
stdout_filebuffer = self._shell.makefile('rb', -1)
stderr_filebuffer = self._shell.makefile_stderr('rb', -1)
stdouts = []
stderrs = []
self._buffers = OutputBuffers(self._shell)
while self._shell_open():
self._flush_stdout_and_stderr(stderr_filebuffer, stderrs, stdout_filebuffer, stdouts, timeout,
output_during_execution, output_if_timeout)
self._flush_stdout_and_stderr(timeout, output_during_execution, output_if_timeout)
time.sleep(0.01) # lets not be so busy
stdout = (b''.join(stdouts) + stdout_filebuffer.read()).decode(self._encoding)
stderr = (b''.join(stderrs) + stderr_filebuffer.read()).decode(self._encoding)

stdout, stderr = self._buffers.serialize(self._encoding)

self._buffers = None

return stderr, stdout

def _flush_stdout_and_stderr(self, stderr_filebuffer, stderrs, stdout_filebuffer, stdouts, timeout=None,
output_during_execution=False, output_if_timeout=False):
def _flush_stdout_and_stderr(self, timeout=None, output_during_execution=False, output_if_timeout=False):
do_logging = is_truthy(output_during_execution)
if timeout:
end_time = time.time() + timeout
while time.time() < end_time:
if self._shell.status_event.wait(0):
break
self._output_logging(stderr_filebuffer, stderrs, stdout_filebuffer, stdouts, output_during_execution)
self._buffers.read(do_logging)
if not self._shell.status_event.isSet():
if is_truthy(output_if_timeout):
logger.info(stdouts)
logger.info(stderrs)
raise SSHClientException('Timed out in %s seconds' % int(timeout))
raise SSHClientTimeoutException('Timed out in %s seconds' % int(timeout))
else:
self._output_logging(stderr_filebuffer, stderrs, stdout_filebuffer, stdouts, output_during_execution)

def _output_logging(self, stderr_filebuffer, stderrs, stdout_filebuffer, stdouts, output_during_execution=False):
if self._shell.recv_ready():
stdout_output = stdout_filebuffer.read(len(self._shell.in_buffer))
if is_truthy(output_during_execution):
logger.console(stdout_output)
stdouts.append(stdout_output)
if self._shell.recv_stderr_ready():
stderr_output = stderr_filebuffer.read(len(self._shell.in_stderr_buffer))
if is_truthy(output_during_execution):
logger.console(stderr_output)
stderrs.append(stderr_output)
self._buffers.read(do_logging)

def _shell_open(self):
return not (self._shell.closed or
Expand Down

0 comments on commit 10fd9d7

Please sign in to comment.