GUI acceptance tests using environment deployed from packages.

Build: #1488 failed

Job: Onezone basic failed

Test case result: user sees that after unsupporting space number displayed in space counter for given provider decreases[1oz 1op deployed]

The following summarizes the result of the test "user sees that after unsupporting space number displayed in space counter for given provider decreases[1oz 1op deployed]" in build #1488 of Onedata Products - gui acceptance pkg - Chrome onezone basic tests.
Description: user sees that after unsupporting space number displayed in space counter for given provider decreases[1oz 1op deployed]
Test class: gui.scenarios.test_onezone_basic
Method: test_user_sees_that_after_unsupporting_space_number_displayed_in_space_counter_for_given_provider_decreases[1oz_1op_deployed]
Duration: 28 seconds
Status: Failed (New Failure)

Error Log

requests.exceptions.ReadTimeout: HTTPSConnectionPool(host='dev-oneprovider-krakow.default.svc.cluster.local', port=443): Read timed out. (read timeout=20)
self = <urllib3.connectionpool.HTTPSConnectionPool object at 0x7f908ef2fb00>
conn = <urllib3.connection.VerifiedHTTPSConnection object at 0x7f908eeaebe0>
method = 'GET', url = '/api/v3/oneprovider/spaces'
timeout = <urllib3.util.timeout.Timeout object at 0x7f908ef2fc18>
chunked = False
httplib_request_kw = {'body': None, 'headers': {'User-Agent': 'python-requests/2.22.0', 'Accept-Encoding': 'gzip, deflate', 'Accept': '*/*'...hNmRiOWNoM2ExNwowMDFhY2lkIHRpbWUgPCAxNjUyNTYwOTgyCjAwMmZzaWduYXR1cmUg1HJqwvUGTh467xI00y02p202myEsRpcj9sSElbwiTxXWyoK'}}
timeout_obj = <urllib3.util.timeout.Timeout object at 0x7f908eeae320>
read_timeout = 20

    def _make_request(self, conn, method, url, timeout=_Default, chunked=False,
                      **httplib_request_kw):
        """
            Perform a request on a given urllib connection object taken from our
            pool.
    
            :param conn:
                a connection from one of our connection pools
    
            :param timeout:
                Socket timeout in seconds for the request. This can be a
                float or integer, which will set the same timeout value for
                the socket connect and the socket read, or an instance of
                :class:`urllib3.util.Timeout`, which gives you more fine-grained
                control over your timeouts.
            """
        self.num_requests += 1
    
        timeout_obj = self._get_timeout(timeout)
        timeout_obj.start_connect()
        conn.timeout = timeout_obj.connect_timeout
    
        # Trigger any extra validation we need to do.
        try:
            self._validate_conn(conn)
        except (SocketTimeout, BaseSSLError) as e:
            # Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout.
            self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
            raise
    
        # conn.request() calls httplib.*.request, not the method in
        # urllib3.request. It also calls makefile (recv) on the socket.
        if chunked:
            conn.request_chunked(method, url, **httplib_request_kw)
        else:
            conn.request(method, url, **httplib_request_kw)
    
        # Reset the timeout for the recv() on the socket
        read_timeout = timeout_obj.read_timeout
    
        # App Engine doesn't have a sock attr
        if getattr(conn, 'sock', None):
            # In Python 3 socket.py will catch EAGAIN and return None when you
            # try and read into the file pointer created by http.client, which
            # instead raises a BadStatusLine exception. Instead of catching
            # the exception and assuming all BadStatusLine exceptions are read
            # timeouts, check for a zero timeout before making the request.
            if read_timeout == 0:
                raise ReadTimeoutError(
                    self, url, "Read timed out. (read timeout=%s)" % read_timeout)
            if read_timeout is Timeout.DEFAULT_TIMEOUT:
                conn.sock.settimeout(socket.getdefaulttimeout())
            else:  # None or a value
                conn.sock.settimeout(read_timeout)
    
        # Receive the response from the server
        try:
            try:  # Python 2.7, use buffering of HTTP responses
                httplib_response = conn.getresponse(buffering=True)
            except TypeError:  # Python 2.6 and older, Python 3
                try:
                    httplib_response = conn.getresponse()
                except Exception as e:
                    # Remove the TypeError from the exception chain in Python 3;
                    # otherwise it looks like a programming error was the cause.
>                   six.raise_from(e, None)

/usr/local/lib/python3.6/dist-packages/urllib3/connectionpool.py:384: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

value = timeout('The read operation timed out',), from_value = None

>   ???

<string>:2: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <urllib3.connectionpool.HTTPSConnectionPool object at 0x7f908ef2fb00>
conn = <urllib3.connection.VerifiedHTTPSConnection object at 0x7f908eeaebe0>
method = 'GET', url = '/api/v3/oneprovider/spaces'
timeout = <urllib3.util.timeout.Timeout object at 0x7f908ef2fc18>
chunked = False
httplib_request_kw = {'body': None, 'headers': {'User-Agent': 'python-requests/2.22.0', 'Accept-Encoding': 'gzip, deflate', 'Accept': '*/*'...hNmRiOWNoM2ExNwowMDFhY2lkIHRpbWUgPCAxNjUyNTYwOTgyCjAwMmZzaWduYXR1cmUg1HJqwvUGTh467xI00y02p202myEsRpcj9sSElbwiTxXWyoK'}}
timeout_obj = <urllib3.util.timeout.Timeout object at 0x7f908eeae320>
read_timeout = 20

    def _make_request(self, conn, method, url, timeout=_Default, chunked=False,
                      **httplib_request_kw):
        """
            Perform a request on a given urllib connection object taken from our
            pool.
    
            :param conn:
                a connection from one of our connection pools
    
            :param timeout:
                Socket timeout in seconds for the request. This can be a
                float or integer, which will set the same timeout value for
                the socket connect and the socket read, or an instance of
                :class:`urllib3.util.Timeout`, which gives you more fine-grained
                control over your timeouts.
            """
        self.num_requests += 1
    
        timeout_obj = self._get_timeout(timeout)
        timeout_obj.start_connect()
        conn.timeout = timeout_obj.connect_timeout
    
        # Trigger any extra validation we need to do.
        try:
            self._validate_conn(conn)
        except (SocketTimeout, BaseSSLError) as e:
            # Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout.
            self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
            raise
    
        # conn.request() calls httplib.*.request, not the method in
        # urllib3.request. It also calls makefile (recv) on the socket.
        if chunked:
            conn.request_chunked(method, url, **httplib_request_kw)
        else:
            conn.request(method, url, **httplib_request_kw)
    
        # Reset the timeout for the recv() on the socket
        read_timeout = timeout_obj.read_timeout
    
        # App Engine doesn't have a sock attr
        if getattr(conn, 'sock', None):
            # In Python 3 socket.py will catch EAGAIN and return None when you
            # try and read into the file pointer created by http.client, which
            # instead raises a BadStatusLine exception. Instead of catching
            # the exception and assuming all BadStatusLine exceptions are read
            # timeouts, check for a zero timeout before making the request.
            if read_timeout == 0:
                raise ReadTimeoutError(
                    self, url, "Read timed out. (read timeout=%s)" % read_timeout)
            if read_timeout is Timeout.DEFAULT_TIMEOUT:
                conn.sock.settimeout(socket.getdefaulttimeout())
            else:  # None or a value
                conn.sock.settimeout(read_timeout)
    
        # Receive the response from the server
        try:
            try:  # Python 2.7, use buffering of HTTP responses
                httplib_response = conn.getresponse(buffering=True)
            except TypeError:  # Python 2.6 and older, Python 3
                try:
>                   httplib_response = conn.getresponse()

/usr/local/lib/python3.6/dist-packages/urllib3/connectionpool.py:380: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <urllib3.connection.VerifiedHTTPSConnection object at 0x7f908eeaebe0>

    def getresponse(self):
        """Get the response from the server.
    
            If the HTTPConnection is in the correct state, returns an
            instance of HTTPResponse or of whatever object is returned by
            the response_class variable.
    
            If a request has not been sent or if a previous response has
            not be handled, ResponseNotReady is raised.  If the HTTP
            response indicates that the connection should be closed, then
            it will be closed before the response is returned.  When the
            connection is closed, the underlying socket is closed.
            """
    
        # if a prior response has been completed, then forget about it.
        if self.__response and self.__response.isclosed():
            self.__response = None
    
        # if a prior response exists, then it must be completed (otherwise, we
        # cannot read this response's header to determine the connection-close
        # behavior)
        #
        # note: if a prior response existed, but was connection-close, then the
        # socket and response were made independent of this HTTPConnection
        # object since a new request requires that we open a whole new
        # connection
        #
        # this means the prior response had one of two states:
        #   1) will_close: this connection was reset and the prior socket and
        #                  response operate independently
        #   2) persistent: the response was retained and we await its
        #                  isclosed() status to become true.
        #
        if self.__state != _CS_REQ_SENT or self.__response:
            raise ResponseNotReady(self.__state)
    
        if self.debuglevel > 0:
            response = self.response_class(self.sock, self.debuglevel,
                                           method=self._method)
        else:
            response = self.response_class(self.sock, method=self._method)
    
        try:
            try:
>               response.begin()

/usr/lib/python3.6/http/client.py:1331: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <http.client.HTTPResponse object at 0x7f908eeae828>

    def begin(self):
        if self.headers is not None:
            # we've already started reading the response
            return
    
        # read until we get a non-100 response
        while True:
>           version, status, reason = self._read_status()

/usr/lib/python3.6/http/client.py:297: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <http.client.HTTPResponse object at 0x7f908eeae828>

    def _read_status(self):
>       line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")

/usr/lib/python3.6/http/client.py:258: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <socket.SocketIO object at 0x7f908eeae208>
b = <memory at 0x7f908f1277c8>

    def readinto(self, b):
        """Read up to len(b) bytes into the writable buffer *b* and return
            the number of bytes read.  If the socket is non-blocking and no bytes
            are available, None is returned.
    
            If *b* is non-empty, a 0 return value indicates that the connection
            was shutdown at the other end.
            """
        self._checkClosed()
        self._checkReadable()
        if self._timeout_occurred:
            raise OSError("cannot read from timed out object")
        while True:
            try:
>               return self._sock.recv_into(b)

/usr/lib/python3.6/socket.py:586: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <ssl.SSLSocket [closed] fd=-1, family=AddressFamily.AF_INET, type=2049, proto=6>
buffer = <memory at 0x7f908f1277c8>, nbytes = 8192, flags = 0

    def recv_into(self, buffer, nbytes=None, flags=0):
        self._checkClosed()
        if buffer and (nbytes is None):
            nbytes = len(buffer)
        elif nbytes is None:
            nbytes = 1024
        if self._sslobj:
            if flags != 0:
                raise ValueError(
                  "non-zero flags not allowed in calls to recv_into() on %s" %
                  self.__class__)
>           return self.read(nbytes, buffer)

/usr/lib/python3.6/ssl.py:1012: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <ssl.SSLSocket [closed] fd=-1, family=AddressFamily.AF_INET, type=2049, proto=6>
len = 8192, buffer = <memory at 0x7f908f1277c8>

    def read(self, len=1024, buffer=None):
        """Read up to LEN bytes and return them.
            Return zero-length string on EOF."""
    
        self._checkClosed()
        if not self._sslobj:
            raise ValueError("Read on closed or unwrapped SSL socket.")
        try:
>           return self._sslobj.read(len, buffer)

/usr/lib/python3.6/ssl.py:874: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <ssl.SSLObject object at 0x7f908eeae278>, len = 8192
buffer = <memory at 0x7f908f1277c8>

    def read(self, len=1024, buffer=None):
        """Read up to 'len' bytes from the SSL object and return them.
    
            If 'buffer' is provided, read into this buffer and return the number of
            bytes read.
            """
        if buffer is not None:
>           v = self._sslobj.read(len, buffer)
E           socket.timeout: The read operation timed out

/usr/lib/python3.6/ssl.py:631: timeout

During handling of the above exception, another exception occurred:

self = <requests.adapters.HTTPAdapter object at 0x7f908ef2f710>
request = <PreparedRequest [GET]>, stream = False
timeout = <urllib3.util.timeout.Timeout object at 0x7f908ef2fa20>
verify = False, cert = None, proxies = OrderedDict()

    def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
        """Sends PreparedRequest object. Returns Response object.
    
            :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
            :param stream: (optional) Whether to stream the request content.
            :param timeout: (optional) How long to wait for the server to send
                data before giving up, as a float, or a :ref:`(connect timeout,
                read timeout) <timeouts>` tuple.
            :type timeout: float or tuple or urllib3 Timeout object
            :param verify: (optional) Either a boolean, in which case it controls whether
                we verify the server's TLS certificate, or a string, in which case it
                must be a path to a CA bundle to use
            :param cert: (optional) Any user-provided SSL certificate to be trusted.
            :param proxies: (optional) The proxies dictionary to apply to the request.
            :rtype: requests.Response
            """
    
        try:
            conn = self.get_connection(request.url, proxies)
        except LocationValueError as e:
            raise InvalidURL(e, request=request)
    
        self.cert_verify(conn, request.url, verify, cert)
        url = self.request_url(request, proxies)
        self.add_headers(request, stream=stream, timeout=timeout, verify=verify, cert=cert, proxies=proxies)
    
        chunked = not (request.body is None or 'Content-Length' in request.headers)
    
        if isinstance(timeout, tuple):
            try:
                connect, read = timeout
                timeout = TimeoutSauce(connect=connect, read=read)
            except ValueError as e:
                # this may raise a string formatting error.
                err = ("Invalid timeout {}. Pass a (connect, read) "
                       "timeout tuple, or a single float to set "
                       "both timeouts to the same value".format(timeout))
                raise ValueError(err)
        elif isinstance(timeout, TimeoutSauce):
            pass
        else:
            timeout = TimeoutSauce(connect=timeout, read=timeout)
    
        try:
            if not chunked:
                resp = conn.urlopen(
                    method=request.method,
                    url=url,
                    body=request.body,
                    headers=request.headers,
                    redirect=False,
                    assert_same_host=False,
                    preload_content=False,
                    decode_content=False,
                    retries=self.max_retries,
>                   timeout=timeout
                )

/usr/local/lib/python3.6/dist-packages/requests/adapters.py:449: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <urllib3.connectionpool.HTTPSConnectionPool object at 0x7f908ef2fb00>
method = 'GET', url = '/api/v3/oneprovider/spaces', body = None
headers = {'User-Agent': 'python-requests/2.22.0', 'Accept-Encoding': 'gzip, deflate', 'Accept': '*/*', 'Connection': 'keep-aliv...FhNmRiOWNoM2ExNwowMDFhY2lkIHRpbWUgPCAxNjUyNTYwOTgyCjAwMmZzaWduYXR1cmUg1HJqwvUGTh467xI00y02p202myEsRpcj9sSElbwiTxXWyoK'}
retries = Retry(total=0, connect=None, read=False, redirect=None, status=None)
redirect = False, assert_same_host = False
timeout = <urllib3.util.timeout.Timeout object at 0x7f908ef2fa20>
pool_timeout = None, release_conn = False, chunked = False, body_pos = None
response_kw = {'decode_content': False, 'preload_content': False}, conn = None
release_this_conn = True, err = None, clean_exit = False
timeout_obj = <urllib3.util.timeout.Timeout object at 0x7f908ef2fc18>
is_new_proxy_conn = False

    def urlopen(self, method, url, body=None, headers=None, retries=None,
                redirect=True, assert_same_host=True, timeout=_Default,
                pool_timeout=None, release_conn=None, chunked=False,
                body_pos=None, **response_kw):
        """
            Get a connection from the pool and perform an HTTP request. This is the
            lowest level call for making a request, so you'll need to specify all
            the raw details.
    
            .. note::
    
               More commonly, it's appropriate to use a convenience method provided
               by :class:`.RequestMethods`, such as :meth:`request`.
    
            .. note::
    
               `release_conn` will only behave as expected if
               `preload_content=False` because we want to make
               `preload_content=False` the default behaviour someday soon without
               breaking backwards compatibility.
    
            :param method:
                HTTP request method (such as GET, POST, PUT, etc.)
    
            :param body:
                Data to send in the request body (useful for creating
                POST requests, see HTTPConnectionPool.post_url for
                more convenience).
    
            :param headers:
                Dictionary of custom headers to send, such as User-Agent,
                If-None-Match, etc. If None, pool headers are used. If provided,
                these headers completely replace any pool-specific headers.
    
            :param retries:
                Configure the number of retries to allow before raising a
                :class:`~urllib3.exceptions.MaxRetryError` exception.
    
                Pass ``None`` to retry until you receive a response. Pass a
                :class:`~urllib3.util.retry.Retry` object for fine-grained control
                over different types of retries.
                Pass an integer number to retry connection errors that many times,
                but no other types of errors. Pass zero to never retry.
    
                If ``False``, then retries are disabled and any exception is raised
                immediately. Also, instead of raising a MaxRetryError on redirects,
                the redirect response will be returned.
    
            :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
    
            :param redirect:
                If True, automatically handle redirects (status codes 301, 302,
                303, 307, 308). Each redirect counts as a retry. Disabling retries
                will disable redirect, too.
    
            :param assert_same_host:
                If ``True``, will make sure that the host of the pool requests is
                consistent else will raise HostChangedError. When False, you can
                use the pool on an HTTP proxy and request foreign hosts.
    
            :param timeout:
                If specified, overrides the default timeout for this one
                request. It may be a float (in seconds) or an instance of
                :class:`urllib3.util.Timeout`.
    
            :param pool_timeout:
                If set and the pool is set to block=True, then this method will
                block for ``pool_timeout`` seconds and raise EmptyPoolError if no
                connection is available within the time period.
    
            :param release_conn:
                If False, then the urlopen call will not release the connection
                back into the pool once a response is received (but will release if
                you read the entire contents of the response such as when
                `preload_content=True`). This is useful if you're not preloading
                the response's content immediately. You will need to call
                ``r.release_conn()`` on the response ``r`` to return the connection
                back into the pool. If None, it takes the value of
                ``response_kw.get('preload_content', True)``.
    
            :param chunked:
                If True, urllib3 will send the body using chunked transfer
                encoding. Otherwise, urllib3 will send the body using the standard
                content-length form. Defaults to False.
    
            :param int body_pos:
                Position to seek to in file-like body in the event of a retry or
                redirect. Typically this won't need to be set because urllib3 will
                auto-populate the value when needed.
    
            :param \\**response_kw:
                Additional parameters are passed to
                :meth:`urllib3.response.HTTPResponse.from_httplib`
            """
        if headers is None:
            headers = self.headers
    
        if not isinstance(retries, Retry):
            retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
    
        if release_conn is None:
            release_conn = response_kw.get('preload_content', True)
    
        # Check host
        if assert_same_host and not self.is_same_host(url):
            raise HostChangedError(self, url, retries)
    
        conn = None
    
        # Track whether `conn` needs to be released before
        # returning/raising/recursing. Update this variable if necessary, and
        # leave `release_conn` constant throughout the function. That way, if
        # the function recurses, the original value of `release_conn` will be
        # passed down into the recursive call, and its value will be respected.
        #
        # See issue #651 [1] for details.
        #
        # [1] <https://github.com/shazow/urllib3/issues/651>
        release_this_conn = release_conn
    
        # Merge the proxy headers. Only do this in HTTP. We have to copy the
        # headers dict so we can safely change it without those changes being
        # reflected in anyone else's copy.
        if self.scheme == 'http':
            headers = headers.copy()
            headers.update(self.proxy_headers)
    
        # Must keep the exception bound to a separate variable or else Python 3
        # complains about UnboundLocalError.
        err = None
    
        # Keep track of whether we cleanly exited the except block. This
        # ensures we do proper cleanup in finally.
        clean_exit = False
    
        # Rewind body position, if needed. Record current position
        # for future rewinds in the event of a redirect/retry.
        body_pos = set_file_position(body, body_pos)
    
        try:
            # Request a connection from the queue.
            timeout_obj = self._get_timeout(timeout)
            conn = self._get_conn(timeout=pool_timeout)
    
            conn.timeout = timeout_obj.connect_timeout
    
            is_new_proxy_conn = self.proxy is not None and not getattr(conn, 'sock', None)
            if is_new_proxy_conn:
                self._prepare_proxy(conn)
    
            # Make the request on the httplib connection object.
            httplib_response = self._make_request(conn, method, url,
                                                  timeout=timeout_obj,
                                                  body=body, headers=headers,
                                                  chunked=chunked)
    
            # If we're going to release the connection in ``finally:``, then
            # the response doesn't need to know about the connection. Otherwise
            # it will also try to release it and we'll have a double-release
            # mess.
            response_conn = conn if not release_conn else None
    
            # Pass method to Response for length checking
            response_kw['request_method'] = method
    
            # Import httplib's response into our own wrapper object
            response = self.ResponseCls.from_httplib(httplib_response,
                                                     pool=self,
                                                     connection=response_conn,
                                                     retries=retries,
                                                     **response_kw)
    
            # Everything went great!
            clean_exit = True
    
        except queue.Empty:
            # Timed out by queue.
            raise EmptyPoolError(self, "No pool connections are available.")
    
        except (TimeoutError, HTTPException, SocketError, ProtocolError,
                BaseSSLError, SSLError, CertificateError) as e:
            # Discard the connection for these exceptions. It will be
            # replaced during the next _get_conn() call.
            clean_exit = False
            if isinstance(e, (BaseSSLError, CertificateError)):
                e = SSLError(e)
            elif isinstance(e, (SocketError, NewConnectionError)) and self.proxy:
                e = ProxyError('Cannot connect to proxy.', e)
            elif isinstance(e, (SocketError, HTTPException)):
                e = ProtocolError('Connection aborted.', e)
    
            retries = retries.increment(method, url, error=e, _pool=self,
>                                       _stacktrace=sys.exc_info()[2])

/usr/local/lib/python3.6/dist-packages/urllib3/connectionpool.py:638: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = Retry(total=0, connect=None, read=False, redirect=None, status=None)
method = 'GET', url = '/api/v3/oneprovider/spaces', response = None
error = ReadTimeoutError("HTTPSConnectionPool(host='dev-oneprovider-krakow.default.svc.cluster.local', port=443): Read timed out. (read timeout=20)",)
_pool = <urllib3.connectionpool.HTTPSConnectionPool object at 0x7f908ef2fb00>
_stacktrace = <traceback object at 0x7f908ef5ce08>

    def increment(self, method=None, url=None, response=None, error=None,
                  _pool=None, _stacktrace=None):
        """ Return a new Retry object with incremented retry counters.
    
            :param response: A response object, or None, if the server did not
                return a response.
            :type response: :class:`~urllib3.response.HTTPResponse`
            :param Exception error: An error encountered during the request, or
                None if the response was received successfully.
    
            :return: A new ``Retry`` object.
            """
        if self.total is False and error:
            # Disabled, indicate to re-raise the error.
            raise six.reraise(type(error), error, _stacktrace)
    
        total = self.total
        if total is not None:
            total -= 1
    
        connect = self.connect
        read = self.read
        redirect = self.redirect
        status_count = self.status
        cause = 'unknown'
        status = None
        redirect_location = None
    
        if error and self._is_connection_error(error):
            # Connect retry?
            if connect is False:
                raise six.reraise(type(error), error, _stacktrace)
            elif connect is not None:
                connect -= 1
    
        elif error and self._is_read_error(error):
            # Read retry?
            if read is False or not self._is_method_retryable(method):
>               raise six.reraise(type(error), error, _stacktrace)

/usr/local/lib/python3.6/dist-packages/urllib3/util/retry.py:367: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

tp = <class 'urllib3.exceptions.ReadTimeoutError'>
value = ReadTimeoutError("HTTPSConnectionPool(host='dev-oneprovider-krakow.default.svc.cluster.local', port=443): Read timed out. (read timeout=20)",)
tb = <traceback object at 0x7f908ef5ce08>

    def reraise(tp, value, tb=None):
        if value is None:
            value = tp()
        if value.__traceback__ is not tb:
            raise value.with_traceback(tb)
>       raise value

/usr/local/lib/python3.6/dist-packages/urllib3/packages/six.py:686: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <urllib3.connectionpool.HTTPSConnectionPool object at 0x7f908ef2fb00>
method = 'GET', url = '/api/v3/oneprovider/spaces', body = None
headers = {'User-Agent': 'python-requests/2.22.0', 'Accept-Encoding': 'gzip, deflate', 'Accept': '*/*', 'Connection': 'keep-aliv...FhNmRiOWNoM2ExNwowMDFhY2lkIHRpbWUgPCAxNjUyNTYwOTgyCjAwMmZzaWduYXR1cmUg1HJqwvUGTh467xI00y02p202myEsRpcj9sSElbwiTxXWyoK'}
retries = Retry(total=0, connect=None, read=False, redirect=None, status=None)
redirect = False, assert_same_host = False
timeout = <urllib3.util.timeout.Timeout object at 0x7f908ef2fa20>
pool_timeout = None, release_conn = False, chunked = False, body_pos = None
response_kw = {'decode_content': False, 'preload_content': False}, conn = None
release_this_conn = True, err = None, clean_exit = False
timeout_obj = <urllib3.util.timeout.Timeout object at 0x7f908ef2fc18>
is_new_proxy_conn = False

    def urlopen(self, method, url, body=None, headers=None, retries=None,
                redirect=True, assert_same_host=True, timeout=_Default,
                pool_timeout=None, release_conn=None, chunked=False,
                body_pos=None, **response_kw):
        """
            Get a connection from the pool and perform an HTTP request. This is the
            lowest level call for making a request, so you'll need to specify all
            the raw details.
    
            .. note::
    
               More commonly, it's appropriate to use a convenience method provided
               by :class:`.RequestMethods`, such as :meth:`request`.
    
            .. note::
    
               `release_conn` will only behave as expected if
               `preload_content=False` because we want to make
               `preload_content=False` the default behaviour someday soon without
               breaking backwards compatibility.
    
            :param method:
                HTTP request method (such as GET, POST, PUT, etc.)
    
            :param body:
                Data to send in the request body (useful for creating
                POST requests, see HTTPConnectionPool.post_url for
                more convenience).
    
            :param headers:
                Dictionary of custom headers to send, such as User-Agent,
                If-None-Match, etc. If None, pool headers are used. If provided,
                these headers completely replace any pool-specific headers.
    
            :param retries:
                Configure the number of retries to allow before raising a
                :class:`~urllib3.exceptions.MaxRetryError` exception.
    
                Pass ``None`` to retry until you receive a response. Pass a
                :class:`~urllib3.util.retry.Retry` object for fine-grained control
                over different types of retries.
                Pass an integer number to retry connection errors that many times,
                but no other types of errors. Pass zero to never retry.
    
                If ``False``, then retries are disabled and any exception is raised
                immediately. Also, instead of raising a MaxRetryError on redirects,
                the redirect response will be returned.
    
            :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
    
            :param redirect:
                If True, automatically handle redirects (status codes 301, 302,
                303, 307, 308). Each redirect counts as a retry. Disabling retries
                will disable redirect, too.
    
            :param assert_same_host:
                If ``True``, will make sure that the host of the pool requests is
                consistent else will raise HostChangedError. When False, you can
                use the pool on an HTTP proxy and request foreign hosts.
    
            :param timeout:
                If specified, overrides the default timeout for this one
                request. It may be a float (in seconds) or an instance of
                :class:`urllib3.util.Timeout`.
    
            :param pool_timeout:
                If set and the pool is set to block=True, then this method will
                block for ``pool_timeout`` seconds and raise EmptyPoolError if no
                connection is available within the time period.
    
            :param release_conn:
                If False, then the urlopen call will not release the connection
                back into the pool once a response is received (but will release if
                you read the entire contents of the response such as when
                `preload_content=True`). This is useful if you're not preloading
                the response's content immediately. You will need to call
                ``r.release_conn()`` on the response ``r`` to return the connection
                back into the pool. If None, it takes the value of
                ``response_kw.get('preload_content', True)``.
    
            :param chunked:
                If True, urllib3 will send the body using chunked transfer
                encoding. Otherwise, urllib3 will send the body using the standard
                content-length form. Defaults to False.
    
            :param int body_pos:
                Position to seek to in file-like body in the event of a retry or
                redirect. Typically this won't need to be set because urllib3 will
                auto-populate the value when needed.
    
            :param \\**response_kw:
                Additional parameters are passed to
                :meth:`urllib3.response.HTTPResponse.from_httplib`
            """
        if headers is None:
            headers = self.headers
    
        if not isinstance(retries, Retry):
            retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
    
        if release_conn is None:
            release_conn = response_kw.get('preload_content', True)
    
        # Check host
        if assert_same_host and not self.is_same_host(url):
            raise HostChangedError(self, url, retries)
    
        conn = None
    
        # Track whether `conn` needs to be released before
        # returning/raising/recursing. Update this variable if necessary, and
        # leave `release_conn` constant throughout the function. That way, if
        # the function recurses, the original value of `release_conn` will be
        # passed down into the recursive call, and its value will be respected.
        #
        # See issue #651 [1] for details.
        #
        # [1] <https://github.com/shazow/urllib3/issues/651>
        release_this_conn = release_conn
    
        # Merge the proxy headers. Only do this in HTTP. We have to copy the
        # headers dict so we can safely change it without those changes being
        # reflected in anyone else's copy.
        if self.scheme == 'http':
            headers = headers.copy()
            headers.update(self.proxy_headers)
    
        # Must keep the exception bound to a separate variable or else Python 3
        # complains about UnboundLocalError.
        err = None
    
        # Keep track of whether we cleanly exited the except block. This
        # ensures we do proper cleanup in finally.
        clean_exit = False
    
        # Rewind body position, if needed. Record current position
        # for future rewinds in the event of a redirect/retry.
        body_pos = set_file_position(body, body_pos)
    
        try:
            # Request a connection from the queue.
            timeout_obj = self._get_timeout(timeout)
            conn = self._get_conn(timeout=pool_timeout)
    
            conn.timeout = timeout_obj.connect_timeout
    
            is_new_proxy_conn = self.proxy is not None and not getattr(conn, 'sock', None)
            if is_new_proxy_conn:
                self._prepare_proxy(conn)
    
            # Make the request on the httplib connection object.
            httplib_response = self._make_request(conn, method, url,
                                                  timeout=timeout_obj,
                                                  body=body, headers=headers,
>                                                 chunked=chunked)

/usr/local/lib/python3.6/dist-packages/urllib3/connectionpool.py:600: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <urllib3.connectionpool.HTTPSConnectionPool object at 0x7f908ef2fb00>
conn = <urllib3.connection.VerifiedHTTPSConnection object at 0x7f908eeaebe0>
method = 'GET', url = '/api/v3/oneprovider/spaces'
timeout = <urllib3.util.timeout.Timeout object at 0x7f908ef2fc18>
chunked = False
httplib_request_kw = {'body': None, 'headers': {'User-Agent': 'python-requests/2.22.0', 'Accept-Encoding': 'gzip, deflate', 'Accept': '*/*'...hNmRiOWNoM2ExNwowMDFhY2lkIHRpbWUgPCAxNjUyNTYwOTgyCjAwMmZzaWduYXR1cmUg1HJqwvUGTh467xI00y02p202myEsRpcj9sSElbwiTxXWyoK'}}
timeout_obj = <urllib3.util.timeout.Timeout object at 0x7f908eeae320>
read_timeout = 20

    def _make_request(self, conn, method, url, timeout=_Default, chunked=False,
                      **httplib_request_kw):
        """
            Perform a request on a given urllib connection object taken from our
            pool.
    
            :param conn:
                a connection from one of our connection pools
    
            :param timeout:
                Socket timeout in seconds for the request. This can be a
                float or integer, which will set the same timeout value for
                the socket connect and the socket read, or an instance of
                :class:`urllib3.util.Timeout`, which gives you more fine-grained
                control over your timeouts.
            """
        self.num_requests += 1
    
        timeout_obj = self._get_timeout(timeout)
        timeout_obj.start_connect()
        conn.timeout = timeout_obj.connect_timeout
    
        # Trigger any extra validation we need to do.
        try:
            self._validate_conn(conn)
        except (SocketTimeout, BaseSSLError) as e:
            # Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout.
            self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
            raise
    
        # conn.request() calls httplib.*.request, not the method in
        # urllib3.request. It also calls makefile (recv) on the socket.
        if chunked:
            conn.request_chunked(method, url, **httplib_request_kw)
        else:
            conn.request(method, url, **httplib_request_kw)
    
        # Reset the timeout for the recv() on the socket
        read_timeout = timeout_obj.read_timeout
    
        # App Engine doesn't have a sock attr
        if getattr(conn, 'sock', None):
            # In Python 3 socket.py will catch EAGAIN and return None when you
            # try and read into the file pointer created by http.client, which
            # instead raises a BadStatusLine exception. Instead of catching
            # the exception and assuming all BadStatusLine exceptions are read
            # timeouts, check for a zero timeout before making the request.
            if read_timeout == 0:
                raise ReadTimeoutError(
                    self, url, "Read timed out. (read timeout=%s)" % read_timeout)
            if read_timeout is Timeout.DEFAULT_TIMEOUT:
                conn.sock.settimeout(socket.getdefaulttimeout())
            else:  # None or a value
                conn.sock.settimeout(read_timeout)
    
        # Receive the response from the server
        try:
            try:  # Python 2.7, use buffering of HTTP responses
                httplib_response = conn.getresponse(buffering=True)
            except TypeError:  # Python 2.6 and older, Python 3
                try:
                    httplib_response = conn.getresponse()
                except Exception as e:
                    # Remove the TypeError from the exception chain in Python 3;
                    # otherwise it looks like a programming error was the cause.
                    six.raise_from(e, None)
        except (SocketTimeout, BaseSSLError, SocketError) as e:
>           self._raise_timeout(err=e, url=url, timeout_value=read_timeout)

/usr/local/lib/python3.6/dist-packages/urllib3/connectionpool.py:386: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <urllib3.connectionpool.HTTPSConnectionPool object at 0x7f908ef2fb00>
err = timeout('The read operation timed out',)
url = '/api/v3/oneprovider/spaces', timeout_value = 20

    def _raise_timeout(self, err, url, timeout_value):
        """Is the error actually a timeout? Will raise a ReadTimeout or pass"""
    
        if isinstance(err, SocketTimeout):
>           raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)
E           urllib3.exceptions.ReadTimeoutError: HTTPSConnectionPool(host='dev-oneprovider-krakow.default.svc.cluster.local', port=443): Read timed out. (read timeout=20)

/usr/local/lib/python3.6/dist-packages/urllib3/connectionpool.py:306: ReadTimeoutError

During handling of the above exception, another exception occurred:

request = <FixtureRequest for <Function 'test_user_sees_that_after_unsupporting_space_number_displayed_in_space_counter_for_given_provider_decreases[1oz_1op_deployed]'>>

    @pytest.mark.usefixtures(*function_args)
    def scenario_wrapper(request):
>       _execute_scenario(feature, scenario, request, encoding)

/usr/local/lib/python3.6/dist-packages/pytest_bdd/scenario.py:227: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
/usr/local/lib/python3.6/dist-packages/pytest_bdd/scenario.py:189: in _execute_scenario
    _execute_step_function(request, scenario, step, step_func)
/usr/local/lib/python3.6/dist-packages/pytest_bdd/scenario.py:130: in _execute_step_function
    step_func(**kwargs)
/usr/local/lib/python3.6/dist-packages/pytest_bdd/steps.py:162: in step_func
    result = request.getfixturevalue(func.__name__)
/usr/local/lib/python3.6/dist-packages/_pytest/fixtures.py:428: in getfixturevalue
    return self._get_active_fixturedef(argname).cached_result[0]
/usr/local/lib/python3.6/dist-packages/_pytest/fixtures.py:453: in _get_active_fixturedef
    self._compute_fixture_value(fixturedef)
/usr/local/lib/python3.6/dist-packages/_pytest/fixtures.py:524: in _compute_fixture_value
    fixturedef.execute(request=subrequest)
/usr/local/lib/python3.6/dist-packages/_pytest/fixtures.py:795: in execute
    return hook.pytest_fixture_setup(fixturedef=self, request=request)
/usr/local/lib/python3.6/dist-packages/pluggy/__init__.py:617: in __call__
    return self._hookexec(self, self._nonwrappers + self._wrappers, kwargs)
/usr/local/lib/python3.6/dist-packages/pluggy/__init__.py:222: in _hookexec
    return self._inner_hookexec(hook, methods, kwargs)
/usr/local/lib/python3.6/dist-packages/pluggy/__init__.py:216: in <lambda>
    firstresult=hook.spec_opts.get('firstresult'),
/usr/local/lib/python3.6/dist-packages/_pytest/fixtures.py:826: in pytest_fixture_setup
    result = call_fixture_func(fixturefunc, request, kwargs)
/usr/local/lib/python3.6/dist-packages/_pytest/fixtures.py:718: in call_fixture_func
    res = fixturefunc(**kwargs)
tests/utils/bdd_utils.py:77: in wrapper
    return fun(*ba.args, **ba.kwargs)
tests/gui/steps/rest/env_up/spaces.py:104: in create_and_configure_spaces
    users, groups, storages, spaces)
tests/gui/steps/rest/env_up/spaces.py:136: in _create_and_configure_spaces
    users_to_add, users_db)
tests/gui/steps/rest/env_up/spaces.py:244: in _get_support
    wait_for_space_support(space_id, provider_hostname, all_members, users)
<decorator-gen-9>:2: in wait_for_space_support
    ???
tests/utils/utils.py:83: in wrapper
    result = fun(*args, **kwargs)
tests/gui/steps/rest/env_up/spaces.py:253: in wait_for_space_support
    headers={'X-Auth-Token': users[user].token}).content
tests/utils/rest_utils.py:45: in http_get
    verify, cert, auth, default_headers=default_headers)
tests/utils/rest_utils.py:81: in http_request
    cert=cert, auth=auth, data=data)
/usr/local/lib/python3.6/dist-packages/requests/api.py:75: in get
    return request('get', url, params=params, **kwargs)
/usr/local/lib/python3.6/dist-packages/requests/api.py:60: in request
    return session.request(method=method, url=url, **kwargs)
/usr/local/lib/python3.6/dist-packages/requests/sessions.py:533: in request
    resp = self.send(prep, **send_kwargs)
/usr/local/lib/python3.6/dist-packages/requests/sessions.py:646: in send
    r = adapter.send(request, **kwargs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <requests.adapters.HTTPAdapter object at 0x7f908ef2f710>
request = <PreparedRequest [GET]>, stream = False
timeout = <urllib3.util.timeout.Timeout object at 0x7f908ef2fa20>
verify = False, cert = None, proxies = OrderedDict()

    def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
        """Sends PreparedRequest object. Returns Response object.
    
            :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
            :param stream: (optional) Whether to stream the request content.
            :param timeout: (optional) How long to wait for the server to send
                data before giving up, as a float, or a :ref:`(connect timeout,
                read timeout) <timeouts>` tuple.
            :type timeout: float or tuple or urllib3 Timeout object
            :param verify: (optional) Either a boolean, in which case it controls whether
                we verify the server's TLS certificate, or a string, in which case it
                must be a path to a CA bundle to use
            :param cert: (optional) Any user-provided SSL certificate to be trusted.
            :param proxies: (optional) The proxies dictionary to apply to the request.
            :rtype: requests.Response
            """
    
        try:
            conn = self.get_connection(request.url, proxies)
        except LocationValueError as e:
            raise InvalidURL(e, request=request)
    
        self.cert_verify(conn, request.url, verify, cert)
        url = self.request_url(request, proxies)
        self.add_headers(request, stream=stream, timeout=timeout, verify=verify, cert=cert, proxies=proxies)
    
        chunked = not (request.body is None or 'Content-Length' in request.headers)
    
        if isinstance(timeout, tuple):
            try:
                connect, read = timeout
                timeout = TimeoutSauce(connect=connect, read=read)
            except ValueError as e:
                # this may raise a string formatting error.
                err = ("Invalid timeout {}. Pass a (connect, read) "
                       "timeout tuple, or a single float to set "
                       "both timeouts to the same value".format(timeout))
                raise ValueError(err)
        elif isinstance(timeout, TimeoutSauce):
            pass
        else:
            timeout = TimeoutSauce(connect=timeout, read=timeout)
    
        try:
            if not chunked:
                resp = conn.urlopen(
                    method=request.method,
                    url=url,
                    body=request.body,
                    headers=request.headers,
                    redirect=False,
                    assert_same_host=False,
                    preload_content=False,
                    decode_content=False,
                    retries=self.max_retries,
                    timeout=timeout
                )
    
            # Send the request.
            else:
                if hasattr(conn, 'proxy_pool'):
                    conn = conn.proxy_pool
    
                low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT)
    
                try:
                    low_conn.putrequest(request.method,
                                        url,
                                        skip_accept_encoding=True)
    
                    for header, value in request.headers.items():
                        low_conn.putheader(header, value)
    
                    low_conn.endheaders()
    
                    for i in request.body:
                        low_conn.send(hex(len(i))[2:].encode('utf-8'))
                        low_conn.send(b'\r\n')
                        low_conn.send(i)
                        low_conn.send(b'\r\n')
                    low_conn.send(b'0\r\n\r\n')
    
                    # Receive the response from the server
                    try:
                        # For Python 2.7, use buffering of HTTP responses
                        r = low_conn.getresponse(buffering=True)
                    except TypeError:
                        # For compatibility with Python 3.3+
                        r = low_conn.getresponse()
    
                    resp = HTTPResponse.from_httplib(
                        r,
                        pool=conn,
                        connection=low_conn,
                        preload_content=False,
                        decode_content=False
                    )
                except:
                    # If we hit any problems here, clean up the connection.
                    # Then, reraise so that we can handle the actual exception.
                    low_conn.close()
                    raise
    
        except (ProtocolError, socket.error) as err:
            raise ConnectionError(err, request=request)
    
        except MaxRetryError as e:
            if isinstance(e.reason, ConnectTimeoutError):
                # TODO: Remove this in 3.0.0: see #2811
                if not isinstance(e.reason, NewConnectionError):
                    raise ConnectTimeout(e, request=request)
    
            if isinstance(e.reason, ResponseError):
                raise RetryError(e, request=request)
    
            if isinstance(e.reason, _ProxyError):
                raise ProxyError(e, request=request)
    
            if isinstance(e.reason, _SSLError):
                # This branch is for urllib3 v1.22 and later.
                raise SSLError(e, request=request)
    
            raise ConnectionError(e, request=request)
    
        except ClosedPoolError as e:
            raise ConnectionError(e, request=request)
    
        except _ProxyError as e:
            raise ProxyError(e)
    
        except (_SSLError, _HTTPError) as e:
            if isinstance(e, _SSLError):
                # This branch is for urllib3 versions earlier than v1.22
                raise SSLError(e, request=request)
            elif isinstance(e, ReadTimeoutError):
>               raise ReadTimeout(e, request=request)
E               requests.exceptions.ReadTimeout: HTTPSConnectionPool(host='dev-oneprovider-krakow.default.svc.cluster.local', port=443): Read timed out. (read timeout=20)

/usr/local/lib/python3.6/dist-packages/requests/adapters.py:529: ReadTimeout
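
The failure is a client-side read timeout: the GET request to /api/v3/oneprovider/spaces on dev-oneprovider-krakow.default.svc.cluster.local did not return a response within the 20-second read timeout, and retries are disabled (Retry(total=0, read=False)), so the ReadTimeout propagates out of wait_for_space_support (tests/gui/steps/rest/env_up/spaces.py) and fails the scenario during environment setup. The sketch below shows one way such a call could tolerate a slow provider; the helper name, retry counts, and timeout values are illustrative assumptions, not the suite's actual rest_utils helpers.

    # Hedged sketch: retry the space-support check with a longer read timeout.
    # get_supported_spaces, its arguments, and the back-off are assumptions
    # for illustration only, not the helpers used by this test suite.
    import time
    import requests

    def get_supported_spaces(host, token, attempts=3, read_timeout=60):
        """GET /api/v3/oneprovider/spaces, retrying on read timeouts."""
        url = 'https://{}/api/v3/oneprovider/spaces'.format(host)
        headers = {'X-Auth-Token': token}
        last_exc = None
        for attempt in range(1, attempts + 1):
            try:
                # (connect timeout, read timeout) in seconds; verify=False matches
                # the self-signed certificates of the test deployment.
                resp = requests.get(url, headers=headers,
                                    timeout=(10, read_timeout), verify=False)
                resp.raise_for_status()
                return resp.json()
            except requests.exceptions.ReadTimeout as exc:
                last_exc = exc
                time.sleep(attempt * 5)  # back off before the next attempt
        raise last_exc

Whether a longer timeout or a retry is appropriate depends on why the provider was slow to answer; if dev-oneprovider-krakow was still starting up when the request was made, waiting for its readiness before running the scenario would address the same symptom.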