GUI acceptance tests using environment deployed from packages.
Build: #2438 failed
Job: Metadata failed
user fails to remove directory containing file because of lack in privileges[1oz 1op deployed]: Test case result
The following summarizes the result of the test "user fails to remove directory containing file because of lack in privileges[1oz 1op deployed]" in build #2438 of Onedata Products - gui acceptance pkg - Chrome POSIX tests.
- Description: user fails to remove directory containing file because of lack in privileges[1oz 1op deployed]
- Test class: gui.scenarios.test_oneprovider_posix
- Method: test_user_fails_to_remove_directory_containing_file_because_of_lack_in_privileges[1oz_1op_deployed]
- Jira Issue:
- Duration: < 1 sec
- Status: Failed (New Failure)
Error Log
test setup failure

self = <urllib3.connection.VerifiedHTTPSConnection object at 0x7fe1c2539310>

    def _new_conn(self):
        """ Establish a socket connection and set nodelay settings on it.
        :return: New socket connection.
        """
        ...
        try:
>           conn = connection.create_connection(
                (self._dns_host, self.port), self.timeout, **extra_kw)
/usr/local/lib/python3.8/dist-packages/urllib3/connection.py:170:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

address = ('10.87.23.8', 8443), timeout = None, source_address = None
socket_options = [(6, 1, 1)]

    def create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
                          source_address=None, socket_options=None):
        """Connect to *address* and return the socket object."""
        ...
        if err is not None:
>           raise err
/usr/local/lib/python3.8/dist-packages/urllib3/util/connection.py:79:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

address = ('10.87.23.8', 8443), timeout = None, source_address = None
socket_options = [(6, 1, 1)]

    def create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
                          source_address=None, socket_options=None):
        """Connect to *address* and return the socket object."""
        ...
>           sock.connect(sa)
E           ConnectionRefusedError: [Errno 111] Connection refused
/usr/local/lib/python3.8/dist-packages/urllib3/util/connection.py:69: ConnectionRefusedError

During handling of the above exception, another exception occurred:

self = <urllib3.connectionpool.HTTPSConnectionPool object at 0x7fe1c253c310>
method = 'GET', url = '/api/v1/namespaces/default/pods', body = None
headers = {'Accept': 'application/json', 'Content-Type': 'application/json', 'User-Agent': 'Swagger-Codegen/9.0.0/python'}
retries = Retry(total=0, connect=None, read=None, redirect=None, status=None)
redirect = False, assert_same_host = False, timeout = None, pool_timeout = None
release_conn = True, chunked = False, body_pos = None
response_kw = {'preload_content': True, 'request_url': 'https://10.87.23.8:8443/api/v1/namespaces/default/pods'}
conn = None, release_this_conn = True, err = None, clean_exit = False
timeout_obj = <urllib3.util.timeout.Timeout object at 0x7fe1c25391c0>
is_new_proxy_conn = False

    def urlopen(self, method, url, body=None, headers=None, retries=None,
                redirect=True, assert_same_host=True, timeout=_Default,
                pool_timeout=None, release_conn=None, chunked=False,
                body_pos=None, **response_kw):
        """Get a connection from the pool and perform an HTTP request."""
        ...
>       httplib_response = self._make_request(conn, method, url,
            timeout=timeout_obj, body=body, headers=headers, chunked=chunked)
/usr/local/lib/python3.8/dist-packages/urllib3/connectionpool.py:597:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <urllib3.connectionpool.HTTPSConnectionPool object at 0x7fe1c253c310>
conn = <urllib3.connection.VerifiedHTTPSConnection object at 0x7fe1c2539310>
method = 'GET', url = '/api/v1/namespaces/default/pods'
timeout = <urllib3.util.timeout.Timeout object at 0x7fe1c25391c0>
chunked = False
httplib_request_kw = {'body': None, 'headers': {'Accept': 'application/json', 'Content-Type': 'application/json', 'User-Agent': 'Swagger-Codegen/9.0.0/python'}}
timeout_obj = <urllib3.util.timeout.Timeout object at 0x7fe1c25392b0>

    def _make_request(self, conn, method, url, timeout=_Default, chunked=False,
                      **httplib_request_kw):
        """Perform a request on a given urllib connection object taken from our pool."""
        ...
>       self._validate_conn(conn)
/usr/local/lib/python3.8/dist-packages/urllib3/connectionpool.py:343:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

    def _validate_conn(self, conn):
        """Called right before a request is made, after the socket is created."""
        super(HTTPSConnectionPool, self)._validate_conn(conn)
        # Force connect early to allow us to validate the connection.
        if not getattr(conn, 'sock', None):  # AppEngine might not have `.sock`
>           conn.connect()
/usr/local/lib/python3.8/dist-packages/urllib3/connectionpool.py:849:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

    def connect(self):
        # Add certificate verification
>       conn = self._new_conn()
/usr/local/lib/python3.8/dist-packages/urllib3/connection.py:314:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

    def _new_conn(self):
        """ Establish a socket connection and set nodelay settings on it."""
        ...
        except SocketTimeout as e:
            raise ConnectTimeoutError(
                self, "Connection to %s timed out. (connect timeout=%s)"
                % (self.host, self.timeout))
        except SocketError as e:
>           raise NewConnectionError(
                self, "Failed to establish a new connection: %s" % e)
E           urllib3.exceptions.NewConnectionError: <urllib3.connection.VerifiedHTTPSConnection object at 0x7fe1c2539310>: Failed to establish a new connection: [Errno 111] Connection refused
/usr/local/lib/python3.8/dist-packages/urllib3/connection.py:179: NewConnectionError

During handling of the above exception, another exception occurred:

users = {'admin': <tests.utils.user_utils.AdminUser object at 0x7fe1c253c430>, 'onepanel': <tests.utils.user_utils.AdminUser o...tests.utils.user_utils.User object at 0x7fe1c520edc0>, 'user1': <tests.utils.user_utils.User object at 0x7fe1c520efa0>}
hosts = {'oneprovider-1': {'container-id': '35bae7c685857429896583f562e3987bffd7e64ed33dee27d36ad84e97abc1f1', 'hostname': 'de...27d6e65383dcd07', 'hostname': 'dev-onezone.default.svc.cluster.local', 'ip': '172.17.0.8', 'name': 'dev-onezone', ...}}

    @pytest.fixture(autouse=True)
    def emergency_passphrase(users, hosts):
        zone_pod_name = hosts['onezone']['pod-name']
>       zone_pod = onenv_utils.match_pods(zone_pod_name)[0]
tests/conftest.py:140:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

tests/utils/onenv_utils.py:174: in match_pods
    pods_list = list_pods()
tests/utils/onenv_utils.py:170: in list_pods
    return list(filter(lambda pod: is_pod(pod), list_pods_and_jobs()))
tests/utils/onenv_utils.py:132: in list_pods_and_jobs
    return kube.list_namespaced_pod(namespace).items
/usr/local/lib/python3.8/dist-packages/kubernetes/client/apis/core_v1_api.py:12469: in list_namespaced_pod
    (data) = self.list_namespaced_pod_with_http_info(namespace, **kwargs)
/usr/local/lib/python3.8/dist-packages/kubernetes/client/apis/core_v1_api.py:12559: in list_namespaced_pod_with_http_info
    return self.api_client.call_api('/api/v1/namespaces/{namespace}/pods', 'GET',
/usr/local/lib/python3.8/dist-packages/kubernetes/client/api_client.py:330: in call_api
    return self.__call_api(resource_path, method,
/usr/local/lib/python3.8/dist-packages/kubernetes/client/api_client.py:163: in __call_api
    response_data = self.request(method, url,
/usr/local/lib/python3.8/dist-packages/kubernetes/client/api_client.py:351: in request
    return self.rest_client.GET(url,
/usr/local/lib/python3.8/dist-packages/kubernetes/client/rest.py:227: in GET
    return self.request("GET", url,
/usr/local/lib/python3.8/dist-packages/kubernetes/client/rest.py:201: in request
    r = self.pool_manager.request(method, url,
/usr/local/lib/python3.8/dist-packages/urllib3/request.py:66: in request
    return self.request_encode_url(method, url, fields=fields,
/usr/local/lib/python3.8/dist-packages/urllib3/request.py:89: in request_encode_url
    return self.urlopen(method, url, **extra_kw)
/usr/local/lib/python3.8/dist-packages/urllib3/poolmanager.py:322: in urlopen
    response = conn.urlopen(method, u.request_uri, **kw)
/usr/local/lib/python3.8/dist-packages/urllib3/connectionpool.py:663: in urlopen
    return self.urlopen(method, url, body, headers, retries,
/usr/local/lib/python3.8/dist-packages/urllib3/connectionpool.py:663: in urlopen
    return self.urlopen(method, url, body, headers, retries,
/usr/local/lib/python3.8/dist-packages/urllib3/connectionpool.py:663: in urlopen
    return self.urlopen(method, url, body, headers, retries,
/usr/local/lib/python3.8/dist-packages/urllib3/connectionpool.py:637: in urlopen
    retries = retries.increment(method, url, error=e, _pool=self,
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = Retry(total=0, connect=None, read=None, redirect=None, status=None)
method = 'GET', url = '/api/v1/namespaces/default/pods', response = None
error = NewConnectionError('<urllib3.connection.VerifiedHTTPSConnection object at 0x7fe1c2539310>: Failed to establish a new connection: [Errno 111] Connection refused')
_pool = <urllib3.connectionpool.HTTPSConnectionPool object at 0x7fe1c253c310>
_stacktrace = <traceback object at 0x7fe1c2543d00>

    def increment(self, method=None, url=None, response=None, error=None,
                  _pool=None, _stacktrace=None):
        """Return a new Retry object with incremented retry counters."""
        ...
        if new_retry.is_exhausted():
>           raise MaxRetryError(_pool, url, error or ResponseError(cause))
E           urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='10.87.23.8', port=8443): Max retries exceeded with url: /api/v1/namespaces/default/pods (Caused by NewConnectionError('<urllib3.connection.VerifiedHTTPSConnection object at 0x7fe1c2539310>: Failed to establish a new connection: [Errno 111] Connection refused'))
/usr/local/lib/python3.8/dist-packages/urllib3/util/retry.py:398: MaxRetryError
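For context on the failure above: the error is raised during test setup, in the autouse emergency_passphrase fixture, which lists Kubernetes pods via the kubernetes client before the GUI scenario ever runs. Because the API server at 10.87.23.8:8443 refused the connection, the failure surfaces as a raw MaxRetryError rather than anything related to the directory-removal test itself. Below is a minimal sketch of how such a fixture could fail fast with a clearer message when the cluster API is unreachable; the kube_api_reachable helper, the guarded fixture name, and the hard-coded address are hypothetical illustrations, not part of the actual test suite.

    import socket

    import pytest


    def kube_api_reachable(host: str, port: int, timeout: float = 3.0) -> bool:
        """Return True if a plain TCP connection to the API endpoint succeeds.

        Hypothetical helper: a simple socket probe, not an existing function
        in tests/utils/onenv_utils.py.
        """
        try:
            with socket.create_connection((host, port), timeout=timeout):
                return True
        except OSError:  # covers ConnectionRefusedError, timeouts, unreachable hosts
            return False


    @pytest.fixture(autouse=True)
    def emergency_passphrase_guarded():
        """Sketch of a setup guard: report a clear reason when the cluster API
        is down instead of letting the pod lookup raise MaxRetryError.

        The address mirrors the one seen in the log above; in practice it would
        come from the active kubeconfig rather than being hard-coded.
        """
        if not kube_api_reachable('10.87.23.8', 8443):
            pytest.skip("Kubernetes API server is not reachable; "
                        "the one-env deployment is probably not running")
        # ... the original fixture logic (pod lookup, passphrase setup) would follow here

Whether to skip or fail in this situation is a judgment call: skipping keeps the report focused on genuine GUI regressions, while failing preserves the signal that the environment deployment itself is broken.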