GoogleCloudPlatform/cloud-sql-python-connector

system.test_connector_object: test_multiple_connectors failed

flaky-bot opened this issue · 2 comments

Note: #726 was also for this test, but it was closed more than 10 days ago. So, I didn't mark it flaky.


commit: 65cccf3
buildURL: https://github.com/GoogleCloudPlatform/cloud-sql-python-connector/actions/runs/5798479891
status: failed

Test output
self = 
def _new_conn(self):
    """Establish a socket connection and set nodelay settings on it.

    :return: New socket connection.
    """
    extra_kw = {}
    if self.source_address:
        extra_kw["source_address"] = self.source_address

    if self.socket_options:
        extra_kw["socket_options"] = self.socket_options

    try:
      conn = connection.create_connection(
            (self._dns_host, self.port), self.timeout, **extra_kw
        )

.nox/system-3-8/lib/python3.8/site-packages/urllib3/connection.py:174:


.nox/system-3-8/lib/python3.8/site-packages/urllib3/util/connection.py:95: in create_connection
raise err


address = ('pipelines.actions.githubusercontent.com', 443), timeout = 120
source_address = None, socket_options = [(6, 1, 1)]

def create_connection(
    address,
    timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
    source_address=None,
    socket_options=None,
):
    """Connect to *address* and return the socket object.

    Convenience function.  Connect to *address* (a 2-tuple ``(host,
    port)``) and return the socket object.  Passing the optional
    *timeout* parameter will set the timeout on the socket instance
    before attempting to connect.  If no *timeout* is supplied, the
    global default timeout setting returned by :func:`socket.getdefaulttimeout`
    is used.  If *source_address* is set it must be a tuple of (host, port)
    for the socket to bind as a source address before making the connection.
    An host of '' or port 0 tells the OS to use the default.
    """

    host, port = address
    if host.startswith("["):
        host = host.strip("[]")
    err = None

    # Using the value from allowed_gai_family() in the context of getaddrinfo lets
    # us select whether to work with IPv4 DNS records, IPv6 records, or both.
    # The original create_connection function always returns all records.
    family = allowed_gai_family()

    try:
        host.encode("idna")
    except UnicodeError:
        return six.raise_from(
            LocationParseError(u"'%s', label empty or too long" % host), None
        )

    for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM):
        af, socktype, proto, canonname, sa = res
        sock = None
        try:
            sock = socket.socket(af, socktype, proto)

            # If provided, set socket level options before connecting.
            _set_socket_options(sock, socket_options)

            if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
                sock.settimeout(timeout)
            if source_address:
                sock.bind(source_address)
          sock.connect(sa)

E TimeoutError: [Errno 60] Operation timed out

.nox/system-3-8/lib/python3.8/site-packages/urllib3/util/connection.py:85: TimeoutError

During handling of the above exception, another exception occurred:

self = <urllib3.connectionpool.HTTPSConnectionPool object at 0x10d9779a0>
method = 'GET'
url = '/umAmnh0OhcfbtGEt7J16Yga6HsgM8dYIhPxbPiOYFLVwMnfbKz/00000000-0000-0000-0000-000000000000/_apis/distributedtask/hubs/A...cations%2Fglobal%2FworkloadIdentityPools%2Fgh-13a715-cloud-sql-pyt-dd1c5f%2Fproviders%2Fgh-13a715-cloud-sql-pyt-dd1c5f'
body = None
headers = {'User-Agent': 'python-requests/2.31.0', 'Accept-Encoding': 'gzip, deflate', 'Accept': '*/*', 'Connection': 'keep-aliv...l2ltNFYNgX0oU5PVDQTz5QCkzMaxCAPDv2aMeJuro3ea911Ftzo6Ez02uYxY9zmHqmxz7XBcRH6Y60_reBYCnEAdZnLiQHPCpX9VjCWETq0tdc738Oppw'}
retries = Retry(total=0, connect=None, read=False, redirect=None, status=None)
redirect = False, assert_same_host = False
timeout = Timeout(connect=120, read=120, total=None), pool_timeout = None
release_conn = False, chunked = False, body_pos = None
response_kw = {'decode_content': False, 'preload_content': False}
parsed_url = Url(scheme=None, auth=None, host=None, port=None, path='/umAmnh0OhcfbtGEt7J16Yga6HsgM8dYIhPxbPiOYFLVwMnfbKz/00000000-0...%2FworkloadIdentityPools%2Fgh-13a715-cloud-sql-pyt-dd1c5f%2Fproviders%2Fgh-13a715-cloud-sql-pyt-dd1c5f', fragment=None)
destination_scheme = None, conn = None, release_this_conn = True
http_tunnel_required = False, err = None, clean_exit = False

def urlopen(
    self,
    method,
    url,
    body=None,
    headers=None,
    retries=None,
    redirect=True,
    assert_same_host=True,
    timeout=_Default,
    pool_timeout=None,
    release_conn=None,
    chunked=False,
    body_pos=None,
    **response_kw
):
    """
    Get a connection from the pool and perform an HTTP request. This is the
    lowest level call for making a request, so you'll need to specify all
    the raw details.

    .. note::

       More commonly, it's appropriate to use a convenience method provided
       by :class:`.RequestMethods`, such as :meth:`request`.

    .. note::

       `release_conn` will only behave as expected if
       `preload_content=False` because we want to make
       `preload_content=False` the default behaviour someday soon without
       breaking backwards compatibility.

    :param method:
        HTTP request method (such as GET, POST, PUT, etc.)

    :param url:
        The URL to perform the request on.

    :param body:
        Data to send in the request body, either :class:`str`, :class:`bytes`,
        an iterable of :class:`str`/:class:`bytes`, or a file-like object.

    :param headers:
        Dictionary of custom headers to send, such as User-Agent,
        If-None-Match, etc. If None, pool headers are used. If provided,
        these headers completely replace any pool-specific headers.

    :param retries:
        Configure the number of retries to allow before raising a
        :class:`~urllib3.exceptions.MaxRetryError` exception.

        Pass ``None`` to retry until you receive a response. Pass a
        :class:`~urllib3.util.retry.Retry` object for fine-grained control
        over different types of retries.
        Pass an integer number to retry connection errors that many times,
        but no other types of errors. Pass zero to never retry.

        If ``False``, then retries are disabled and any exception is raised
        immediately. Also, instead of raising a MaxRetryError on redirects,
        the redirect response will be returned.

    :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.

    :param redirect:
        If True, automatically handle redirects (status codes 301, 302,
        303, 307, 308). Each redirect counts as a retry. Disabling retries
        will disable redirect, too.

    :param assert_same_host:
        If ``True``, will make sure that the host of the pool requests is
        consistent else will raise HostChangedError. When ``False``, you can
        use the pool on an HTTP proxy and request foreign hosts.

    :param timeout:
        If specified, overrides the default timeout for this one
        request. It may be a float (in seconds) or an instance of
        :class:`urllib3.util.Timeout`.

    :param pool_timeout:
        If set and the pool is set to block=True, then this method will
        block for ``pool_timeout`` seconds and raise EmptyPoolError if no
        connection is available within the time period.

    :param release_conn:
        If False, then the urlopen call will not release the connection
        back into the pool once a response is received (but will release if
        you read the entire contents of the response such as when
        `preload_content=True`). This is useful if you're not preloading
        the response's content immediately. You will need to call
        ``r.release_conn()`` on the response ``r`` to return the connection
        back into the pool. If None, it takes the value of
        ``response_kw.get('preload_content', True)``.

    :param chunked:
        If True, urllib3 will send the body using chunked transfer
        encoding. Otherwise, urllib3 will send the body using the standard
        content-length form. Defaults to False.

    :param int body_pos:
        Position to seek to in file-like body in the event of a retry or
        redirect. Typically this won't need to be set because urllib3 will
        auto-populate the value when needed.

    :param \\**response_kw:
        Additional parameters are passed to
        :meth:`urllib3.response.HTTPResponse.from_httplib`
    """

    parsed_url = parse_url(url)
    destination_scheme = parsed_url.scheme

    if headers is None:
        headers = self.headers

    if not isinstance(retries, Retry):
        retries = Retry.from_int(retries, redirect=redirect, default=self.retries)

    if release_conn is None:
        release_conn = response_kw.get("preload_content", True)

    # Check host
    if assert_same_host and not self.is_same_host(url):
        raise HostChangedError(self, url, retries)

    # Ensure that the URL we're connecting to is properly encoded
    if url.startswith("/"):
        url = six.ensure_str(_encode_target(url))
    else:
        url = six.ensure_str(parsed_url.url)

    conn = None

    # Track whether `conn` needs to be released before
    # returning/raising/recursing. Update this variable if necessary, and
    # leave `release_conn` constant throughout the function. That way, if
    # the function recurses, the original value of `release_conn` will be
    # passed down into the recursive call, and its value will be respected.
    #
    # See issue #651 [1] for details.
    #
    # [1] <https://github.com/urllib3/urllib3/issues/651>
    release_this_conn = release_conn

    http_tunnel_required = connection_requires_http_tunnel(
        self.proxy, self.proxy_config, destination_scheme
    )

    # Merge the proxy headers. Only done when not using HTTP CONNECT. We
    # have to copy the headers dict so we can safely change it without those
    # changes being reflected in anyone else's copy.
    if not http_tunnel_required:
        headers = headers.copy()
        headers.update(self.proxy_headers)

    # Must keep the exception bound to a separate variable or else Python 3
    # complains about UnboundLocalError.
    err = None

    # Keep track of whether we cleanly exited the except block. This
    # ensures we do proper cleanup in finally.
    clean_exit = False

    # Rewind body position, if needed. Record current position
    # for future rewinds in the event of a redirect/retry.
    body_pos = set_file_position(body, body_pos)

    try:
        # Request a connection from the queue.
        timeout_obj = self._get_timeout(timeout)
        conn = self._get_conn(timeout=pool_timeout)

        conn.timeout = timeout_obj.connect_timeout

        is_new_proxy_conn = self.proxy is not None and not getattr(
            conn, "sock", None
        )
        if is_new_proxy_conn and http_tunnel_required:
            self._prepare_proxy(conn)

        # Make the request on the httplib connection object.
      httplib_response = self._make_request(
            conn,
            method,
            url,
            timeout=timeout_obj,
            body=body,
            headers=headers,
            chunked=chunked,
        )

.nox/system-3-8/lib/python3.8/site-packages/urllib3/connectionpool.py:714:


.nox/system-3-8/lib/python3.8/site-packages/urllib3/connectionpool.py:403: in _make_request
self._validate_conn(conn)
.nox/system-3-8/lib/python3.8/site-packages/urllib3/connectionpool.py:1053: in _validate_conn
conn.connect()
.nox/system-3-8/lib/python3.8/site-packages/urllib3/connection.py:363: in connect
self.sock = conn = self._new_conn()


self = <urllib3.connection.HTTPSConnection object at 0x10d766280>

def _new_conn(self):
    """Establish a socket connection and set nodelay settings on it.

    :return: New socket connection.
    """
    extra_kw = {}
    if self.source_address:
        extra_kw["source_address"] = self.source_address

    if self.socket_options:
        extra_kw["socket_options"] = self.socket_options

    try:
        conn = connection.create_connection(
            (self._dns_host, self.port), self.timeout, **extra_kw
        )

    except SocketTimeout:
        raise ConnectTimeoutError(
            self,
            "Connection to %s timed out. (connect timeout=%s)"
            % (self.host, self.timeout),
        )

    except SocketError as e:
      raise NewConnectionError(
            self, "Failed to establish a new connection: %s" % e
        )

E urllib3.exceptions.NewConnectionError: <urllib3.connection.HTTPSConnection object at 0x10d766280>: Failed to establish a new connection: [Errno 60] Operation timed out

.nox/system-3-8/lib/python3.8/site-packages/urllib3/connection.py:186: NewConnectionError

During handling of the above exception, another exception occurred:

self = <requests.adapters.HTTPAdapter object at 0x10d977a90>
request = <PreparedRequest [GET]>, stream = False
timeout = Timeout(connect=120, read=120, total=None), verify = True, cert = None
proxies = OrderedDict()

def send(
    self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
):
    """Sends PreparedRequest object. Returns Response object.

    :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
    :param stream: (optional) Whether to stream the request content.
    :param timeout: (optional) How long to wait for the server to send
        data before giving up, as a float, or a :ref:`(connect timeout,
        read timeout) <timeouts>` tuple.
    :type timeout: float or tuple or urllib3 Timeout object
    :param verify: (optional) Either a boolean, in which case it controls whether
        we verify the server's TLS certificate, or a string, in which case it
        must be a path to a CA bundle to use
    :param cert: (optional) Any user-provided SSL certificate to be trusted.
    :param proxies: (optional) The proxies dictionary to apply to the request.
    :rtype: requests.Response
    """

    try:
        conn = self.get_connection(request.url, proxies)
    except LocationValueError as e:
        raise InvalidURL(e, request=request)

    self.cert_verify(conn, request.url, verify, cert)
    url = self.request_url(request, proxies)
    self.add_headers(
        request,
        stream=stream,
        timeout=timeout,
        verify=verify,
        cert=cert,
        proxies=proxies,
    )

    chunked = not (request.body is None or "Content-Length" in request.headers)

    if isinstance(timeout, tuple):
        try:
            connect, read = timeout
            timeout = TimeoutSauce(connect=connect, read=read)
        except ValueError:
            raise ValueError(
                f"Invalid timeout {timeout}. Pass a (connect, read) timeout tuple, "
                f"or a single float to set both timeouts to the same value."
            )
    elif isinstance(timeout, TimeoutSauce):
        pass
    else:
        timeout = TimeoutSauce(connect=timeout, read=timeout)

    try:
      resp = conn.urlopen(
            method=request.method,
            url=url,
            body=request.body,
            headers=request.headers,
            redirect=False,
            assert_same_host=False,
            preload_content=False,
            decode_content=False,
            retries=self.max_retries,
            timeout=timeout,
            chunked=chunked,
        )

.nox/system-3-8/lib/python3.8/site-packages/requests/adapters.py:486:


.nox/system-3-8/lib/python3.8/site-packages/urllib3/connectionpool.py:798: in urlopen
retries = retries.increment(


self = Retry(total=0, connect=None, read=False, redirect=None, status=None)
method = 'GET'
url = '/umAmnh0OhcfbtGEt7J16Yga6HsgM8dYIhPxbPiOYFLVwMnfbKz/00000000-0000-0000-0000-000000000000/_apis/distributedtask/hubs/A...cations%2Fglobal%2FworkloadIdentityPools%2Fgh-13a715-cloud-sql-pyt-dd1c5f%2Fproviders%2Fgh-13a715-cloud-sql-pyt-dd1c5f'
response = None
error = NewConnectionError('<urllib3.connection.HTTPSConnection object at 0x10d766280>: Failed to establish a new connection: [Errno 60] Operation timed out')
_pool = <urllib3.connectionpool.HTTPSConnectionPool object at 0x10d9779a0>
_stacktrace = <traceback object at 0x10d8d7500>

def increment(
    self,
    method=None,
    url=None,
    response=None,
    error=None,
    _pool=None,
    _stacktrace=None,
):
    """Return a new Retry object with incremented retry counters.

    :param response: A response object, or None, if the server did not
        return a response.
    :type response: :class:`~urllib3.response.HTTPResponse`
    :param Exception error: An error encountered during the request, or
        None if the response was received successfully.

    :return: A new ``Retry`` object.
    """
    if self.total is False and error:
        # Disabled, indicate to re-raise the error.
        raise six.reraise(type(error), error, _stacktrace)

    total = self.total
    if total is not None:
        total -= 1

    connect = self.connect
    read = self.read
    redirect = self.redirect
    status_count = self.status
    other = self.other
    cause = "unknown"
    status = None
    redirect_location = None

    if error and self._is_connection_error(error):
        # Connect retry?
        if connect is False:
            raise six.reraise(type(error), error, _stacktrace)
        elif connect is not None:
            connect -= 1

    elif error and self._is_read_error(error):
        # Read retry?
        if read is False or not self._is_method_retryable(method):
            raise six.reraise(type(error), error, _stacktrace)
        elif read is not None:
            read -= 1

    elif error:
        # Other retry?
        if other is not None:
            other -= 1

    elif response and response.get_redirect_location():
        # Redirect retry?
        if redirect is not None:
            redirect -= 1
        cause = "too many redirects"
        redirect_location = response.get_redirect_location()
        status = response.status

    else:
        # Incrementing because of a server error like a 500 in
        # status_forcelist and the given method is in the allowed_methods
        cause = ResponseError.GENERIC_ERROR
        if response and response.status:
            if status_count is not None:
                status_count -= 1
            cause = ResponseError.SPECIFIC_ERROR.format(status_code=response.status)
            status = response.status

    history = self.history + (
        RequestHistory(method, url, error, status, redirect_location),
    )

    new_retry = self.new(
        total=total,
        connect=connect,
        read=read,
        redirect=redirect,
        status=status_count,
        other=other,
        history=history,
    )

    if new_retry.is_exhausted():
      raise MaxRetryError(_pool, url, error or ResponseError(cause))

E urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='pipelines.actions.githubusercontent.com', port=443): Max retries exceeded with url: /umAmnh0OhcfbtGEt7J16Yga6HsgM8dYIhPxbPiOYFLVwMnfbKz/00000000-0000-0000-0000-000000000000/_apis/distributedtask/hubs/Actions/plans/738358b5-d9b2-481a-997d-606ae38da79b/jobs/1af45b58-8939-5fb3-28f4-65d1ba6d480d/idtoken?api-version=2.0&audience=https%3A%2F%2Fiam.googleapis.com%2Fprojects%2F1021588826382%2Flocations%2Fglobal%2FworkloadIdentityPools%2Fgh-13a715-cloud-sql-pyt-dd1c5f%2Fproviders%2Fgh-13a715-cloud-sql-pyt-dd1c5f (Caused by NewConnectionError('<urllib3.connection.HTTPSConnection object at 0x10d766280>: Failed to establish a new connection: [Errno 60] Operation timed out'))

.nox/system-3-8/lib/python3.8/site-packages/urllib3/util/retry.py:592: MaxRetryError

During handling of the above exception, another exception occurred:

self = <google.auth.transport.requests.Request object at 0x10d9773a0>
url = 'https://pipelines.actions.githubusercontent.com/umAmnh0OhcfbtGEt7J16Yga6HsgM8dYIhPxbPiOYFLVwMnfbKz/00000000-0000-0000...cations%2Fglobal%2FworkloadIdentityPools%2Fgh-13a715-cloud-sql-pyt-dd1c5f%2Fproviders%2Fgh-13a715-cloud-sql-pyt-dd1c5f'
method = 'GET', body = None
headers = {'Authorization': 'Bearer eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiIsIng1dCI6ImVCWl9jbjNzWFlBZDBjaDRUSEJLSElnT3dPRSJ9.eyJuYW1...l2ltNFYNgX0oU5PVDQTz5QCkzMaxCAPDv2aMeJuro3ea911Ftzo6Ez02uYxY9zmHqmxz7XBcRH6Y60_reBYCnEAdZnLiQHPCpX9VjCWETq0tdc738Oppw'}
timeout = 120, kwargs = {}
new_exc = TransportError(ConnectionError(MaxRetryError("HTTPSConnectionPool(host='pipelines.actions.githubusercontent.com', port...ion.HTTPSConnection object at 0x10d766280>: Failed to establish a new connection: [Errno 60] Operation timed out'))")))

def __call__(
    self,
    url,
    method="GET",
    body=None,
    headers=None,
    timeout=_DEFAULT_TIMEOUT,
    **kwargs
):
    """Make an HTTP request using requests.

    Args:
        url (str): The URI to be requested.
        method (str): The HTTP method to use for the request. Defaults
            to 'GET'.
        body (bytes): The payload or body in HTTP request.
        headers (Mapping[str, str]): Request headers.
        timeout (Optional[int]): The number of seconds to wait for a
            response from the server. If not specified or if None, the
            requests default timeout will be used.
        kwargs: Additional arguments passed through to the underlying
            requests :meth:`~requests.Session.request` method.

    Returns:
        google.auth.transport.Response: The HTTP response.

    Raises:
        google.auth.exceptions.TransportError: If any exception occurred.
    """
    try:
        _LOGGER.debug("Making request: %s %s", method, url)
      response = self.session.request(
            method, url, data=body, headers=headers, timeout=timeout, **kwargs
        )

.nox/system-3-8/lib/python3.8/site-packages/google/auth/transport/requests.py:193:


.nox/system-3-8/lib/python3.8/site-packages/requests/sessions.py:589: in request
resp = self.send(prep, **send_kwargs)
.nox/system-3-8/lib/python3.8/site-packages/requests/sessions.py:703: in send
r = adapter.send(request, **kwargs)


self = <requests.adapters.HTTPAdapter object at 0x10d977a90>
request = <PreparedRequest [GET]>, stream = False
timeout = Timeout(connect=120, read=120, total=None), verify = True, cert = None
proxies = OrderedDict()

def send(
    self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
):
    """Sends PreparedRequest object. Returns Response object.

    :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
    :param stream: (optional) Whether to stream the request content.
    :param timeout: (optional) How long to wait for the server to send
        data before giving up, as a float, or a :ref:`(connect timeout,
        read timeout) <timeouts>` tuple.
    :type timeout: float or tuple or urllib3 Timeout object
    :param verify: (optional) Either a boolean, in which case it controls whether
        we verify the server's TLS certificate, or a string, in which case it
        must be a path to a CA bundle to use
    :param cert: (optional) Any user-provided SSL certificate to be trusted.
    :param proxies: (optional) The proxies dictionary to apply to the request.
    :rtype: requests.Response
    """

    try:
        conn = self.get_connection(request.url, proxies)
    except LocationValueError as e:
        raise InvalidURL(e, request=request)

    self.cert_verify(conn, request.url, verify, cert)
    url = self.request_url(request, proxies)
    self.add_headers(
        request,
        stream=stream,
        timeout=timeout,
        verify=verify,
        cert=cert,
        proxies=proxies,
    )

    chunked = not (request.body is None or "Content-Length" in request.headers)

    if isinstance(timeout, tuple):
        try:
            connect, read = timeout
            timeout = TimeoutSauce(connect=connect, read=read)
        except ValueError:
            raise ValueError(
                f"Invalid timeout {timeout}. Pass a (connect, read) timeout tuple, "
                f"or a single float to set both timeouts to the same value."
            )
    elif isinstance(timeout, TimeoutSauce):
        pass
    else:
        timeout = TimeoutSauce(connect=timeout, read=timeout)

    try:
        resp = conn.urlopen(
            method=request.method,
            url=url,
            body=request.body,
            headers=request.headers,
            redirect=False,
            assert_same_host=False,
            preload_content=False,
            decode_content=False,
            retries=self.max_retries,
            timeout=timeout,
            chunked=chunked,
        )

    except (ProtocolError, OSError) as err:
        raise ConnectionError(err, request=request)

    except MaxRetryError as e:
        if isinstance(e.reason, ConnectTimeoutError):
            # TODO: Remove this in 3.0.0: see #2811
            if not isinstance(e.reason, NewConnectionError):
                raise ConnectTimeout(e, request=request)

        if isinstance(e.reason, ResponseError):
            raise RetryError(e, request=request)

        if isinstance(e.reason, _ProxyError):
            raise ProxyError(e, request=request)

        if isinstance(e.reason, _SSLError):
            # This branch is for urllib3 v1.22 and later.
            raise SSLError(e, request=request)
      raise ConnectionError(e, request=request)

E requests.exceptions.ConnectionError: HTTPSConnectionPool(host='pipelines.actions.githubusercontent.com', port=443): Max retries exceeded with url: /umAmnh0OhcfbtGEt7J16Yga6HsgM8dYIhPxbPiOYFLVwMnfbKz/00000000-0000-0000-0000-000000000000/_apis/distributedtask/hubs/Actions/plans/738358b5-d9b2-481a-997d-606ae38da79b/jobs/1af45b58-8939-5fb3-28f4-65d1ba6d480d/idtoken?api-version=2.0&audience=https%3A%2F%2Fiam.googleapis.com%2Fprojects%2F1021588826382%2Flocations%2Fglobal%2FworkloadIdentityPools%2Fgh-13a715-cloud-sql-pyt-dd1c5f%2Fproviders%2Fgh-13a715-cloud-sql-pyt-dd1c5f (Caused by NewConnectionError('<urllib3.connection.HTTPSConnection object at 0x10d766280>: Failed to establish a new connection: [Errno 60] Operation timed out'))

.nox/system-3-8/lib/python3.8/site-packages/requests/adapters.py:519: ConnectionError

The above exception was the direct cause of the following exception:

def test_multiple_connectors() -> None:
    """Test that same Cloud SQL instance can connect with two Connector objects."""
    first_connector = Connector()
    second_connector = Connector()
    try:
        pool = init_connection_engine(first_connector)
        pool2 = init_connection_engine(second_connector)
      with pool.connect() as conn:

tests/system/test_connector_object.py:81:


.nox/system-3-8/lib/python3.8/site-packages/sqlalchemy/engine/base.py:3264: in connect
return self._connection_cls(self)
.nox/system-3-8/lib/python3.8/site-packages/sqlalchemy/engine/base.py:145: in __init__
self._dbapi_connection = engine.raw_connection()
.nox/system-3-8/lib/python3.8/site-packages/sqlalchemy/engine/base.py:3288: in raw_connection
return self.pool.connect()
.nox/system-3-8/lib/python3.8/site-packages/sqlalchemy/pool/base.py:452: in connect
return _ConnectionFairy._checkout(self)
.nox/system-3-8/lib/python3.8/site-packages/sqlalchemy/pool/base.py:1267: in _checkout
fairy = _ConnectionRecord.checkout(pool)
.nox/system-3-8/lib/python3.8/site-packages/sqlalchemy/pool/base.py:716: in checkout
rec = pool._do_get()
.nox/system-3-8/lib/python3.8/site-packages/sqlalchemy/pool/impl.py:170: in _do_get
self._dec_overflow()
.nox/system-3-8/lib/python3.8/site-packages/sqlalchemy/util/langhelpers.py:147: in __exit__
raise exc_value.with_traceback(exc_tb)
.nox/system-3-8/lib/python3.8/site-packages/sqlalchemy/pool/impl.py:167: in _do_get
return self._create_connection()
.nox/system-3-8/lib/python3.8/site-packages/sqlalchemy/pool/base.py:393: in _create_connection
return _ConnectionRecord(self)
.nox/system-3-8/lib/python3.8/site-packages/sqlalchemy/pool/base.py:678: in __init__
self.__connect()
.nox/system-3-8/lib/python3.8/site-packages/sqlalchemy/pool/base.py:903: in __connect
pool.logger.debug("Error on connect(): %s", e)
.nox/system-3-8/lib/python3.8/site-packages/sqlalchemy/util/langhelpers.py:147: in __exit__
raise exc_value.with_traceback(exc_tb)
.nox/system-3-8/lib/python3.8/site-packages/sqlalchemy/pool/base.py:898: in __connect
self.dbapi_connection = connection = pool._invoke_creator(self)
.nox/system-3-8/lib/python3.8/site-packages/sqlalchemy/pool/base.py:365: in &lt;lambda&gt;
return lambda rec: creator_fn()
tests/system/test_connector_object.py:38: in getconn
conn = custom_connector.connect(
google/cloud/sql/connector/connector.py:163: in connect
return connect_task.result()
../../../hostedtoolcache/Python/3.8.17/x64/lib/python3.8/concurrent/futures/_base.py:444: in result
return self.__get_result()
../../../hostedtoolcache/Python/3.8.17/x64/lib/python3.8/concurrent/futures/_base.py:389: in __get_result
raise self._exception
google/cloud/sql/connector/connector.py:208: in connect_async
instance = Instance(
google/cloud/sql/connector/instance.py:250: in __init__
self._credentials = _auth_init(credentials)
google/cloud/sql/connector/utils.py:124: in _auth_init
credentials, _ = default(scopes=scopes)
.nox/system-3-8/lib/python3.8/site-packages/google/auth/_default.py:675: in default
project_id = credentials.get_project_id(request=request)
.nox/system-3-8/lib/python3.8/site-packages/google/auth/external_account.py:339: in get_project_id
self.before_request(request, "GET", url, headers)
.nox/system-3-8/lib/python3.8/site-packages/google/auth/credentials.py:151: in before_request
self.refresh(request)
.nox/system-3-8/lib/python3.8/site-packages/google/auth/external_account.py:360: in refresh
self._impersonated_credentials.refresh(request)
.nox/system-3-8/lib/python3.8/site-packages/google/auth/impersonated_credentials.py:247: in refresh
self._update_token(request)
.nox/system-3-8/lib/python3.8/site-packages/google/auth/impersonated_credentials.py:260: in _update_token
self._source_credentials.refresh(request)
.nox/system-3-8/lib/python3.8/site-packages/google/auth/external_account.py:378: in refresh
subject_token=self.retrieve_subject_token(request),
.nox/system-3-8/lib/python3.8/site-packages/google/auth/identity_pool.py:158: in retrieve_subject_token
self._get_token_data(request),
.nox/system-3-8/lib/python3.8/site-packages/google/auth/identity_pool.py:167: in _get_token_data
return self._get_url_data(
.nox/system-3-8/lib/python3.8/site-packages/google/auth/identity_pool.py:179: in _get_url_data
response = request(url=url, method="GET", headers=headers)
.nox/system-3-8/lib/python3.8/site-packages/google/auth/transport/requests.py:199: in __call__
six.raise_from(new_exc, caught_exc)


value = None
from_value = ConnectionError(MaxRetryError("HTTPSConnectionPool(host='pipelines.actions.githubusercontent.com', port=443): Max retr...tion.HTTPSConnection object at 0x10d766280>: Failed to establish a new connection: [Errno 60] Operation timed out'))"))

???
E google.auth.exceptions.TransportError: HTTPSConnectionPool(host='pipelines.actions.githubusercontent.com', port=443): Max retries exceeded with url: /umAmnh0OhcfbtGEt7J16Yga6HsgM8dYIhPxbPiOYFLVwMnfbKz/00000000-0000-0000-0000-000000000000/_apis/distributedtask/hubs/Actions/plans/738358b5-d9b2-481a-997d-606ae38da79b/jobs/1af45b58-8939-5fb3-28f4-65d1ba6d480d/idtoken?api-version=2.0&audience=https%3A%2F%2Fiam.googleapis.com%2Fprojects%2F1021588826382%2Flocations%2Fglobal%2FworkloadIdentityPools%2Fgh-13a715-cloud-sql-pyt-dd1c5f%2Fproviders%2Fgh-13a715-cloud-sql-pyt-dd1c5f (Caused by NewConnectionError('<urllib3.connection.HTTPSConnection object at 0x10d766280>: Failed to establish a new connection: [Errno 60] Operation timed out'))

:3: TransportError

During handling of the above exception, another exception occurred:

def test_multiple_connectors() -> None:
    """Test that same Cloud SQL instance can connect with two Connector objects."""
    first_connector = Connector()
    second_connector = Connector()
    try:
        pool = init_connection_engine(first_connector)
        pool2 = init_connection_engine(second_connector)

        with pool.connect() as conn:
            conn.execute(sqlalchemy.text("SELECT 1"))

        with pool2.connect() as conn:
            conn.execute(sqlalchemy.text("SELECT 1"))

        instance_connection_string = os.environ["MYSQL_CONNECTION_NAME"]
        assert instance_connection_string in first_connector._instances
        assert instance_connection_string in second_connector._instances
        assert (
            first_connector._instances[instance_connection_string]
            != second_connector._instances[instance_connection_string]
        )
    except Exception as e:
      logging.exception("Failed to connect with multiple Connector objects!", e)

tests/system/test_connector_object.py:95:


../../../hostedtoolcache/Python/3.8.17/x64/lib/python3.8/logging/__init__.py:2057: in exception
error(msg, *args, exc_info=exc_info, **kwargs)
../../../hostedtoolcache/Python/3.8.17/x64/lib/python3.8/logging/__init__.py:2049: in error
root.error(msg, *args, **kwargs)
../../../hostedtoolcache/Python/3.8.17/x64/lib/python3.8/logging/__init__.py:1475: in error
self._log(ERROR, msg, args, **kwargs)
../../../hostedtoolcache/Python/3.8.17/x64/lib/python3.8/logging/__init__.py:1589: in _log
self.handle(record)
../../../hostedtoolcache/Python/3.8.17/x64/lib/python3.8/logging/__init__.py:1599: in handle
self.callHandlers(record)
../../../hostedtoolcache/Python/3.8.17/x64/lib/python3.8/logging/__init__.py:1661: in callHandlers
hdlr.handle(record)
../../../hostedtoolcache/Python/3.8.17/x64/lib/python3.8/logging/__init__.py:954: in handle
self.emit(record)
.nox/system-3-8/lib/python3.8/site-packages/_pytest/logging.py:372: in emit
super().emit(record)
../../../hostedtoolcache/Python/3.8.17/x64/lib/python3.8/logging/__init__.py:1093: in emit
self.handleError(record)
../../../hostedtoolcache/Python/3.8.17/x64/lib/python3.8/logging/__init__.py:1085: in emit
msg = self.format(record)
../../../hostedtoolcache/Python/3.8.17/x64/lib/python3.8/logging/__init__.py:929: in format
return fmt.format(record)
.nox/system-3-8/lib/python3.8/site-packages/_pytest/logging.py:136: in format
return super().format(record)
../../../hostedtoolcache/Python/3.8.17/x64/lib/python3.8/logging/__init__.py:668: in format
record.message = record.getMessage()


self = <LogRecord: root, 40, /Users/runner/work/cloud-sql-python-connector/cloud-sql-python-connector/tests/system/test_connector_object.py, 95, "Failed to connect with multiple Connector objects!">

def getMessage(self):
    """
    Return the message for this LogRecord.

    Return the message for this LogRecord after merging any user-supplied
    arguments with the message.
    """
    msg = str(self.msg)
    if self.args:
      msg = msg % self.args

E TypeError: not all arguments converted during string formatting

../../../hostedtoolcache/Python/3.8.17/x64/lib/python3.8/logging/__init__.py:373: TypeError
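Note: the final `TypeError: not all arguments converted during string formatting` is a secondary failure in the test itself, not in the connector. The call `logging.exception("Failed to connect with multiple Connector objects!", e)` passes the exception object as a `%`-style format argument for a message with no placeholders, which both raises the TypeError and hides the underlying `TransportError`. A minimal sketch of the corrected pattern (the `might_fail` helper is a hypothetical stand-in for the connection attempt):

```python
import logging

def might_fail() -> None:
    # Hypothetical stand-in for the connection attempt made in the test.
    raise ConnectionError("simulated transient failure")

try:
    might_fail()
except Exception:
    # logging.exception() already records the active exception and traceback,
    # so the exception object must not be passed as an extra positional
    # argument (logging treats extra args as %-format arguments, which is
    # exactly the TypeError shown above).
    logging.exception("Failed to connect with multiple Connector objects!")
    raise  # re-raise so pytest reports the original error, not a logging TypeError
```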

Looks like this issue is flaky. 😟

I'm going to leave this open and stop commenting.

A human should fix and close this.


When run at the same commit (65cccf3), this test passed in one build (https://github.com/GoogleCloudPlatform/cloud-sql-python-connector/actions/runs/5798479891) and failed in another build (https://github.com/GoogleCloudPlatform/cloud-sql-python-connector/actions/runs/5798479891).

I believe this is a quota issue, as I opened and merged several Renovate PRs around the same time.
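If the root cause is indeed a transient quota or network issue reaching `pipelines.actions.githubusercontent.com`, one mitigation (not part of the current test) would be to retry the initial connection with a short backoff before failing. A rough sketch, where `connect_once` is a hypothetical zero-argument callable wrapping a single connection attempt:

```python
import time

def connect_with_retry(connect_once, attempts: int = 3, delay: float = 2.0):
    """Call connect_once() up to `attempts` times, sleeping between tries.

    connect_once is a hypothetical zero-argument callable that performs a
    single connection attempt (e.g. opening a pool connection and running
    SELECT 1).
    """
    last_exc = None
    for attempt in range(1, attempts + 1):
        try:
            return connect_once()
        except Exception as exc:  # broad on purpose: transient network errors vary
            last_exc = exc
            if attempt < attempts:
                time.sleep(delay * attempt)  # simple linear backoff
    raise last_exc
```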