Subscription Watch / SWATCH-3974 Evaluate post-stage test failures to prepare for prod release / SWATCH-4086

test_tally_openshift_payg.py::test_verify_tally_system_table_data_with_telemeter


    • Type: Sub-task
    • Resolution: Done
    • Priority: Major
    • subs-swatch-thunder
    • Swatch Thunder Sprint 2, Swatch Thunder Sprint 3

      test_tally_openshift_payg.py::test_verify_tally_system_table_data_with_telemeter[OpenShift-metrics]

      test_tally_openshift_payg.py::test_verify_tally_system_table_data_with_telemeter[OpenShift-dedicated-metrics]

       

      https://reportportal-smqe.apps.dno.ocp-hub.prod.psi.redhat.com/ui/#subscription_watch/launches/all/347065/28748424/log?item0Params=filter.eq.hasStats%3Dtrue%26filter.eq.hasChildren%3Dfalse%26filter.in.type%3DSTEP%26filter.in.status%3DFAILED

       

      self = <urllib3.connection.HTTPConnection object at 0x7fed6a5af650>
      
          def _new_conn(self) -> socket.socket:
              """Establish a socket connection and set nodelay settings on it.
          
              :return: New socket connection.
              """
              try:
      >           sock = connection.create_connection(
                      (self._dns_host, self.port),
                      self.timeout,
                      source_address=self.source_address,
                      socket_options=self.socket_options,
                  )
      
      /iqe_venv/lib/python3.12/site-packages/urllib3/connection.py:198: 
      _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
      /iqe_venv/lib/python3.12/site-packages/urllib3/util/connection.py:85: in create_connection
          raise err
      _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
      
      address = ('10.0.151.243', 80), timeout = None, source_address = None
      socket_options = [(6, 1, 1)]
      
          def create_connection(
              address: tuple[str, int],
              timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
              source_address: tuple[str, int] | None = None,
              socket_options: _TYPE_SOCKET_OPTIONS | None = None,
          ) -> socket.socket:
              """Connect to *address* and return the socket object.
          
              Convenience function.  Connect to *address* (a 2-tuple ``(host,
              port)``) and return the socket object.  Passing the optional
              *timeout* parameter will set the timeout on the socket instance
              before attempting to connect.  If no *timeout* is supplied, the
              global default timeout setting returned by :func:`socket.getdefaulttimeout`
              is used.  If *source_address* is set it must be a tuple of (host, port)
              for the socket to bind as a source address before making the connection.
              An host of '' or port 0 tells the OS to use the default.
              """
          
              host, port = address
              if host.startswith("["):
                  host = host.strip("[]")
              err = None
          
              # Using the value from allowed_gai_family() in the context of getaddrinfo lets
              # us select whether to work with IPv4 DNS records, IPv6 records, or both.
              # The original create_connection function always returns all records.
              family = allowed_gai_family()
          
              try:
                  host.encode("idna")
              except UnicodeError:
                  raise LocationParseError(f"'{host}', label empty or too long") from None
          
              for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM):
                  af, socktype, proto, canonname, sa = res
                  sock = None
                  try:
                      sock = socket.socket(af, socktype, proto)
          
                      # If provided, set socket level options before connecting.
                      _set_socket_options(sock, socket_options)
          
                      if timeout is not _DEFAULT_TIMEOUT:
                          sock.settimeout(timeout)
                      if source_address:
                          sock.bind(source_address)
      >               sock.connect(sa)
      E               ConnectionRefusedError: [Errno 111] Connection refused
      
      /iqe_venv/lib/python3.12/site-packages/urllib3/util/connection.py:73: ConnectionRefusedError
      
      The above exception was the direct cause of the following exception:
      
      self = <urllib3.connectionpool.HTTPConnectionPool object at 0x7fed6ae37ad0>
      method = 'GET'
      url = '/api/v1/query_range?query=max%28cluster%3Ausage%3Aworkload%3Acapacity_physical_cpu_hours%29+by+%28_id%29+%2A+on%28_id...%2C+support%3D~%27Premium%7CStandard%7CSelf-Support%7CNone%27%7D%5B1h%5D%29&start=1759280400&end=1759363200&step=3600s'
      body = None
      headers = {'User-Agent': 'python-requests/2.32.5', 'Accept-Encoding': 'gzip, deflate', 'Accept': '*/*', 'Connection': 'keep-alive'}
      retries = Retry(total=0, connect=None, read=None, redirect=None, status=None)
      redirect = False, assert_same_host = False
      timeout = Timeout(connect=None, read=None, total=None), pool_timeout = None
      release_conn = False, chunked = False, body_pos = None, preload_content = False
      decode_content = False, response_kw = {}
      parsed_url = Url(scheme=None, auth=None, host=None, port=None, path='/api/v1/query_range', query='query=max%28cluster%3Ausage%3Awor...27Premium%7CStandard%7CSelf-Support%7CNone%27%7D%5B1h%5D%29&start=1759280400&end=1759363200&step=3600s', fragment=None)
      destination_scheme = None, conn = None, release_this_conn = True
      http_tunnel_required = False, err = None, clean_exit = False
      
          def urlopen(  # type: ignore[override]
              self,
              method: str,
              url: str,
              body: _TYPE_BODY | None = None,
              headers: typing.Mapping[str, str] | None = None,
              retries: Retry | bool | int | None = None,
              redirect: bool = True,
              assert_same_host: bool = True,
              timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
              pool_timeout: int | None = None,
              release_conn: bool | None = None,
              chunked: bool = False,
              body_pos: _TYPE_BODY_POSITION | None = None,
              preload_content: bool = True,
              decode_content: bool = True,
              **response_kw: typing.Any,
          ) -> BaseHTTPResponse:
              """
              Get a connection from the pool and perform an HTTP request. This is the
              lowest level call for making a request, so you'll need to specify all
              the raw details.
          
              .. note::
          
                 More commonly, it's appropriate to use a convenience method
                 such as :meth:`request`.
          
              .. note::
          
                 `release_conn` will only behave as expected if
                 `preload_content=False` because we want to make
                 `preload_content=False` the default behaviour someday soon without
                 breaking backwards compatibility.
          
              :param method:
                  HTTP request method (such as GET, POST, PUT, etc.)
          
              :param url:
                  The URL to perform the request on.
          
              :param body:
                  Data to send in the request body, either :class:`str`, :class:`bytes`,
                  an iterable of :class:`str`/:class:`bytes`, or a file-like object.
          
              :param headers:
                  Dictionary of custom headers to send, such as User-Agent,
                  If-None-Match, etc. If None, pool headers are used. If provided,
                  these headers completely replace any pool-specific headers.
          
              :param retries:
                  Configure the number of retries to allow before raising a
                  :class:`~urllib3.exceptions.MaxRetryError` exception.
          
                  If ``None`` (default) will retry 3 times, see ``Retry.DEFAULT``. Pass a
                  :class:`~urllib3.util.retry.Retry` object for fine-grained control
                  over different types of retries.
                  Pass an integer number to retry connection errors that many times,
                  but no other types of errors. Pass zero to never retry.
          
                  If ``False``, then retries are disabled and any exception is raised
                  immediately. Also, instead of raising a MaxRetryError on redirects,
                  the redirect response will be returned.
          
              :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
          
              :param redirect:
                  If True, automatically handle redirects (status codes 301, 302,
                  303, 307, 308). Each redirect counts as a retry. Disabling retries
                  will disable redirect, too.
          
              :param assert_same_host:
                  If ``True``, will make sure that the host of the pool requests is
                  consistent else will raise HostChangedError. When ``False``, you can
                  use the pool on an HTTP proxy and request foreign hosts.
          
              :param timeout:
                  If specified, overrides the default timeout for this one
                  request. It may be a float (in seconds) or an instance of
                  :class:`urllib3.util.Timeout`.
          
              :param pool_timeout:
                  If set and the pool is set to block=True, then this method will
                  block for ``pool_timeout`` seconds and raise EmptyPoolError if no
                  connection is available within the time period.
          
              :param bool preload_content:
                  If True, the response's body will be preloaded into memory.
          
              :param bool decode_content:
                  If True, will attempt to decode the body based on the
                  'content-encoding' header.
          
              :param release_conn:
                  If False, then the urlopen call will not release the connection
                  back into the pool once a response is received (but will release if
                  you read the entire contents of the response such as when
                  `preload_content=True`). This is useful if you're not preloading
                  the response's content immediately. You will need to call
                  ``r.release_conn()`` on the response ``r`` to return the connection
                  back into the pool. If None, it takes the value of ``preload_content``
                  which defaults to ``True``.
          
              :param bool chunked:
                  If True, urllib3 will send the body using chunked transfer
                  encoding. Otherwise, urllib3 will send the body using the standard
                  content-length form. Defaults to False.
          
              :param int body_pos:
                  Position to seek to in file-like body in the event of a retry or
                  redirect. Typically this won't need to be set because urllib3 will
                  auto-populate the value when needed.
              """
              parsed_url = parse_url(url)
              destination_scheme = parsed_url.scheme
          
              if headers is None:
                  headers = self.headers
          
              if not isinstance(retries, Retry):
                  retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
          
              if release_conn is None:
                  release_conn = preload_content
          
              # Check host
              if assert_same_host and not self.is_same_host(url):
                  raise HostChangedError(self, url, retries)
          
              # Ensure that the URL we're connecting to is properly encoded
              if url.startswith("/"):
                  url = to_str(_encode_target(url))
              else:
                  url = to_str(parsed_url.url)
          
              conn = None
          
              # Track whether `conn` needs to be released before
              # returning/raising/recursing. Update this variable if necessary, and
              # leave `release_conn` constant throughout the function. That way, if
              # the function recurses, the original value of `release_conn` will be
              # passed down into the recursive call, and its value will be respected.
              #
              # See issue #651 [1] for details.
              #
              # [1] <https://github.com/urllib3/urllib3/issues/651>
              release_this_conn = release_conn
          
              http_tunnel_required = connection_requires_http_tunnel(
                  self.proxy, self.proxy_config, destination_scheme
              )
          
              # Merge the proxy headers. Only done when not using HTTP CONNECT. We
              # have to copy the headers dict so we can safely change it without those
              # changes being reflected in anyone else's copy.
              if not http_tunnel_required:
                  headers = headers.copy()  # type: ignore[attr-defined]
                  headers.update(self.proxy_headers)  # type: ignore[union-attr]
          
              # Must keep the exception bound to a separate variable or else Python 3
              # complains about UnboundLocalError.
              err = None
          
              # Keep track of whether we cleanly exited the except block. This
              # ensures we do proper cleanup in finally.
              clean_exit = False
          
              # Rewind body position, if needed. Record current position
              # for future rewinds in the event of a redirect/retry.
              body_pos = set_file_position(body, body_pos)
          
              try:
                  # Request a connection from the queue.
                  timeout_obj = self._get_timeout(timeout)
                  conn = self._get_conn(timeout=pool_timeout)
          
                  conn.timeout = timeout_obj.connect_timeout  # type: ignore[assignment]
          
                  # Is this a closed/new connection that requires CONNECT tunnelling?
                  if self.proxy is not None and http_tunnel_required and conn.is_closed:
                      try:
                          self._prepare_proxy(conn)
                      except (BaseSSLError, OSError, SocketTimeout) as e:
                          self._raise_timeout(
                              err=e, url=self.proxy.url, timeout_value=conn.timeout
                          )
                          raise
          
                  # If we're going to release the connection in ``finally:``, then
                  # the response doesn't need to know about the connection. Otherwise
                  # it will also try to release it and we'll have a double-release
                  # mess.
                  response_conn = conn if not release_conn else None
          
                  # Make the request on the HTTPConnection object
      >           response = self._make_request(
                      conn,
                      method,
                      url,
                      timeout=timeout_obj,
                      body=body,
                      headers=headers,
                      chunked=chunked,
                      retries=retries,
                      response_conn=response_conn,
                      preload_content=preload_content,
                      decode_content=decode_content,
                      **response_kw,
                  )
      
      /iqe_venv/lib/python3.12/site-packages/urllib3/connectionpool.py:787: 
      _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
      /iqe_venv/lib/python3.12/site-packages/urllib3/connectionpool.py:493: in _make_request
          conn.request(
      /iqe_venv/lib/python3.12/site-packages/urllib3/connection.py:494: in request
          self.endheaders()
      /iqe_venv/.local/share/uv/python/cpython-3.12.11-linux-x86_64-gnu/lib/python3.12/http/client.py:1333: in endheaders
          self._send_output(message_body, encode_chunked=encode_chunked)
      /iqe_venv/.local/share/uv/python/cpython-3.12.11-linux-x86_64-gnu/lib/python3.12/http/client.py:1093: in _send_output
          self.send(msg)
      /iqe_venv/.local/share/uv/python/cpython-3.12.11-linux-x86_64-gnu/lib/python3.12/http/client.py:1037: in send
          self.connect()
      /iqe_venv/lib/python3.12/site-packages/urllib3/connection.py:325: in connect
          self.sock = self._new_conn()
                      ^^^^^^^^^^^^^^^^
      _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
      
      self = <urllib3.connection.HTTPConnection object at 0x7fed6a5af650>
      
          def _new_conn(self) -> socket.socket:
              """Establish a socket connection and set nodelay settings on it.
          
              :return: New socket connection.
              """
              try:
                  sock = connection.create_connection(
                      (self._dns_host, self.port),
                      self.timeout,
                      source_address=self.source_address,
                      socket_options=self.socket_options,
                  )
              except socket.gaierror as e:
                  raise NameResolutionError(self.host, self, e) from e
              except SocketTimeout as e:
                  raise ConnectTimeoutError(
                      self,
                      f"Connection to {self.host} timed out. (connect timeout={self.timeout})",
                  ) from e
          
              except OSError as e:
      >           raise NewConnectionError(
                      self, f"Failed to establish a new connection: {e}"
                  ) from e
      E           urllib3.exceptions.NewConnectionError: <urllib3.connection.HTTPConnection object at 0x7fed6a5af650>: Failed to establish a new connection: [Errno 111] Connection refused
      
      /iqe_venv/lib/python3.12/site-packages/urllib3/connection.py:213: NewConnectionError
      
      The above exception was the direct cause of the following exception:
      
      self = <requests.adapters.HTTPAdapter object at 0x7fed6a92a510>
      request = <PreparedRequest [GET]>, stream = False
      timeout = Timeout(connect=None, read=None, total=None), verify = False
      cert = None, proxies = OrderedDict()
      
          def send(
              self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
          ):
              """Sends PreparedRequest object. Returns Response object.
          
              :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
              :param stream: (optional) Whether to stream the request content.
              :param timeout: (optional) How long to wait for the server to send
                  data before giving up, as a float, or a :ref:`(connect timeout,
                  read timeout) <timeouts>` tuple.
              :type timeout: float or tuple or urllib3 Timeout object
              :param verify: (optional) Either a boolean, in which case it controls whether
                  we verify the server's TLS certificate, or a string, in which case it
                  must be a path to a CA bundle to use
              :param cert: (optional) Any user-provided SSL certificate to be trusted.
              :param proxies: (optional) The proxies dictionary to apply to the request.
              :rtype: requests.Response
              """
          
              try:
                  conn = self.get_connection_with_tls_context(
                      request, verify, proxies=proxies, cert=cert
                  )
              except LocationValueError as e:
                  raise InvalidURL(e, request=request)
          
              self.cert_verify(conn, request.url, verify, cert)
              url = self.request_url(request, proxies)
              self.add_headers(
                  request,
                  stream=stream,
                  timeout=timeout,
                  verify=verify,
                  cert=cert,
                  proxies=proxies,
              )
          
              chunked = not (request.body is None or "Content-Length" in request.headers)
          
              if isinstance(timeout, tuple):
                  try:
                      connect, read = timeout
                      timeout = TimeoutSauce(connect=connect, read=read)
                  except ValueError:
                      raise ValueError(
                          f"Invalid timeout {timeout}. Pass a (connect, read) timeout tuple, "
                          f"or a single float to set both timeouts to the same value."
                      )
              elif isinstance(timeout, TimeoutSauce):
                  pass
              else:
                  timeout = TimeoutSauce(connect=timeout, read=timeout)
          
              try:
      >           resp = conn.urlopen(
                      method=request.method,
                      url=url,
                      body=request.body,
                      headers=request.headers,
                      redirect=False,
                      assert_same_host=False,
                      preload_content=False,
                      decode_content=False,
                      retries=self.max_retries,
                      timeout=timeout,
                      chunked=chunked,
                  )
      
      /iqe_venv/lib/python3.12/site-packages/requests/adapters.py:644: 
      _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
      /iqe_venv/lib/python3.12/site-packages/urllib3/connectionpool.py:871: in urlopen
          return self.urlopen(
      /iqe_venv/lib/python3.12/site-packages/urllib3/connectionpool.py:871: in urlopen
          return self.urlopen(
      /iqe_venv/lib/python3.12/site-packages/urllib3/connectionpool.py:871: in urlopen
          return self.urlopen(
      /iqe_venv/lib/python3.12/site-packages/urllib3/connectionpool.py:841: in urlopen
          retries = retries.increment(
      _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
      
      self = Retry(total=0, connect=None, read=None, redirect=None, status=None)
      method = 'GET'
      url = '/api/v1/query_range?query=max%28cluster%3Ausage%3Aworkload%3Acapacity_physical_cpu_hours%29+by+%28_id%29+%2A+on%28_id...%2C+support%3D~%27Premium%7CStandard%7CSelf-Support%7CNone%27%7D%5B1h%5D%29&start=1759280400&end=1759363200&step=3600s'
      response = None
      error = NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7fed6a5af650>: Failed to establish a new connection: [Errno 111] Connection refused')
      _pool = <urllib3.connectionpool.HTTPConnectionPool object at 0x7fed6ae37ad0>
      _stacktrace = <traceback object at 0x7fed699ede40>
      
          def increment(
              self,
              method: str | None = None,
              url: str | None = None,
              response: BaseHTTPResponse | None = None,
              error: Exception | None = None,
              _pool: ConnectionPool | None = None,
              _stacktrace: TracebackType | None = None,
          ) -> Self:
              """Return a new Retry object with incremented retry counters.
          
              :param response: A response object, or None, if the server did not
                  return a response.
              :type response: :class:`~urllib3.response.BaseHTTPResponse`
              :param Exception error: An error encountered during the request, or
                  None if the response was received successfully.
          
              :return: A new ``Retry`` object.
              """
              if self.total is False and error:
                  # Disabled, indicate to re-raise the error.
                  raise reraise(type(error), error, _stacktrace)
          
              total = self.total
              if total is not None:
                  total -= 1
          
              connect = self.connect
              read = self.read
              redirect = self.redirect
              status_count = self.status
              other = self.other
              cause = "unknown"
              status = None
              redirect_location = None
          
              if error and self._is_connection_error(error):
                  # Connect retry?
                  if connect is False:
                      raise reraise(type(error), error, _stacktrace)
                  elif connect is not None:
                      connect -= 1
          
              elif error and self._is_read_error(error):
                  # Read retry?
                  if read is False or method is None or not self._is_method_retryable(method):
                      raise reraise(type(error), error, _stacktrace)
                  elif read is not None:
                      read -= 1
          
              elif error:
                  # Other retry?
                  if other is not None:
                      other -= 1
          
              elif response and response.get_redirect_location():
                  # Redirect retry?
                  if redirect is not None:
                      redirect -= 1
                  cause = "too many redirects"
                  response_redirect_location = response.get_redirect_location()
                  if response_redirect_location:
                      redirect_location = response_redirect_location
                  status = response.status
          
              else:
                  # Incrementing because of a server error like a 500 in
                  # status_forcelist and the given method is in the allowed_methods
                  cause = ResponseError.GENERIC_ERROR
                  if response and response.status:
                      if status_count is not None:
                          status_count -= 1
                      cause = ResponseError.SPECIFIC_ERROR.format(status_code=response.status)
                      status = response.status
          
              history = self.history + (
                  RequestHistory(method, url, error, status, redirect_location),
              )
          
              new_retry = self.new(
                  total=total,
                  connect=connect,
                  read=read,
                  redirect=redirect,
                  status=status_count,
                  other=other,
                  history=history,
              )
          
              if new_retry.is_exhausted():
                  reason = error or ResponseError(cause)
      >           raise MaxRetryError(_pool, url, reason) from reason  # type: ignore[arg-type]
                  ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
      E           urllib3.exceptions.MaxRetryError: HTTPConnectionPool(host='10.0.151.243', port=80): Max retries exceeded with url: /api/v1/query_range?query=max%28cluster%3Ausage%3Aworkload%3Acapacity_physical_cpu_hours%29+by+%28_id%29+%2A+on%28_id%29+group_right+min_over_time%28ocm_subscription%7Bproduct%3D%27osd%27%2C+external_organization%3D%2713259775%27%2C+billing_model%3D%27marketplace%27%2Cmetered_by_rh%21%3D%27false%27%2C+support%3D~%27Premium%7CStandard%7CSelf-Support%7CNone%27%7D%5B1h%5D%29&start=1759280400&end=1759363200&step=3600s (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7fed6a5af650>: Failed to establish a new connection: [Errno 111] Connection refused'))
      
      /iqe_venv/lib/python3.12/site-packages/urllib3/util/retry.py:519: MaxRetryError
      
      During handling of the above exception, another exception occurred:
      
      application = <iqe.base.application.Application object at 0x7fed71275bb0>
      product_id = 'OpenShift-dedicated-metrics'
      
          @pytest.mark.post_stage_deploy
          @pytest.mark.parametrize("product_id", ["OpenShift-metrics", "OpenShift-dedicated-metrics"])
          def test_verify_tally_system_table_data_with_telemeter(application, product_id):
              """Verify monthly tally OCP/OSD core-hours data and system table data matches with telemeter.
              metadata:
                  assignee: aunayak
                  negative: false
                  importance: medium
                  requirements: payg_tally
                  test_steps:
                      1. Read existing tally data for PAYG OCP of current and previous month.
                      2. Read tally data from telemeter.
                      3. Verify results matches.
                  expected_results:
                      1. Tally OCP/OSD core-hours data match with telemeter.
              """
              application.rhsm_subscriptions.sync_tally_hourly(
                  hours=312, product_id=product_id, timeout_tries=2
              )
          
              start, end = get_range()
          
              tally = application.rhsm_subscriptions.get_tally_report(
                  product_id=product_id,
                  beginning=start,
                  ending=end,
                  granularity="Daily",
                  metric_id="Cores",
              )
              tally_data = [{t["date"]: t["value"]} for t in tally["data"] if "value" in t]
          
      >       telemeter_tally = application.rhsm_subscriptions.telemeter_get_daily_tally(
                  product_id=product_id,
                  beginning=start,
                  metric_id="Cores",
              )
      
      /iqe_venv/lib/python3.12/site-packages/iqe_rhsm_subscriptions/tests/integration/swatch_tally/test_tally_openshift_payg.py:177: 
      _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
      /iqe_venv/lib/python3.12/site-packages/iqe_rhsm_subscriptions/__init__.py:2370: in telemeter_get_daily_tally
          results = self.prometheus_api.custom_query_range(
      /iqe_venv/lib/python3.12/site-packages/iqe_rhsm_subscriptions/rhsmlib/prometheus.py:34: in custom_query_range
          metric_data = pc.custom_query_range(
      /iqe_venv/lib/python3.12/site-packages/prometheus_api_client/prometheus_connect.py:457: in custom_query_range
          response = self._session.get(
      /iqe_venv/lib/python3.12/site-packages/requests/sessions.py:602: in get
          return self.request("GET", url, **kwargs)
                 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
      /iqe_venv/lib/python3.12/site-packages/requests/sessions.py:589: in request
          resp = self.send(prep, **send_kwargs)
                 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
      /iqe_venv/lib/python3.12/site-packages/requests/sessions.py:703: in send
          r = adapter.send(request, **kwargs)
              ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
      _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
      
      self = <requests.adapters.HTTPAdapter object at 0x7fed6a92a510>
      request = <PreparedRequest [GET]>, stream = False
      timeout = Timeout(connect=None, read=None, total=None), verify = False
      cert = None, proxies = OrderedDict()
      
          def send(
              self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
          ):
              """Sends PreparedRequest object. Returns Response object.
          
              :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
              :param stream: (optional) Whether to stream the request content.
              :param timeout: (optional) How long to wait for the server to send
                  data before giving up, as a float, or a :ref:`(connect timeout,
                  read timeout) <timeouts>` tuple.
              :type timeout: float or tuple or urllib3 Timeout object
              :param verify: (optional) Either a boolean, in which case it controls whether
                  we verify the server's TLS certificate, or a string, in which case it
                  must be a path to a CA bundle to use
              :param cert: (optional) Any user-provided SSL certificate to be trusted.
              :param proxies: (optional) The proxies dictionary to apply to the request.
              :rtype: requests.Response
              """
          
              try:
                  conn = self.get_connection_with_tls_context(
                      request, verify, proxies=proxies, cert=cert
                  )
              except LocationValueError as e:
                  raise InvalidURL(e, request=request)
          
              self.cert_verify(conn, request.url, verify, cert)
              url = self.request_url(request, proxies)
              self.add_headers(
                  request,
                  stream=stream,
                  timeout=timeout,
                  verify=verify,
                  cert=cert,
                  proxies=proxies,
              )
          
              chunked = not (request.body is None or "Content-Length" in request.headers)
          
              if isinstance(timeout, tuple):
                  try:
                      connect, read = timeout
                      timeout = TimeoutSauce(connect=connect, read=read)
                  except ValueError:
                      raise ValueError(
                          f"Invalid timeout {timeout}. Pass a (connect, read) timeout tuple, "
                          f"or a single float to set both timeouts to the same value."
                      )
              elif isinstance(timeout, TimeoutSauce):
                  pass
              else:
                  timeout = TimeoutSauce(connect=timeout, read=timeout)
          
              try:
                  resp = conn.urlopen(
                      method=request.method,
                      url=url,
                      body=request.body,
                      headers=request.headers,
                      redirect=False,
                      assert_same_host=False,
                      preload_content=False,
                      decode_content=False,
                      retries=self.max_retries,
                      timeout=timeout,
                      chunked=chunked,
                  )
          
              except (ProtocolError, OSError) as err:
                  raise ConnectionError(err, request=request)
          
              except MaxRetryError as e:
                  if isinstance(e.reason, ConnectTimeoutError):
                      # TODO: Remove this in 3.0.0: see #2811
                      if not isinstance(e.reason, NewConnectionError):
                          raise ConnectTimeout(e, request=request)
          
                  if isinstance(e.reason, ResponseError):
                      raise RetryError(e, request=request)
          
                  if isinstance(e.reason, _ProxyError):
                      raise ProxyError(e, request=request)
          
                  if isinstance(e.reason, _SSLError):
                      # This branch is for urllib3 v1.22 and later.
                      raise SSLError(e, request=request)
          
      >           raise ConnectionError(e, request=request)
      E           requests.exceptions.ConnectionError: HTTPConnectionPool(host='10.0.151.243', port=80): Max retries exceeded with url: /api/v1/query_range?query=max%28cluster%3Ausage%3Aworkload%3Acapacity_physical_cpu_hours%29+by+%28_id%29+%2A+on%28_id%29+group_right+min_over_time%28ocm_subscription%7Bproduct%3D%27osd%27%2C+external_organization%3D%2713259775%27%2C+billing_model%3D%27marketplace%27%2Cmetered_by_rh%21%3D%27false%27%2C+support%3D~%27Premium%7CStandard%7CSelf-Support%7CNone%27%7D%5B1h%5D%29&start=1759280400&end=1759363200&step=3600s (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7fed6a5af650>: Failed to establish a new connection: [Errno 111] Connection refused'))
      
      /iqe_venv/lib/python3.12/site-packages/requests/adapters.py:677: ConnectionError 
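
       The root failure is a ConnectionRefusedError ([Errno 111]) against http://10.0.151.243:80 when telemeter_get_daily_tally runs the Prometheus range query through prometheus_api_client. Below is a minimal connectivity-check sketch that replays the same /api/v1/query_range request with plain requests, outside the IQE harness. The base URL is taken from the traceback and is environment-specific, and the PromQL query is simply URL-decoded from the MaxRetryError message above, so treat both as assumptions rather than canonical values.

           # Standalone reproduction of the failing telemeter/Prometheus range query.
           # Host/port, query, and time window are copied from the traceback above;
           # adjust BASE_URL for the environment under test.
           import requests

           BASE_URL = "http://10.0.151.243:80"  # from the traceback; environment-specific

           QUERY = (
               "max(cluster:usage:workload:capacity_physical_cpu_hours) by (_id) "
               "* on(_id) group_right min_over_time(ocm_subscription{product='osd', "
               "external_organization='13259775', billing_model='marketplace',"
               "metered_by_rh!='false', support=~'Premium|Standard|Self-Support|None'}[1h])"
           )

           try:
               resp = requests.get(
                   f"{BASE_URL}/api/v1/query_range",
                   params={
                       "query": QUERY,
                       "start": 1759280400,  # same window as the failing request
                       "end": 1759363200,
                       "step": "3600s",
                   },
                   timeout=30,
               )
               print(resp.status_code, resp.json().get("status"))
           except requests.exceptions.ConnectionError as exc:
               # [Errno 111] here reproduces the test failure: nothing is accepting
               # connections on that host/port from the test pod.
               print(f"Prometheus endpoint unreachable: {exc}")

       If this script also fails with connection refused, the problem is environmental (the telemeter/observatorium datasource address or the network path from the IQE pod), not the tally comparison logic in the test itself.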

              rhn-sqe-tmcknigh Trayvon McKnight
              lburnett0 Lindsey Burnett