whoami: 1002310000
./execute_clc_interop_commands.sh: line 6: cypress: command not found
Cypress API URL: https://api.ci-op-fzlpf2gy-059fa.release-ci.cnv-qe.rhood.us:6443
+ echo 'Initiating CLC E2E tests...'
Initiating CLC E2E tests...
+ export BROWSER=chrome
+ BROWSER=chrome
+ export CYPRESS_OC_IDP=kube:admin
+ CYPRESS_OC_IDP=kube:admin
+ export CYPRESS_OPTIONS_HUB_USER=kubeadmin
+ CYPRESS_OPTIONS_HUB_USER=kubeadmin
+ export CYPRESS_SPOKE_CLUSTER=
+ CYPRESS_SPOKE_CLUSTER=
+ export CYPRESS_CLC_OC_IDP=clc-e2e-htpasswd
+ CYPRESS_CLC_OC_IDP=clc-e2e-htpasswd
+ export CYPRESS_CLC_RBAC_PASS=test-RBAC-4-e2e
+ CYPRESS_CLC_RBAC_PASS=test-RBAC-4-e2e
+ export CYPRESS_CLC_OCP_IMAGE_VERSION=4.16.0
+ CYPRESS_CLC_OCP_IMAGE_VERSION=4.16.0
+ export CYPRESS_CLC_OCP_IMAGE_REGISTRY=quay.io/openshift-release-dev/ocp-release
+ CYPRESS_CLC_OCP_IMAGE_REGISTRY=quay.io/openshift-release-dev/ocp-release
+ export CYPRESS_ACM_NAMESPACE=ocm
+ CYPRESS_ACM_NAMESPACE=ocm
+ export CYPRESS_MCE_NAMESPACE=multicluster-engine
+ CYPRESS_MCE_NAMESPACE=multicluster-engine
+ [[ -z kubeadmin ]]
+ [[ -z YqxBD-Uahyq-wpYsd-zcW8p ]]
+ [[ -z https://api.ci-op-fzlpf2gy-059fa.release-ci.cnv-qe.rhood.us:6443 ]]
+ echo -e 'Doing oc login using CYPRESS_HUB_API_URL, CYPRESS_OPTIONS_HUB_USER, CYPRESS_OPTIONS_HUB_PASSWORD'
Doing oc login using CYPRESS_HUB_API_URL, CYPRESS_OPTIONS_HUB_USER, CYPRESS_OPTIONS_HUB_PASSWORD
+ set +x
WARNING: Using insecure TLS client config. Setting this option is not supported!

Login successful.

You have access to 99 projects, the list has been suppressed. You can list all projects with 'oc projects'

Using project "default".
++ oc whoami --show-console
+ export CYPRESS_BASE_URL=https://console-openshift-console.apps.ci-op-fzlpf2gy-059fa.release-ci.cnv-qe.rhood.us
+ CYPRESS_BASE_URL=https://console-openshift-console.apps.ci-op-fzlpf2gy-059fa.release-ci.cnv-qe.rhood.us
+ echo -e 'Running tests with the following environment:\n'
Running tests with the following environment:

+ echo -e '\tCYPRESS_HUB_API_URL : https://api.ci-op-fzlpf2gy-059fa.release-ci.cnv-qe.rhood.us:6443'
    CYPRESS_HUB_API_URL : https://api.ci-op-fzlpf2gy-059fa.release-ci.cnv-qe.rhood.us:6443
+ echo -e '\tCYPRESS_OPTIONS_HUB_BASE_URL : https://console-openshift-console.apps.ci-op-fzlpf2gy-059fa.release-ci.cnv-qe.rhood.us'
    CYPRESS_OPTIONS_HUB_BASE_URL : https://console-openshift-console.apps.ci-op-fzlpf2gy-059fa.release-ci.cnv-qe.rhood.us
+ echo -e '\tCYPRESS_OPTIONS_HUB_USER : kubeadmin'
    CYPRESS_OPTIONS_HUB_USER : kubeadmin
+ echo
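Editor's note: the login and console-discovery step traced above can be reproduced outside the harness. This is a minimal sketch assuming the same CYPRESS_* variables are exported; the commands mirror the trace, but the script body is a reconstruction, not the actual execute_clc_interop_commands.sh.

    #!/usr/bin/env bash
    # Minimal sketch of the login step traced above (reconstruction, not the real script).
    set -euo pipefail

    : "${CYPRESS_HUB_API_URL:?hub API URL, e.g. https://api.<cluster-domain>:6443}"
    : "${CYPRESS_OPTIONS_HUB_USER:?hub user, e.g. kubeadmin}"
    : "${CYPRESS_OPTIONS_HUB_PASSWORD:?hub password}"

    # The "insecure TLS client config" warning in the log implies certificate
    # verification is skipped against the ephemeral CI cluster.
    oc login "${CYPRESS_HUB_API_URL}" \
      -u "${CYPRESS_OPTIONS_HUB_USER}" \
      -p "${CYPRESS_OPTIONS_HUB_PASSWORD}" \
      --insecure-skip-tls-verify=true

    # Derive the console URL for Cypress, exactly as the trace does.
    CYPRESS_BASE_URL="$(oc whoami --show-console)"
    export CYPRESS_BASE_URL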
+ bash build/setup-client.sh
--2024-07-29 05:49:14--  https://releases.ansible.com/ansible-tower/cli/ansible-tower-cli-3.8.3-1.tar.gz
Resolving releases.ansible.com (releases.ansible.com)... 104.26.1.234, 104.26.0.234, 172.67.68.251, ...
Connecting to releases.ansible.com (releases.ansible.com)|104.26.1.234|:443... connected.
HTTP request sent, awaiting response... 200 OK
Length: 92652 (90K) [application/x-gzip]
Saving to: 'ansible-tower-cli-3.8.3-1.tar.gz'

     0K .......... .......... .......... .......... ..........  55% 20.3M 0s
    50K .......... .......... .......... ..........            100% 46.4M=0.003s

2024-07-29 05:49:14 (27.1 MB/s) - 'ansible-tower-cli-3.8.3-1.tar.gz' saved [92652/92652]

awxkit-3.8.3/
awxkit-3.8.3/awxkit/
awxkit-3.8.3/awxkit/api/
awxkit-3.8.3/awxkit/api/mixins/
awxkit-3.8.3/awxkit/api/mixins/__init__.py
awxkit-3.8.3/awxkit/api/mixins/has_copy.py
awxkit-3.8.3/awxkit/api/mixins/has_create.py
awxkit-3.8.3/awxkit/api/mixins/has_instance_groups.py
awxkit-3.8.3/awxkit/api/mixins/has_notifications.py
awxkit-3.8.3/awxkit/api/mixins/has_status.py
awxkit-3.8.3/awxkit/api/mixins/has_survey.py
awxkit-3.8.3/awxkit/api/mixins/has_variables.py
awxkit-3.8.3/awxkit/api/pages/
awxkit-3.8.3/awxkit/api/pages/__init__.py
awxkit-3.8.3/awxkit/api/pages/access_list.py
awxkit-3.8.3/awxkit/api/pages/activity_stream.py
awxkit-3.8.3/awxkit/api/pages/ad_hoc_commands.py
awxkit-3.8.3/awxkit/api/pages/api.py
awxkit-3.8.3/awxkit/api/pages/applications.py
awxkit-3.8.3/awxkit/api/pages/authtoken.py
awxkit-3.8.3/awxkit/api/pages/base.py
awxkit-3.8.3/awxkit/api/pages/config.py
awxkit-3.8.3/awxkit/api/pages/credential_input_sources.py
awxkit-3.8.3/awxkit/api/pages/credentials.py
awxkit-3.8.3/awxkit/api/pages/dashboard.py
awxkit-3.8.3/awxkit/api/pages/instance_groups.py
awxkit-3.8.3/awxkit/api/pages/instances.py
awxkit-3.8.3/awxkit/api/pages/inventory.py
awxkit-3.8.3/awxkit/api/pages/job_templates.py
awxkit-3.8.3/awxkit/api/pages/jobs.py
awxkit-3.8.3/awxkit/api/pages/labels.py
awxkit-3.8.3/awxkit/api/pages/metrics.py
awxkit-3.8.3/awxkit/api/pages/notification_templates.py
awxkit-3.8.3/awxkit/api/pages/notifications.py
awxkit-3.8.3/awxkit/api/pages/organizations.py
awxkit-3.8.3/awxkit/api/pages/page.py
awxkit-3.8.3/awxkit/api/pages/ping.py
awxkit-3.8.3/awxkit/api/pages/projects.py
awxkit-3.8.3/awxkit/api/pages/roles.py
awxkit-3.8.3/awxkit/api/pages/schedules.py
awxkit-3.8.3/awxkit/api/pages/settings.py
awxkit-3.8.3/awxkit/api/pages/subscriptions.py
awxkit-3.8.3/awxkit/api/pages/survey_spec.py
awxkit-3.8.3/awxkit/api/pages/system_job_templates.py
awxkit-3.8.3/awxkit/api/pages/system_jobs.py
awxkit-3.8.3/awxkit/api/pages/teams.py
awxkit-3.8.3/awxkit/api/pages/unified_job_templates.py
awxkit-3.8.3/awxkit/api/pages/unified_jobs.py
awxkit-3.8.3/awxkit/api/pages/users.py
awxkit-3.8.3/awxkit/api/pages/workflow_approvals.py
awxkit-3.8.3/awxkit/api/pages/workflow_job_nodes.py
awxkit-3.8.3/awxkit/api/pages/workflow_job_template_nodes.py
awxkit-3.8.3/awxkit/api/pages/workflow_job_templates.py
awxkit-3.8.3/awxkit/api/pages/workflow_jobs.py
awxkit-3.8.3/awxkit/api/__init__.py
awxkit-3.8.3/awxkit/api/client.py
awxkit-3.8.3/awxkit/api/registry.py
awxkit-3.8.3/awxkit/api/resources.py
awxkit-3.8.3/awxkit/api/utils.py
awxkit-3.8.3/awxkit/awx/
awxkit-3.8.3/awxkit/awx/__init__.py
awxkit-3.8.3/awxkit/awx/inventory.py
awxkit-3.8.3/awxkit/awx/utils.py
awxkit-3.8.3/awxkit/cli/
awxkit-3.8.3/awxkit/cli/docs/
awxkit-3.8.3/awxkit/cli/docs/source/
awxkit-3.8.3/awxkit/cli/docs/source/conf.py
awxkit-3.8.3/awxkit/cli/docs/README.md
awxkit-3.8.3/awxkit/cli/__init__.py
awxkit-3.8.3/awxkit/cli/client.py
awxkit-3.8.3/awxkit/cli/custom.py
awxkit-3.8.3/awxkit/cli/format.py
awxkit-3.8.3/awxkit/cli/options.py
awxkit-3.8.3/awxkit/cli/resource.py
awxkit-3.8.3/awxkit/cli/sphinx.py
awxkit-3.8.3/awxkit/cli/stdout.py
awxkit-3.8.3/awxkit/cli/utils.py
awxkit-3.8.3/awxkit/scripts/
awxkit-3.8.3/awxkit/scripts/__init__.py
awxkit-3.8.3/awxkit/scripts/basic_session.py
awxkit-3.8.3/awxkit/utils/
awxkit-3.8.3/awxkit/utils/__init__.py
awxkit-3.8.3/awxkit/utils/toposort.py
awxkit-3.8.3/awxkit/__init__.py
awxkit-3.8.3/awxkit/config.py
awxkit-3.8.3/awxkit/exceptions.py
awxkit-3.8.3/awxkit/words.py
awxkit-3.8.3/awxkit/ws.py
awxkit-3.8.3/awxkit/yaml_file.py
awxkit-3.8.3/awxkit.egg-info/
awxkit-3.8.3/awxkit.egg-info/PKG-INFO
awxkit-3.8.3/awxkit.egg-info/SOURCES.txt
awxkit-3.8.3/awxkit.egg-info/dependency_links.txt
awxkit-3.8.3/awxkit.egg-info/entry_points.txt
awxkit-3.8.3/awxkit.egg-info/requires.txt
awxkit-3.8.3/awxkit.egg-info/top_level.txt
awxkit-3.8.3/test/
awxkit-3.8.3/test/cli/
awxkit-3.8.3/test/cli/test_client.py
awxkit-3.8.3/test/cli/test_config.py
awxkit-3.8.3/test/cli/test_format.py
awxkit-3.8.3/test/cli/test_options.py
awxkit-3.8.3/test/__init__.py
awxkit-3.8.3/test/test_credentials.py
awxkit-3.8.3/test/test_dependency_resolver.py
awxkit-3.8.3/test/test_registry.py
awxkit-3.8.3/test/test_utils.py
awxkit-3.8.3/test/test_ws.py
awxkit-3.8.3/MANIFEST.in
awxkit-3.8.3/README.md
awxkit-3.8.3/VERSION
awxkit-3.8.3/requirements.txt
awxkit-3.8.3/setup.py
awxkit-3.8.3/PKG-INFO
awxkit-3.8.3/setup.cfg
/tmp/clc/awxkit-3.8.3 /tmp/clc
running install
error: can't create or remove files in install directory

The following error occurred while trying to add or remove files in the
installation directory:

    [Errno 13] Permission denied: '/usr/local/lib/python3.9'

The installation directory you specified (via --install-dir, --prefix, or
the distutils default setting) was:

    /usr/local/lib/python3.9/site-packages/

This directory does not currently exist.  Please create it and try again, or
choose a different installation directory (using the -d or --install-dir
option).

/tmp/clc
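Editor's note: the awxkit install inside build/setup-client.sh fails because the CI pod runs as a random non-root UID (the 1002310000 from `whoami` above) that cannot write to /usr/local/lib/python3.9. A per-user install sidesteps the root-owned prefix; this is a suggested workaround sketch, assuming python3 and pip are present in the image, not part of the original script.

    # Workaround sketch for the [Errno 13] above: install awxkit into a
    # user-writable scheme instead of /usr/local/lib/python3.9/site-packages.
    python3 -m pip install --user awxkit==3.8.3
    # The --user scheme puts the CLI entry point under ~/.local/bin.
    export PATH="${HOME}/.local/bin:${PATH}"
    awx --help >/dev/null && echo "awxkit CLI available"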
+ '[' -z ']'
+ echo 'CYPRESS_HUB_OCP_VERSION is not defined, setting...'
CYPRESS_HUB_OCP_VERSION is not defined, setting...
+ echo 'oc get clusterversion -ojsonpath='\''{.items[0].status.desired.version}'\'''
oc get clusterversion -ojsonpath='{.items[0].status.desired.version}'
++ oc get clusterversion '-ojsonpath={.items[0].status.desired.version}'
+ export CYPRESS_HUB_OCP_VERSION=4.16.0-0.nightly-2024-07-28-155803
+ CYPRESS_HUB_OCP_VERSION=4.16.0-0.nightly-2024-07-28-155803
+ '[' -z x ']'
+ [[ chrome == \c\h\r\o\m\e ]]
+ command -v google-chrome
+ [[ 0 -ne 0 ]]
+ [[ -z multicluster-engine ]]
+ [[ -z ocm ]]
++ date
+ echo 'Mon Jul 29 05:49:15 UTC 2024 ==== Ensure hypershift-addon is in good condition ===='
Mon Jul 29 05:49:15 UTC 2024 ==== Ensure hypershift-addon is in good condition ====
+ bash build/hypershift/enableHypershift.sh
Mon Jul 29 05:49:15 UTC 2024 ==== Verify ACM or MCE is installed ====
multiclusterengine
waiting for hypershift-addon...
Hypershift addon is available
++ date
+ echo 'Mon Jul 29 05:49:15 UTC 2024 ==== Enable SNO feature (dev preview) ===='
Mon Jul 29 05:49:15 UTC 2024 ==== Enable SNO feature (dev preview) ====
+ oc patch configmaps console-mce-config -n multicluster-engine --patch '{"data":{"singleNodeOpenshift": "enabled"}}'
configmap/console-mce-config patched
+ [[ ! -z ocm ]]
+ [[ ocm != '' ]]
+ oc patch configmaps console-mce-config -n multicluster-engine --patch '{"data":{"singleNodeOpenshift": "enabled"}}'
configmap/console-mce-config patched (no change)
++ date
+ echo 'Mon Jul 29 05:49:16 UTC 2024 ==== Create the ManagedClusterSet used for Applications ===='
Mon Jul 29 05:49:16 UTC 2024 ==== Create the ManagedClusterSet used for Applications ====
+ oc apply -f resources/auto_gitops_clusterset.yaml
managedclusterset.cluster.open-cluster-management.io/auto-gitops-cluster-set unchanged
+ HEADLESS=--headless
+ [[ '' == true ]]
+ [[ -z '' ]]
+ export NODE_ENV=production
+ NODE_ENV=production
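Editor's note: the apply output above confirms only the resource type and name (managedclusterset.cluster.open-cluster-management.io/auto-gitops-cluster-set); the contents of resources/auto_gitops_clusterset.yaml are not printed. A ManagedClusterSet needs nothing beyond metadata, so a plausible minimal equivalent looks like the sketch below, applied via a heredoc for self-containment. The apiVersion is an assumption based on recent MCE releases.

    # Hypothetical reconstruction of resources/auto_gitops_clusterset.yaml;
    # only the kind and name are confirmed by the apply output above.
    oc apply -f - <<'EOF'
    apiVersion: cluster.open-cluster-management.io/v1beta2
    kind: ManagedClusterSet
    metadata:
      name: auto-gitops-cluster-set
    EOF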
+ [[ ! -f ./options.yaml ]]
+ npx browserslist@latest --update-db
npx: installed 7 in 1.903s
The --update-db command is deprecated. Please use npx update-browserslist-db@latest instead.
Browserslist DB update will still be made.
Latest version:    1.0.30001643
Installed version: 1.0.30001643
caniuse-lite is up to date
caniuse-lite has been successfully updated
No target browser changes
+ echo -e 'Setting env to run in: production'
Setting env to run in: production
+ section_title 'Running CLC UI tests.'
+ export TERM=xterm-256color
+ TERM=xterm-256color
++ tput bold
++ tput sgr0
+ printf '\nRunning CLC UI tests.\n'

Running CLC UI tests.
+ [[ -d reports ]]
+ echo 'List the clusters fixtures'
List the clusters fixtures
+ ls -lrth 'cypress/fixtures/importClusters/*'
ls: cannot access 'cypress/fixtures/importClusters/*': No such file or directory
+ [[ production == \c\i\m ]]
+ [[ ! -z '' ]]
+ case $TEST_STAGE in
+ run_destroy
+ export CLOUD_PROVIDERS=XXX
+ CLOUD_PROVIDERS=XXX
+ export SNO_CLUSTERS=
+ SNO_CLUSTERS=
+ export CLUSTER_POOL=
+ CLUSTER_POOL=
+ tags=
+ [[ ! -z XXX ]]
++ echo XXX
++ tr -s , ' '
+ for cps in $(echo $CLOUD_PROVIDERS | tr -s "," " ")
+ tags='@destroy+-sno+-@clusterpool-@single+XXX '
+ [[ ! -z '' ]]
+ [[ ! -z '' ]]
+ [[ @destroy+-sno+-@clusterpool-@single+XXX != '' ]]
+ npx cypress run --browser chrome --headless --reporter cypress-multi-reporters --spec cypress/tests/clusters/managedClusters/destroy/destroyClusters.spec.js,cypress/tests/clusters/managedClusters/destroy/destroyHostedClusters.spec.js,cypress/tests/clusters/managedClusters/destroy/destroySNOClusters.spec.js,cypress/tests/credentials/deleteCredentials.spec.js,cypress/tests/tech-preview/clusterpools/clusterpools_destroy.spec.js --env 'grepFilterSpecs=true,grepOmitFiltered=true,grepTags="@destroy+-sno+-@clusterpool-@single+XXX "'
It looks like this is your first time using Cypress: 9.7.0

[STARTED] Task without title.
[SUCCESS] Task without title.

Opening Cypress...
[557:0729/054922.936301:ERROR:zygote_host_impl_linux.cc(263)] Failed to adjust OOM score of renderer with pid 710: Permission denied (13)
[710:0729/054922.958142:ERROR:sandbox_linux.cc(377)] InitializeSandbox() called with multiple threads in process gpu-process.
[710:0729/054922.960693:ERROR:gpu_memory_buffer_support_x11.cc(44)] dri3 extension not supported.
Adding Date.now() suffix to all cluster names in options-*.yaml loaded:
XXX cluster name used in tests will be: acm-interops-opp-ci-1722232165257
XXX cluster pool name used in tests will be: XXXXX-clc-auto-pool-XXX1-1722232165257
azure cluster name used in tests will be: acm-interops-opp-ci-az-1722232165257
azure cluster pool name used in tests will be: XXXXX-clc-auto-pool-azure1-1722232165257
gcp cluster name used in tests will be: acm-interops-opp-ci-gcp-1722232165257
gcp cluster pool name used in tests will be: XXXXX-clc-auto-pool-gcp1-1722232165257
azgov cluster name used in tests will be: acm-interops-opp-ci-azg-1722232165257
azgov cluster pool name used in tests will be: XXXXX-clc-auto-pool-azgov-1722232165257
kubevirt cluster name used in tests will be: interops-hcp-kv-1722232165257
Pull secret is set, using the one in options.yaml...
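Editor's note: the grepTags expression handed to Cypress above is assembled inside run_destroy, whose body is not printed; only its xtrace is. From that trace, the assembly plausibly looks like the sketch below: for each provider in CLOUD_PROVIDERS, run @destroy-tagged tests while excluding sno, clusterpool, and single-node variants. The real invocation also passes the --spec list shown above; this is an illustration, not the harness source.

    # Reconstruction of the tag assembly implied by the trace; illustrative only.
    CLOUD_PROVIDERS=XXX   # comma-separated provider list, masked in this run
    tags=
    for cps in $(echo "$CLOUD_PROVIDERS" | tr -s "," " "); do
      tags+="@destroy+-sno+-@clusterpool-@single+${cps} "
    done

    npx cypress run --browser chrome --headless \
      --reporter cypress-multi-reporters \
      --env "grepFilterSpecs=true,grepOmitFiltered=true,grepTags=\"${tags}\""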
cypress-grep: filtering using tag(s) "@destroy+-sno+-@clusterpool-@single+XXX "
cypress-grep: will omit filtered tests
cypress-grep: filtering specs using tag "@destroy+-sno+-@clusterpool-@single+XXX "
Grep "undefined" has eliminated all specs
Will leave all specs to run to filter at run-time

====================================================================================================

  (Run Starting)

  Cypress:        9.7.0
  Browser:        Chrome 124 (headless)
  Node Version:   v14.21.1 (/usr/bin/node)
  Specs:          5 found (clusters/managedClusters/destroy/destroyClusters.spec.js,
                  clusters/managedClusters/destroy/destroyHostedClusters.spec.js,
                  clusters/managedClusters/destroy/destroySNOClusters.spec.js,
                  credentials/deleteCredentials.spec.js, tech-preview/clusterpools...)
  Searched:       cypress/tests/clusters/managedClusters/destroy/destroyClusters.spec.js,
                  cypress/tests/clusters/managedClusters/destroy/destroyHostedClusters.spec.js,
                  cypress/tests/clusters/managedClusters/destroy/destroySNOClusters.spec.js,
                  cypress/tests/credentials/deleteCredentials.spec.js,
                  cypress/tests/tech-preview/clusterpools/clusterpools_destroy.spec.js

────────────────────────────────────────────────────────────────────────────────────────────────────

  Running: clusters/managedClusters/destroy/destroyClusters.spec.js (1 of 5)

Browserslist: caniuse-lite is outdated. Please run: npx browserslist@latest --update-db
Why you should do it regularly: https://github.com/browserslist/browserslist#browsers-data-updating

  destroy clusters
    ✓ RHACM4K-7477: CLC: Destroy an AWS managed cluster via the UI (435432ms)

  1 passing (7m)

[mochawesome] Report JSON saved to /tmp/clc/results/json/mochawesome-report.json

  (Results)

  Tests:        1
  Passing:      1
  Failing:      0
  Pending:      0
  Skipped:      0
  Screenshots:  0
  Video:        true
  Duration:     7 minutes, 15 seconds
  Spec Ran:     clusters/managedClusters/destroy/destroyClusters.spec.js

────────────────────────────────────────────────────────────────────────────────────────────────────

  Running: clusters/managedClusters/destroy/destroyHostedClusters.spec.js (2 of 5)

  0 passing (3ms)

[mochawesome] Report JSON saved to /tmp/clc/results/json/mochawesome-report_001.json

  (Results)

  Tests:        0
  Passing:      0
  Failing:      0
  Pending:      0
  Skipped:      0
  Screenshots:  0
  Video:        true
  Duration:     0 seconds
  Spec Ran:     clusters/managedClusters/destroy/destroyHostedClusters.spec.js

────────────────────────────────────────────────────────────────────────────────────────────────────

  Running: clusters/managedClusters/destroy/destroySNOClusters.spec.js (3 of 5)

  0 passing (3ms)

[mochawesome] Report JSON saved to /tmp/clc/results/json/mochawesome-report_002.json

  (Results)

  Tests:        0
  Passing:      0
  Failing:      0
  Pending:      0
  Skipped:      0
  Screenshots:  0
  Video:        true
  Duration:     0 seconds
  Spec Ran:     clusters/managedClusters/destroy/destroySNOClusters.spec.js
────────────────────────────────────────────────────────────────────────────────────────────────────

  Running: credentials/deleteCredentials.spec.js (4 of 5)

  Delete provider credentials
    ✓ RHACM4K-7901: CLC: Credentials - Delete the AWS provider credentials (19228ms)

  1 passing (19s)

[mochawesome] Report JSON saved to /tmp/clc/results/json/mochawesome-report_003.json

  (Results)

  Tests:        1
  Passing:      1
  Failing:      0
  Pending:      0
  Skipped:      0
  Screenshots:  0
  Video:        true
  Duration:     19 seconds
  Spec Ran:     credentials/deleteCredentials.spec.js

────────────────────────────────────────────────────────────────────────────────────────────────────

  Running: tech-preview/clusterpools/clusterpools_destroy.spec.js (5 of 5)

  destroy clusterpools
    1) RHACM4K-3072: CLC: Cluster Pools - Verify that a user is able to destroy a AWS cluster pool by using "ellipsis/options"

    cy:command ✔  window
    cy:request ✔  GET https://api.ci-op-fzlpf2gy-059fa.release-ci.cnv-qe.rhood.us:6443/apis/hive.openshift.io/v1/namespaces/XXXXX-clc-auto-pool-XXX1-1722232165257-ns/clusterpools/XXXXX-clc-auto-pool-XXX1-1722232165257
                  Status: 404
                  Response body: {
                    "kind": "Status",
                    "apiVersion": "v1",
                    "metadata": {},
                    "status": "Failure",
                    "message": "clusterpools.hive.openshift.io \"XXXXX-clc-auto-pool-XXX1-1722232165257\" not found",
                    "reason": "NotFound",
                    "details": {
                      "name": "XXXXX-clc-auto-pool-XXX1-1722232165257",
                      "group": "hive.openshift.io",
                      "kind": "clusterpools"
                    },
                    "code": 404
                  }
    cy:command ✘  assert Unable to destroy AWS cluster due to cluster pool resource issue. clusterpools.hive.openshift.io "XXXXX-clc-auto-pool-XXX1-1722232165257" not found: expected **404** to match /20[0,1]/

  0 passing (7s)
  1 failing

  1) destroy clusterpools
       RHACM4K-3072: CLC: Cluster Pools - Verify that a user is able to destroy a AWS cluster pool by using "ellipsis/options":

     AssertionError: Unable to destroy AWS cluster due to cluster pool resource issue. clusterpools.hive.openshift.io "XXXXX-clc-auto-pool-XXX1-1722232165257" not found: expected 404 to match /20[0,1]/
      at Context.eval (https://console-openshift-console.apps.ci-op-fzlpf2gy-059fa.release-ci.cnv-qe.rhood.us/__cypress/tests?p=cypress/support/index.js:2837:62)
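Editor's note: this failure is a missing precondition rather than a UI regression. The spec first queries the Hive API for the ClusterPool it is meant to destroy and asserts a 2xx status (/20[0,1]/), but no pool with the generated name exists on the hub in this run, so the request returns 404. The same precondition can be checked by hand; a sketch, with the pool and namespace names taken from the request URL in the log:

    # Reproduce the spec's precondition check outside Cypress.
    POOL=XXXXX-clc-auto-pool-XXX1-1722232165257
    NS=${POOL}-ns
    # Prints the pool when present; exits non-zero with NotFound otherwise,
    # matching the 404 the test received from /apis/hive.openshift.io/v1.
    oc get clusterpool.hive.openshift.io "$POOL" -n "$NS"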
[mochawesome] Report JSON saved to /tmp/clc/results/json/mochawesome-report_004.json

  (Results)

  Tests:        1
  Passing:      0
  Failing:      1
  Pending:      0
  Skipped:      0
  Screenshots:  1
  Video:        true
  Duration:     6 seconds
  Spec Ran:     tech-preview/clusterpools/clusterpools_destroy.spec.js

  (Screenshots)

  -  /tmp/clc/results/screenshots/tech-preview/clusterpools/clusterpools_destroy.spec.js/destroy clusterpools -- RHACM4K-3072 CLC Cluster Pools - Verify that a user is able to destroy a AWS cluster pool by using ellipsisoptions (failed).png (1280x720)

  (Video)

  -  Started processing:  Compressing to 32 CRF
  -  Finished processing: /tmp/clc/results/videos/tech-preview/clusterpools/clusterpools_destroy.spec.js.mp4 (0 seconds)

====================================================================================================

  (Run Finished)

     Spec                                                             Time   Tests  Passing  Failing  Pending  Skipped

  ✔  clusters/managedClusters/destroy/destroyClusters.spec.js        07:15      1        1        -        -        -
  ✔  clusters/managedClusters/destroy/destroyHostedClusters.spec.js  1ms        -        -        -        -        -
  ✔  clusters/managedClusters/destroy/destroySNOClusters.spec.js     1ms        -        -        -        -        -
  ✔  credentials/deleteCredentials.spec.js                           00:19      1        1        -        -        -
  ✖  tech-preview/clusterpools/clusterpools_destroy.spec.js          00:06      1        -        1        -        -

  ✖  1 of 5 failed (20%)                                             07:41      3        2        1        -        -

+ save_report destroy
+ [[ ! -d reports ]]
+ mkdir -p reports
+ _stage=destroy
+ _group=
+ [[ -d reports/destroy/ ]]
+ mkdir -p reports/destroy/
+ cp -r results/junit_cypress-2e49af3e1cd86439dea74760bd0abbb3.xml results/junit_cypress-56ac2d6d221c202519f2cbd220334993.xml results/junit_cypress-8ccf1e08950ef859bc3fc0700938e6fb.xml results/junit_cypress-b7598174f4da16592c17a87caccd6d8e.xml results/junit_cypress-d924cd74f361ed2e62c0f5e04d13f99e.xml reports/
+ cp -r results/json results/junit_cypress-2e49af3e1cd86439dea74760bd0abbb3.xml results/junit_cypress-56ac2d6d221c202519f2cbd220334993.xml results/junit_cypress-8ccf1e08950ef859bc3fc0700938e6fb.xml results/junit_cypress-b7598174f4da16592c17a87caccd6d8e.xml results/junit_cypress-d924cd74f361ed2e62c0f5e04d13f99e.xml results/screenshots results/videos reports/destroy/
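Editor's note: save_report is visible only through its xtrace above; a faithful sketch of what that trace implies is below. The real function lives in the harness and may differ in details; the junit_cypress-*.xml glob stands in for the expanded file list shown in the trace.

    # Sketch of save_report as implied by the xtrace output; illustrative only.
    save_report() {
      local _stage=$1
      local _group=$2   # empty in this run, hence the bare reports/destroy/
      if [[ ! -d reports ]]; then mkdir -p reports; fi
      [[ -d "reports/${_stage}/${_group}" ]] || mkdir -p "reports/${_stage}/${_group}"
      # JUnit XML is copied to the top level for the CI result collector...
      cp -r results/junit_cypress-*.xml reports/
      # ...while JSON, screenshots, and videos are grouped per stage.
      cp -r results/json results/junit_cypress-*.xml results/screenshots results/videos \
        "reports/${_stage}/${_group}"
    }

    save_report destroy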